hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7935b3149d10a7e1e1daf34532cf83227323530e
| 192
|
py
|
Python
|
ThreeDLabeler/__init__.py
|
3DLables/3DLabeler
|
dda599ed7b63952775279fb4b2042df90d74e128
|
[
"MIT"
] | 1
|
2020-02-18T17:01:30.000Z
|
2020-02-18T17:01:30.000Z
|
ThreeDLabeler/__init__.py
|
3DLables/3DLabeler
|
dda599ed7b63952775279fb4b2042df90d74e128
|
[
"MIT"
] | 12
|
2019-09-20T18:50:43.000Z
|
2020-08-07T11:01:36.000Z
|
ThreeDLabeler/__init__.py
|
3DLables/3DLabeler
|
dda599ed7b63952775279fb4b2042df90d74e128
|
[
"MIT"
] | null | null | null |
from ThreeDLabeler.preprocessing import package_to_pickle
from ThreeDLabeler.preprocessing import tag_parser
from ThreeDLabeler.images import Image
from ThreeDLabeler.plotting import mri_plot
| 38.4
| 57
| 0.895833
| 24
| 192
| 7
| 0.583333
| 0.404762
| 0.357143
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 192
| 4
| 58
| 48
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
f749e42dbbe00bd313f84513e30dce64364c04b0
| 226,490
|
py
|
Python
|
tests/crawling/test_retrieval_extras.py
|
MipsaPatel/stopstalk-deployment
|
bc0332fe120c30385bdbfac426aabe97f783b3a6
|
[
"MIT"
] | null | null | null |
tests/crawling/test_retrieval_extras.py
|
MipsaPatel/stopstalk-deployment
|
bc0332fe120c30385bdbfac426aabe97f783b3a6
|
[
"MIT"
] | null | null | null |
tests/crawling/test_retrieval_extras.py
|
MipsaPatel/stopstalk-deployment
|
bc0332fe120c30385bdbfac426aabe97f783b3a6
|
[
"MIT"
] | null | null | null |
"""
Copyright (c) 2015-2020 Raj Patel(raj454raj@gmail.com), StopStalk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
import requests
import sites
import urllib3
urllib3.disable_warnings()
current.environment = "test"
# ==============================================================================
class RetrievalTest:
# --------------------------------------------------------------------------
def __init__(self):
self.profile_site = {}
for site in current.SITES:
self.profile_site[site] = getattr(sites, site.lower()).Profile
# --------------------------------------------------------------------------
def test_tag_retrieval(self):
sites_with_tags_functionality = ["CodeChef", "CodeForces", "Spoj", "HackerEarth", "HackerRank", "Timus"]
assertion_hash = {
"with_tags": {
"CodeChef": {
"plink": "https://www.codechef.com/PRACTICE/problems/FNCS",
"tags": [u'data-structure', u'devuy11', u'fenwick', u'medium-hard', u'nov14', u'segment-tree', u'sqrt-decomp']
},
"CodeForces": {
"plink": "http://www.codeforces.com/problemset/problem/323/A",
"tags": [u'combinatorics', u'constructive algorithms']
},
"Spoj": {
"plink": "https://www.spoj.com/problems/YODANESS/",
"tags": [u'graph-theory', u'number-theory', u'shortest-path', u'sorting', u'tree', u'bitmasks']
},
"HackerEarth": {
"plink": "https://www.hackerearth.com/practice/algorithms/dynamic-programming/2-dimensional/practice-problems/algorithm/candy-distribution/",
"tags": [u'Dynamic Programming', u'Mathematics', u'Number Theory']
},
"HackerRank": {
"plink": "https://www.hackerrank.com/challenges/print-the-elements-of-a-linked-list",
"tags": [u'Linked Lists']
},
"Timus": {
"plink": "http://acm.timus.ru/problem.aspx?space=1&num=1954&locale=en",
"tags": [u'hardest problem', u'palindromes', u'string algorithms']
}
},
"without_tags": {
"CodeChef": "https://www.codechef.com/ZCOPRAC/problems/ZCO14004",
"CodeForces": "http://www.codeforces.com/problemset/gymProblem/100570/C",
"Spoj": "https://www.spoj.com/problems/TOUR/",
"HackerEarth": "https://www.hackerearth.com/problem/algorithm/find-pairs-1/",
"Timus": "http://acm.timus.ru/problem.aspx?space=1&num=1559&locale=en"
}
}
for site in sites_with_tags_functionality:
P = self.profile_site[site]
if P.is_website_down():
# Don't test for websites which are acked to be down
continue
tags_func = P.get_problem_details
tags_val = tags_func(problem_link=assertion_hash["with_tags"][site]["plink"],
update_things=["tags"])["tags"]
if set(tags_val) != set(assertion_hash["with_tags"][site]["tags"]):
raise RuntimeError(site + " with tags failure")
if site in assertion_hash["without_tags"]:
tags_val = tags_func(problem_link=assertion_hash["without_tags"][site],
update_things=["tags"])["tags"]
if tags_val not in ([u"-"], []):
raise RuntimeError(site + " without tags failure")
# --------------------------------------------------------------------------
def test_editorial_retrieval(self):
sites_with_editorial_functionality = ["CodeChef", "CodeForces", "HackerEarth", "HackerRank"]
assertion_hash = {
"with_editorial": {
"CodeChef": {
"plink": "https://www.codechef.com/LTIME27/problems/INVERT",
"editorial_link": "https://discuss.codechef.com/problems/INVERT"
},
"CodeForces": {
"plink": "http://www.codeforces.com/problemset/problem/102/B",
"editorial_link": "http://www.codeforces.com/blog/entry/2393"
},
"HackerEarth": {
"plink": "https://www.hackerearth.com/problem/approximate/lots-of-circles/",
"editorial_link": "https://www.hackerearth.com/problem/approximate/lots-of-circles/editorial/"
},
"HackerRank": {
"plink": "https://www.hackerrank.com/challenges/candles-2",
"editorial_link": "https://www.hackerrank.com/challenges/candles-2/editorial/"
},
"AtCoder": {
"plink": "https://atcoder.jp/contests/agc035/tasks/agc035_c",
"editorial_link": "https://img.atcoder.jp/agc035/editorial.pdf"
}
},
"without_editorial": {
"CodeChef": "https://www.codechef.com/PRACTICE/problems/PG",
"CodeForces": "http://www.codeforces.com/problemset/problem/234/D",
"HackerEarth": "https://www.hackerearth.com/problem/algorithm/level-selections/"
}
}
for site in sites_with_editorial_functionality:
P = self.profile_site[site]
if P.is_website_down():
# Don't test for websites which are acked to be down
continue
editorial_func = P.get_problem_details
editorial_link = editorial_func(problem_link=assertion_hash["with_editorial"][site]["plink"],
update_things=["editorial_link"])["editorial_link"]
if editorial_link != assertion_hash["with_editorial"][site]["editorial_link"]:
raise RuntimeError(site + " with editorial failure")
if site in assertion_hash["without_editorial"]:
editorial_link = editorial_func(problem_link=assertion_hash["without_editorial"][site],
update_things=["editorial_link"])["editorial_link"]
if editorial_link is not None:
raise RuntimeError(site + " without editorial failure")
# --------------------------------------------------------------------------
def test_problem_setters_retrieval(self):
sites_with_problem_setters = ["CodeChef", "CodeForces", "HackerEarth", "HackerRank", "Spoj", "Timus"]
assertion_hash = {
"with_problem_setters": {
"CodeChef": {
"plink": "https://www.codechef.com/LTIME27/problems/INVERT",
"problem_setters": ["ma5termind"]
},
"CodeForces": {
"plink": "http://www.codeforces.com/problemset/problem/1200/B",
"problem_setters": ["djm03178", "nong"]
},
"HackerEarth": {
"plink": "https://www.hackerearth.com/problem/algorithm/level-selections/",
"problem_setters": ["akileshreddy40950"]
},
"HackerRank": {
"plink": "https://www.hackerrank.com/challenges/candles-2",
"problem_setters": ["gdisastery"]
},
"Timus": {
"plink": "https://acm.timus.ru/problem.aspx?space=1&num=1954&locale=en",
"problem_setters": ["Mikhail Rubinchik (prepared by Kirill Borozdin)"]
},
"Spoj": {
"plink": "https://www.spoj.com/problems/CONNECT2/",
"problem_setters": ["nikola_borisof"]
}
},
"without_problem_setters": {
"CodeForces": "http://www.codeforces.com/problemset/problem/1212/C",
"HackerEarth": "https://www.hackerearth.com/challenges/college/engineers-day-nit-silchar-challenge/algorithm/valentines-day/"
}
}
for site in sites_with_problem_setters:
P = self.profile_site[site]
if P.is_website_down():
# Don't test for websites which are acked to be down
continue
pd_func = P.get_problem_details
current_setters = pd_func(problem_link=assertion_hash["with_problem_setters"][site]["plink"],
update_things=["problem_setters"])["problem_setters"]
if current_setters != assertion_hash["with_problem_setters"][site]["problem_setters"]:
raise RuntimeError(site + " with problem_setters failure")
if site in assertion_hash["without_problem_setters"]:
current_setters = pd_func(problem_link=assertion_hash["without_problem_setters"][site],
update_things=["problem_setters"])["problem_setters"]
if current_setters is not None:
raise RuntimeError(site + " without problem_setters failure")
return
# --------------------------------------------------------------------------
def test_invalid_handle(self):
handle = "thisreallycantbeahandle308"
result = map(lambda site: (site, self.profile_site[site].is_invalid_handle(handle)),
filter(lambda site: self.profile_site[site].is_website_down() == False,
current.SITES.keys()))
failure_sites = []
for site, res in result:
if not res:
failure_sites.append(site)
if len(failure_sites) > 0:
raise RuntimeError(", ".join(failure_sites) + " " + "invalid handle failure")
# --------------------------------------------------------------------------
def test_download_submission(self):
import requests
from bs4 import BeautifulSoup
sites_with_download_functionality = ["CodeChef", "CodeForces"]
assertion_hash = {
"CodeChef": {
"view_link": "https://www.codechef.com/viewsolution/27348746",
"submission": '#include<bits/stdc++.h>\r\nusing namespace std;\r\nint main(){\r\n\tint t;\r\n\tcin>>t;\r\n\twhile(t--){\r\n\t\tint n,m,u,v;\r\n\t\tcin>>n>>m;\r\n\t\tif(m%2==0){\r\n\t\t\tint temp;\r\n\t\t\tfor(auto i=0;i<m;i++){\r\n\t\t\t\tcin>>temp>>temp;\r\n\t\t\t}\t\r\n\t\t\tcout<<1<<endl;\r\n\t\t\tfor(auto i=0;i<n;i++)\r\n\t\t\t{\r\n\t\t\t\tcout<<1<<" ";\r\n\t\t\t}\r\n\t\t\tcout<<endl;\r\n\t\t\tcontinue;\r\n\t\t}\r\n\r\n\t\t// m is odd\r\n\t\tvector<vector<int>> g(n);\r\n\t\tvector<int> d(n);\r\n\t\tfor(auto i=0;i<m;i++){\r\n\t\t\tcin>>u>>v;\r\n\t\t\td[u-1]++;\r\n\t\t\td[v-1]++;\r\n\t\t\tg[u-1].push_back(v-1);\r\n\t\t\tg[v-1].push_back(u-1);\r\n\t\t}\r\n\r\n\t\t// m is odd and we find an odd vertice\r\n\t\tint idx=-1;\r\n\t\tfor(auto i=0;i<n;i++){\r\n\t\t\tif(d[i]%2==1) {idx=i;break;}\r\n\t\t}\r\n\t\tif(idx!=-1){\r\n\t\t\tcout<<2<<endl;\r\n\t\t\tfor(auto i=0;i<n;i++)\r\n\t\t\t{\r\n\t\t\t\tcout<<((i==idx)?1:2)<<" ";\r\n\t\t\t}\r\n\t\t\tcout<<endl;\r\n\t\t\tcontinue;\r\n\r\n\t\t}\r\n\r\n\t\t// m is odd and all degrees are even\r\n\t\t// idx is 3 idx1 is 2 rest is 1\r\n\t\tidx=-1;\r\n\t\tint idx1=-1;\r\n\t\t// find a vertex removing which we get odd vertices\r\n\t\tfor(auto i=0;i<n;i++){\r\n\t\t\tif(d[i]>0){idx=i;break;}\r\n\t\t}\r\n\t\t// idx will be 3\r\n\t\t// change all degrees\r\n\t\tfor(auto i:g[idx]){\r\n\t\t\td[i]--;\r\n\t\t\tidx1=i;\r\n\t\t}\r\n\t\tcout<<3<<endl;\r\n\t\td[idx]=0;\r\n\t\tg[idx]=vector<int>();\r\n\t\tfor(auto i=0;i<n;i++)\r\n\t\t{\r\n\t\t\tif(i==idx){ \r\n\t\t\t\tcout<<1<<" ";\r\n\t\t\t}\r\n\t\t\telse if(i==idx1){\r\n\t\t\t\tcout<<2<<" ";\r\n\t\t\t}\r\n\t\t\telse{\r\n\t\t\t\tcout<<3<<" ";\r\n\t\t\t}\r\n\t\t}\r\n\t\tcout<<endl;\r\n\t}\r\n}\r\n'
},
"CodeForces": {
"view_link": "http://www.codeforces.com/contest/454/submission/7375767",
"submission": '#include<stdio.h>\nint main()\n{\n\tint n,i,j,k;\n\tscanf("%d",&n);\n\tint h=n/2+1;\n\tfor(i=0;i<h;i++)\n\t{\n\t\tfor(k=0;k<n/2-i;k++)\n\t\t\tprintf("*");\n\t\tfor(j=0;j<2*i+1;j++)\n\t\t\tprintf("D");\n\t\tfor(j=n/2+i+1;j<n;j++)\n\t\t\tprintf("*");\n\t\tprintf("\\n");\n\t}\n\tfor(i=0;i<n/2;i++)\n\t{\n\t\tfor(k=0;k<=i;k++)\n\t\t printf("*");\n\t\tfor(j=n-2*i;j>=3;j--)\n\t\t\tprintf("D");\n\t\tfor(j=0;j<=i;j++)\n\t\t\tprintf("*");\n\t\tprintf("\\n");\n\t}\n\treturn 0;\n}\n'
},
"AtCoder": {
"view_link": "https://atcoder.jp/contests/agc039/submissions/7869333",
"submission": "/**\r\n * author: tourist\r\n * created: 05.10.2019 16:12:28 \r\n**/\r\n#include <bits/stdc++.h>\r\n\r\nusing namespace std;\r\n\r\nint main() {\r\n ios::sync_with_stdio(false);\r\n cin.tie(0);\r\n int n;\r\n cin >> n;\r\n n *= 2;\r\n vector<string> g(n);\r\n for (int i = 0; i < n; i++) {\r\n cin >> g[i];\r\n }\r\n vector<vector<vector<long long>>> dp(2 * n, vector<vector<long long>>(2 * n, vector<long long>(2 * n)));\r\n for (int i = n - 1; i >= 1; i--) {\r\n for (int j = i; j < n; j++) {\r\n for (int k = j; k < n; k++) {\r\n if (i == j && j == k) {\r\n dp[i][j][k] = 1;\r\n continue;\r\n }\r\n if (i == j || j == k) {\r\n dp[i][j][k] = 0;\r\n continue;\r\n }\r\n dp[i][j][k] = 0;\r\n for (int x = i; x < j; x++) {\r\n for (int y = j + 1; y <= k; y++) {\r\n for (int u = i; u <= x; u++) {\r\n for (int v = y; v <= k; v++) {\r\n if (g[u][v] == '1') {\r\n dp[i][j][k] += dp[i][u][x] * dp[y][v][k] * dp[x + 1][j][y - 1];\r\n }\r\n }\r\n }\r\n }\r\n }\r\n }\r\n }\r\n }\r\n long long ans = 0;\r\n for (int j = 1; j < n; j++) {\r\n if (g[0][j] == '1') {\r\n ans += dp[1][j][n - 1];\r\n }\r\n }\r\n cout << ans << '\\n';\r\n return 0;\r\n}\r\n"
}
}
for site in sites_with_download_functionality:
P = self.profile_site[site]
if P.is_website_down():
# Don't test for websites which are acked to be down
continue
submission_content = P.download_submission(assertion_hash[site]["view_link"])
if submission_content != assertion_hash[site]["submission"]:
raise RuntimeError(site + " download submission failed")
# --------------------------------------------------------------------------
def test_rating_graph(self):
sites_with_rating_graph_functionality = ["CodeChef", "CodeForces", "HackerRank", "HackerEarth"]
handles = {
"CodeChef": "tryingtocode",
"CodeForces": "raj454raj",
"HackerRank": "tryingtocode",
"HackerEarth": "karanaggarwal",
"AtCoder": "imanudeep111"
}
expected_list = {
"CodeChef": [{'data': {'2015-06-15 15:00:00': {'url': 'https://www.codechef.com/JUNE15', 'rating': '1605', 'name': 'June Challenge 2015', 'rank': '1913'}, '2016-06-15 15:00:00': {'url': 'https://www.codechef.com/JUNE16', 'rating': '1641', 'name': 'June Challenge 2016', 'rank': '5083'}, '2014-07-14 15:00:00': {'url': 'https://www.codechef.com/JULY14', 'rating': '1518', 'name': 'July Challenge 2014', 'rank': '2769'}, '2015-08-17 15:00:00': {'url': 'https://www.codechef.com/AUG15', 'rating': '1704', 'name': 'August Challenge 2015', 'rank': '1244'}, '2014-01-13 15:00:00': {'url': 'https://www.codechef.com/JAN14', 'rating': '1462', 'name': 'January Challenge 2014', 'rank': '3548'}, '2014-12-15 17:00:00': {'url': 'https://www.codechef.com/DEC14', 'rating': '1609', 'name': 'December Challenge 2014', 'rank': '2218'}, '2015-01-12 15:00:00': {'url': 'https://www.codechef.com/JAN15', 'rating': '1617', 'name': 'January Challenge 2015', 'rank': '3105'}, '2015-09-14 15:00:00': {'url': 'https://www.codechef.com/SEPT15', 'rating': '1829', 'name': 'September Challenge 2015', 'rank': '1417'}, '2014-11-17 15:00:00': {'url': 'https://www.codechef.com/NOV14', 'rating': '1717', 'name': 'November Challenge 2014', 'rank': '1751'}, '2015-03-16 15:00:00': {'url': 'https://www.codechef.com/MARCH15', 'rating': '1553', 'name': 'March Challenge 2015', 'rank': '2489'}, '2014-06-16 15:00:00': {'url': 'https://www.codechef.com/JUNE14', 'rating': '1455', 'name': 'June Challenge 2014', 'rank': '4382'}, '2014-02-17 15:00:00': {'url': 'https://www.codechef.com/FEB14', 'rating': '1509', 'name': 'February Challenge 2014', 'rank': '2007'}, '2015-05-18 15:00:00': {'url': 'https://www.codechef.com/MAY15', 'rating': '1519', 'name': 'May Challenge 2015', 'rank': '2946'}, '2015-07-13 15:00:00': {'url': 'https://www.codechef.com/JULY15', 'rating': '1635', 'name': 'July Challenge 2015', 'rank': '1554'}, '2014-08-11 15:00:00': {'url': 'https://www.codechef.com/AUG14', 'rating': '1633', 'name': 'August Challenge 
2014', 'rank': '1293'}, '2014-10-13 15:00:00': {'url': 'https://www.codechef.com/OCT14', 'rating': '1730', 'name': 'October Challenge 2014', 'rank': '900'}}, 'title': 'CodeChef Long'}, {'data': {'2015-09-21 00:00:00': {'url': 'https://www.codechef.com/COOK62', 'rating': '1807', 'name': 'September Mega Cook-Off 2015', 'rank': '751'}, '2015-08-24 00:50:00': {'url': 'https://www.codechef.com/COOK61', 'rating': '1881', 'name': 'August Cook-Off 2015', 'rank': '221'}}, 'title': 'CodeChef Cook-off'}, {'data': {}, 'title': 'CodeChef Lunchtime'}],
"CodeForces": [{'data': {'2015-09-28 14:30:00': {'rating': '1295', 'name': u'Codeforces Round #322 (Div. 2)', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/581', 'rank': 1836, 'ratingChange': -84}, '2014-09-28 21:05:00': {'rating': '1279', 'name': u'Codeforces Round #270', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/472', 'rank': 3520, 'ratingChange': -124}, '2015-09-10 22:00:00': {'rating': '1422', 'name': u'Codeforces Round #319 (Div. 2)', 'solvedCount': 2, 'url': 'http://www.codeforces.com/contest/577', 'rank': 940, 'ratingChange': 134}, '2016-01-14 22:05:00': {'rating': '1228', 'name': u'Codeforces Round #339 (Div. 2)', 'solvedCount': 0, 'url': 'http://www.codeforces.com/contest/614', 'rank': 1929, 'ratingChange': -81}, '2016-08-20 18:35:00': {'rating': '1298', 'name': u'Codeforces Round #368 (Div. 2)', 'solvedCount': 2, 'url': 'http://www.codeforces.com/contest/707', 'rank': 1919, 'ratingChange': 82}, '2015-10-31 22:00:00': {'rating': '1284', 'name': u'Codeforces Round #328 (Div. 2)', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/592', 'rank': 2075, 'ratingChange': 11}, '2015-10-25 14:30:00': {'rating': '1273', 'name': u'Codeforces Round #327 (Div. 2)', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/591', 'rank': 2259, 'ratingChange': -25}, '2015-09-22 22:00:00': {'rating': '1379', 'name': u'Codeforces Round #321 (Div. 2)', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/580', 'rank': 2018, 'ratingChange': -43}, '2014-08-08 21:00:00': {'rating': '1403', 'name': u'Codeforces Round #260 (Div. 2)', 'solvedCount': 0, 'url': 'http://www.codeforces.com/contest/456', 'rank': 2152, 'ratingChange': -97}, '2015-12-01 21:05:00': {'rating': '1351', 'name': u'Codeforces Round #334 (Div. 2)', 'solvedCount': 2, 'url': 'http://www.codeforces.com/contest/604', 'rank': 1079, 'ratingChange': 67}, '2016-08-29 17:35:00': {'rating': '1309', 'name': u'Codeforces Round #369 (Div. 
2)', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/711', 'rank': 2332, 'ratingChange': 11}, '2015-12-09 21:35:00': {'rating': '1309', 'name': u'Codeforces Round #335 (Div. 2)', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/606', 'rank': 2249, 'ratingChange': -42}, '2016-08-11 22:05:00': {'rating': '1216', 'name': u'Codeforces Round #367 (Div. 2)', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/706', 'rank': 2989, 'ratingChange': -12}, '2015-08-29 22:00:00': {'rating': '1288', 'name': u'Codeforces Round #318 [RussianCodeCup Thanks-Round] (Div. 2)', 'solvedCount': 1, 'url': 'http://www.codeforces.com/contest/574', 'rank': 2009, 'ratingChange': -70}, '2015-10-03 22:15:00': {'rating': '1285', 'name': u'Codeforces Round #323 (Div. 2)', 'solvedCount': 2, 'url': 'http://www.codeforces.com/contest/583', 'rank': 2912, 'ratingChange': -10}, '2015-10-06 22:00:00': {'rating': '1298', 'name': u'Codeforces Round #324 (Div. 2)', 'solvedCount': 2, 'url': 'http://www.codeforces.com/contest/584', 'rank': 2062, 'ratingChange': 13}, '2014-10-06 21:00:00': {'rating': '1227', 'name': u'Codeforces Round #271 (Div. 2)', 'solvedCount': 2, 'url': 'http://www.codeforces.com/contest/474', 'rank': 1654, 'ratingChange': -52}, '2015-08-22 22:00:00': {'rating': '1358', 'name': u'Codeforces Round #317 [AimFund Thanks-Round] (Div. 2)', 'solvedCount': 2, 'url': 'http://www.codeforces.com/contest/572', 'rank': 1114, 'ratingChange': 131}, '2016-09-23 18:35:00': {'rating': '1377', 'name': u'Codeforces Round #373 (Div. 2)', 'solvedCount': 2, 'url': 'http://www.codeforces.com/contest/719', 'rank': 1593, 'ratingChange': 68}}, 'title': 'Codeforces'}],
"HackerRank": [{'data': {'2014-07-21 21:30:00': {'url': u'https://www.hackerrank.com/w7', 'rating': '1554.46', 'name': u'Weekly Challenges - Week 7', 'rank': 499}, '2015-10-30 21:30:00': {'url': u'https://www.hackerrank.com/codestorm', 'rating': '1276.05', 'name': u'CodeStorm 2015', 'rank': 3743}, '2015-08-02 21:30:00': {'url': u'https://www.hackerrank.com/countercode', 'rating': '1287.0', 'name': u'CounterCode 2015', 'rank': 3605}, '2014-08-11 21:30:00': {'url': u'https://www.hackerrank.com/w8', 'rating': '1276.88', 'name': u'Weekly Challenges - Week 8', 'rank': 1204}}, 'title': u'HackerRank - Algorithms'}],
"HackerEarth": [{'data': {'2016-05-21 10:30:00': {'url': 'https://www.hackerearth.com/challenges/competitive/may-circuits/', 'rating': 1493, 'name': 'May Circuits', 'rank': 714}, '2017-10-21 10:30:00': {'url': 'https://www.hackerearth.com/challenges/competitive/october-circuits-17/', 'rating': 1491, 'name': "October Circuits '17", 'rank': 1225}, '2017-09-22 10:30:00': {'url': 'https://www.hackerearth.com/challenges/competitive/september-circuits-17/', 'rating': 1569, 'name': "September Circuits '17", 'rank': 291}, '2020-05-16 10:30:00': {'url': 'https://www.hackerearth.com/challenges/competitive/may-circuits-20/', 'rating': 1415, 'name': "May Circuits '20", 'rank': 647}, '2018-03-17 10:30:00': {'url': 'https://www.hackerearth.com/challenges/competitive/march-circuits-18/', 'rating': 1461, 'name': "March Circuits '18", 'rank': 523}, '2019-01-18 09:30:00': {'url': 'https://www.hackerearth.com/challenges/competitive/january-circuits-19/', 'rating': 1337, 'name': "January Circuits '19", 'rank': 3420}, '2017-07-28 10:30:00': {'url': 'https://www.hackerearth.com/challenges/competitive/july-circuits-17/', 'rating': 1462, 'name': "July Circuits '17", 'rank': 1326}}, 'title': 'HackerEarth'}],
"AtCoder": [{'data': {'2020-01-10 19:10:00': {'url': 'https://atcoder.jp/contests/abc150', 'rating': '-', 'ratingChange': '-', 'name': u'AtCoder Beginner Contest 150', 'rank': u'2640'}, '2020-03-14 19:10:00': {'url': 'https://atcoder.jp/contests/panasonic2020', 'rating': '33', 'ratingChange': '+31', 'name': u'Panasonic Programming Contest 2020', 'rank': u'3897'}, '2020-05-02 19:20:00': {'url': 'https://atcoder.jp/contests/abc165', 'rating': '192', 'ratingChange': '+51', 'name': u'AtCoder Beginner Contest 165', 'rank': u'6343'}, '2020-03-01 19:10:00': {'url': 'https://atcoder.jp/contests/abc157', 'rating': '2', 'ratingChange': '-', 'name': u'AtCoder Beginner Contest 157', 'rank': u'6327'}, '2020-04-26 19:10:00': {'url': 'https://atcoder.jp/contests/abc164', 'rating': '141', 'ratingChange': '+108', 'name': u'AtCoder Beginner Contest 164', 'rank': u'3184'}, '2020-04-19 19:10:00': {'url': 'https://atcoder.jp/contests/abc163', 'rating': '-', 'ratingChange': '-', 'name': u'AtCoder Beginner Contest 163', 'rank': u'4042'}}, 'title': 'AtCoder'}]
}
result = {}
for site in sites_with_rating_graph_functionality:
P = self.profile_site[site]
if P.is_website_down():
# Don't test for websites which are acked to be down
continue
get_rating_func = P.rating_graph_data
res = get_rating_func(handles[site])
if expected_list[site] != res:
raise RuntimeError("Rating graph dict does not match for " + site)
# --------------------------------------------------------------------------
def test_submissions(self):
handles = {
"CodeChef": "tryingtocode",
"CodeForces": "raj454raj",
"HackerRank": "tryingtocode",
"HackerEarth": "raj454raj",
"Spoj": "raj454raj",
"UVa": "raj454raj",
"Timus": "222187",
"AtCoder": "raj454raj"
}
expected_result = {
"CodeChef": [(u'2013-12-02 18:52:13', u'https://www.codechef.com/PRACTICE/problems/TEST', u'TEST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3017060'), (u'2013-12-02 19:02:07', u'https://www.codechef.com/PRACTICE/problems/TEST', u'TEST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3017069'), (u'2013-12-02 19:13:59', u'https://www.codechef.com/PRACTICE/problems/HS08TEST', u'HS08TEST', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3017092'), (u'2013-12-02 19:16:51', u'https://www.codechef.com/PRACTICE/problems/HS08TEST', u'HS08TEST', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3017097'), (u'2013-12-02 19:20:42', u'https://www.codechef.com/PRACTICE/problems/HS08TEST', u'HS08TEST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3017102'), (u'2013-12-02 19:31:26', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3017121'), (u'2013-12-03 01:15:08', u'https://www.codechef.com/PRACTICE/problems/FCTRL', u'FCTRL', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3017614'), (u'2013-12-03 01:15:44', u'https://www.codechef.com/PRACTICE/problems/FCTRL', u'FCTRL', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3017615'), (u'2013-12-03 01:18:21', u'https://www.codechef.com/PRACTICE/problems/FCTRL', u'FCTRL', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3017619'), (u'2013-12-03 01:23:05', u'https://www.codechef.com/PRACTICE/problems/FCTRL', u'FCTRL', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3017629'), (u'2013-12-03 01:33:10', u'https://www.codechef.com/PRACTICE/problems/FCTRL2', u'FCTRL2', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3017639'), (u'2013-12-06 13:51:02', u'https://www.codechef.com/PRACTICE/problems/PRPALIN', u'PRPALIN', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3023114'), (u'2013-12-06 13:59:27', 
u'https://www.codechef.com/PRACTICE/problems/PRPALIN', u'PRPALIN', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3023128'), (u'2013-12-06 14:26:23', u'https://www.codechef.com/PRACTICE/problems/NUMPATH', u'NUMPATH', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3023162'), (u'2013-12-06 14:34:44', u'https://www.codechef.com/PRACTICE/problems/PRPALIN', u'PRPALIN', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3023172'), (u'2013-12-06 14:40:45', u'https://www.codechef.com/PRACTICE/problems/PRPALIN', u'PRPALIN', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3023183'), (u'2013-12-06 14:58:49', u'https://www.codechef.com/PRACTICE/problems/PRPALIN', u'PRPALIN', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3023209'), (u'2013-12-06 15:22:57', u'https://www.codechef.com/PRACTICE/problems/HOLES', u'HOLES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3023522'), (u'2013-12-12 15:04:32', u'https://www.codechef.com/PRACTICE/problems/NAME2', u'NAME2', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3076899'), (u'2013-12-12 15:22:56', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3077003'), (u'2013-12-12 15:24:57', u'https://www.codechef.com/PRACTICE/problems/MAXCOUNT', u'MAXCOUNT', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3077013'), (u'2013-12-12 17:41:44', u'https://www.codechef.com/PRACTICE/problems/DECSTR', u'DECSTR', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3077862'), (u'2013-12-12 18:04:39', u'https://www.codechef.com/PRACTICE/problems/DECSTR', u'DECSTR', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3078001'), (u'2013-12-12 18:53:41', u'https://www.codechef.com/PRACTICE/problems/DECSTR', u'DECSTR', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3078284'), (u'2013-12-12 19:26:47', u'https://www.codechef.com/PRACTICE/problems/DECSTR', u'DECSTR', u'AC', u'0', 
u'C', 'https://www.codechef.com/viewsolution/3078484'), (u'2013-12-12 19:39:23', u'https://www.codechef.com/PRACTICE/problems/NAME2', u'NAME2', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3078558'), (u'2013-12-13 15:04:16', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3083547'), (u'2013-12-13 15:09:42', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3083574'), (u'2013-12-13 15:13:40', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3083602'), (u'2013-12-13 19:30:02', u'https://www.codechef.com/PRACTICE/problems/NAME2', u'NAME2', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3085115'), (u'2013-12-14 13:37:45', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3089188'), (u'2013-12-14 13:40:39', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3089199'), (u'2013-12-14 13:45:29', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3089226'), (u'2013-12-14 19:29:31', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3091091'), (u'2013-12-18 00:17:52', u'https://www.codechef.com/PRACTICE/problems/ONP', u'ONP', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3108217'), (u'2013-12-18 00:29:10', u'https://www.codechef.com/PRACTICE/problems/ONP', u'ONP', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3108251'), (u'2013-12-18 00:58:37', u'https://www.codechef.com/PRACTICE/problems/ONP', u'ONP', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3108323'), (u'2013-12-18 01:04:19', u'https://www.codechef.com/PRACTICE/problems/ONP', 
u'ONP', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3108336'), (u'2013-12-18 01:46:49', u'https://www.codechef.com/PRACTICE/problems/SUMTRIAN', u'SUMTRIAN', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3108432'), (u'2013-12-18 02:02:45', u'https://www.codechef.com/PRACTICE/problems/COINS', u'COINS', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3108454'), (u'2013-12-18 02:09:53', u'https://www.codechef.com/PRACTICE/problems/COINS', u'COINS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3108466'), (u'2013-12-18 02:19:38', u'https://www.codechef.com/PRACTICE/problems/COINS', u'COINS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3108479'), (u'2013-12-18 02:36:47', u'https://www.codechef.com/PRACTICE/problems/COINS', u'COINS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3108489'), (u'2013-12-18 02:38:40', u'https://www.codechef.com/PRACTICE/problems/COINS', u'COINS', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3108491'), (u'2013-12-18 02:40:21', u'https://www.codechef.com/PRACTICE/problems/COINS', u'COINS', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3108493'), (u'2013-12-19 23:56:23', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/3113518'), (u'2013-12-19 23:58:35', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/3113525'), (u'2013-12-20 00:00:56', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3113535'), (u'2013-12-20 02:45:48', u'https://www.codechef.com/PRACTICE/problems/FCTRL2', u'FCTRL2', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/3113821'), (u'2013-12-20 02:48:52', u'https://www.codechef.com/PRACTICE/problems/FCTRL2', u'FCTRL2', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3113825'), (u'2013-12-20 03:10:47', 
u'https://www.codechef.com/PRACTICE/problems/MARBLES', u'MARBLES', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3113849'), (u'2013-12-20 03:27:48', u'https://www.codechef.com/PRACTICE/problems/MARBLES', u'MARBLES', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3113865'), (u'2013-12-20 03:43:53', u'https://www.codechef.com/PRACTICE/problems/MARBLES', u'MARBLES', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3113877'), (u'2013-12-20 15:47:52', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/3114663'), (u'2013-12-20 15:49:13', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/3114664'), (u'2013-12-20 15:52:15', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3114671'), (u'2013-12-20 15:58:50', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3114683'), (u'2014-01-01 22:25:19', u'https://www.codechef.com/PRACTICE/problems/MSTICK', u'MSTICK', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3148896'), (u'2014-01-02 22:42:07', u'https://www.codechef.com/PRACTICE/problems/RESIST', u'RESIST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3150795'), (u'2014-01-02 22:54:14', u'https://www.codechef.com/PRACTICE/problems/RESIST', u'RESIST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3150836'), (u'2014-01-02 22:56:42', u'https://www.codechef.com/PRACTICE/problems/RESIST', u'RESIST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3150842'), (u'2014-01-02 22:58:50', u'https://www.codechef.com/PRACTICE/problems/RESIST', u'RESIST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3150846'), (u'2014-01-02 23:18:24', u'https://www.codechef.com/PRACTICE/problems/MSTICK', u'MSTICK', u'WA', u'0', u'C', 
'https://www.codechef.com/viewsolution/3150913'), (u'2014-01-05 16:58:47', u'https://www.codechef.com/PRACTICE/problems/TWTCLOSE', u'TWTCLOSE', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3188137'), (u'2014-01-06 21:24:27', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3200011'), (u'2014-01-06 21:29:23', u'https://www.codechef.com/PRACTICE/problems/SAD', u'SAD', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3200056'), (u'2014-01-06 21:58:37', u'https://www.codechef.com/PRACTICE/problems/FLIPCOIN', u'FLIPCOIN', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3200313'), (u'2014-01-06 22:50:32', u'https://www.codechef.com/PRACTICE/problems/FLIPCOIN', u'FLIPCOIN', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3200883'), (u'2014-01-07 15:19:35', u'https://www.codechef.com/PRACTICE/problems/LEVY', u'LEVY', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3205638'), (u'2014-01-07 15:23:13', u'https://www.codechef.com/PRACTICE/problems/LEVY', u'LEVY', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/3205664'), (u'2014-01-07 15:38:53', u'https://www.codechef.com/PRACTICE/problems/LEVY', u'LEVY', 'CE', u'0', u'C++ 4.3.2', 'https://www.codechef.com/viewsolution/3205784'), (u'2014-01-08 17:18:58', u'https://www.codechef.com/JAN14/problems/ERROR', u'ERROR', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3215076'), (u'2014-01-08 17:32:16', u'https://www.codechef.com/JAN14/problems/ERROR', u'ERROR', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3215197'), (u'2014-01-08 17:34:26', u'https://www.codechef.com/JAN14/problems/PLZLYKME', u'PLZLYKME', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3215217'), (u'2014-01-08 17:50:31', u'https://www.codechef.com/JAN14/problems/PLZLYKME', u'PLZLYKME', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3215325'), (u'2014-01-08 23:01:50', 
u'https://www.codechef.com/JAN14/problems/FGFS', u'FGFS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3217930'), (u'2014-01-09 18:42:17', u'https://www.codechef.com/PRACTICE/problems/TSORT', u'TSORT', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3223261'), (u'2014-01-09 18:49:03', u'https://www.codechef.com/PRACTICE/problems/TSORT', u'TSORT', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3223313'), (u'2014-01-09 18:57:00', u'https://www.codechef.com/PRACTICE/problems/TSORT', u'TSORT', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3223384'), (u'2014-01-09 19:26:01', u'https://www.codechef.com/PRACTICE/problems/PERMUT2', u'PERMUT2', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3223635'), (u'2014-01-09 19:28:32', u'https://www.codechef.com/PRACTICE/problems/PERMUT2', u'PERMUT2', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3223652'), (u'2014-01-09 19:47:04', u'https://www.codechef.com/PRACTICE/problems/TLG', u'TLG', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3223799'), (u'2014-01-09 20:32:49', u'https://www.codechef.com/PRACTICE/problems/TLG', u'TLG', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3224190'), (u'2014-01-09 20:35:41', u'https://www.codechef.com/PRACTICE/problems/TLG', u'TLG', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3224222'), (u'2014-01-09 23:53:53', u'https://www.codechef.com/PRACTICE/problems/TLG', u'TLG', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3225832'), (u'2014-01-10 00:14:05', u'https://www.codechef.com/PRACTICE/problems/NUMGAME', u'NUMGAME', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3226019'), (u'2014-01-10 23:16:53', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3231942'), (u'2014-01-10 23:25:05', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'TLE', u'0', u'C', 
'https://www.codechef.com/viewsolution/3232000'), (u'2014-01-10 23:32:09', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3232061'), (u'2014-01-10 23:37:08', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3232115'), (u'2014-01-10 23:46:15', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3232189'), (u'2014-01-12 16:08:22', u'https://www.codechef.com/PRACTICE/problems/D1', u'D1', u'TLE', u'0', u'PYTH', 'https://www.codechef.com/viewsolution/3242893'), (u'2014-01-12 16:41:33', u'https://www.codechef.com/PRACTICE/problems/ASTRGAME', u'ASTRGAME', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3243146'), (u'2014-01-12 16:43:25', u'https://www.codechef.com/PRACTICE/problems/ASTRGAME', u'ASTRGAME', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3243158'), (u'2014-01-12 19:38:52', u'https://www.codechef.com/PRACTICE/problems/KPRIME', u'KPRIME', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3244328'), (u'2014-01-12 20:04:49', u'https://www.codechef.com/PRACTICE/problems/KPRIME', u'KPRIME', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3244480'), (u'2014-01-13 10:34:13', u'https://www.codechef.com/PRACTICE/problems/BUY1GET1', u'BUY1GET1', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3248580'), (u'2014-01-13 10:41:26', u'https://www.codechef.com/PRACTICE/problems/BUY1GET1', u'BUY1GET1', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3248611'), (u'2014-01-13 10:52:51', u'https://www.codechef.com/PRACTICE/problems/BUY1GET1', u'BUY1GET1', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3248674'), (u'2014-01-13 11:53:09', u'https://www.codechef.com/PRACTICE/problems/HORSES', u'HORSES', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3249017'), (u'2014-01-13 
12:01:58', u'https://www.codechef.com/PRACTICE/problems/HORSES', u'HORSES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3249080'), (u'2014-01-13 12:13:20', u'https://www.codechef.com/PRACTICE/problems/NUMGAME', u'NUMGAME', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3249157'), (u'2014-01-13 12:30:50', u'https://www.codechef.com/PRACTICE/problems/BUY1GET1', u'BUY1GET1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3249302'), (u'2014-01-13 13:14:27', u'https://www.codechef.com/PRACTICE/problems/TWSTR', u'TWSTR', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3249663'), (u'2014-01-13 20:23:37', u'https://www.codechef.com/PRACTICE/problems/HELLO', u'HELLO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3251908'), (u'2014-01-13 21:07:57', u'https://www.codechef.com/PRACTICE/problems/DIGROT', u'DIGROT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3252038'), (u'2014-01-13 21:46:16', u'https://www.codechef.com/PRACTICE/problems/HELLO', u'HELLO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3252146'), (u'2014-01-13 22:06:21', u'https://www.codechef.com/PRACTICE/problems/HELLO', u'HELLO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3252214'), (u'2014-01-13 22:13:24', u'https://www.codechef.com/PRACTICE/problems/HELLO', u'HELLO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3252242'), (u'2014-01-13 22:15:40', u'https://www.codechef.com/PRACTICE/problems/HELLO', u'HELLO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3252253'), (u'2014-01-13 22:21:15', u'https://www.codechef.com/PRACTICE/problems/HELLO', u'HELLO', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3252279'), (u'2014-01-14 00:21:02', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3252851'), (u'2014-01-14 01:05:42', u'https://www.codechef.com/PRACTICE/problems/LAPIN', u'LAPIN', 'RE', u'0', u'C', 
'https://www.codechef.com/viewsolution/3253032'), (u'2014-01-14 01:08:04', u'https://www.codechef.com/PRACTICE/problems/LAPIN', u'LAPIN', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3253049'), (u'2014-01-14 01:11:18', u'https://www.codechef.com/PRACTICE/problems/LAPIN', u'LAPIN', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3253069'), (u'2014-01-14 14:06:41', u'https://www.codechef.com/PRACTICE/problems/PPXOR', u'PPXOR', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3254264'), (u'2014-01-14 19:12:48', u'https://www.codechef.com/PRACTICE/problems/CHEFTEAM', u'CHEFTEAM', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3255054'), (u'2014-01-14 19:36:22', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3255134'), (u'2014-01-14 21:11:50', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3255392'), (u'2014-01-14 21:41:46', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3255474'), (u'2014-01-16 18:39:17', u'https://www.codechef.com/PRACTICE/problems/TACHSTCK', u'TACHSTCK', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3260781'), (u'2014-01-16 19:08:18', u'https://www.codechef.com/PRACTICE/problems/TACHSTCK', u'TACHSTCK', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3260885'), (u'2014-01-16 19:36:52', u'https://www.codechef.com/PRACTICE/problems/PRIMES2', u'PRIMES2', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3261016'), (u'2014-01-18 18:40:00', u'https://www.codechef.com/PRACTICE/problems/RRMATRIX', u'RRMATRIX', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3266986'), (u'2014-01-18 19:16:39', u'https://www.codechef.com/PRACTICE/problems/GRANAMA', u'GRANAMA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3267092'), (u'2014-01-18 
19:25:40', u'https://www.codechef.com/PRACTICE/problems/GRANAMA', u'GRANAMA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3267123'), (u'2014-01-18 20:29:27', u'https://www.codechef.com/PRACTICE/problems/GRANAMA', u'GRANAMA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3267298'), (u'2014-01-18 20:35:24', u'https://www.codechef.com/PRACTICE/problems/GRANAMA', u'GRANAMA', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3267306'), (u'2014-01-23 10:03:37', u'https://www.codechef.com/PRACTICE/problems/NUKES', u'NUKES', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3283319'), (u'2014-01-23 10:04:57', u'https://www.codechef.com/PRACTICE/problems/JOHNY', u'JOHNY', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3283321'), (u'2014-01-23 10:06:21', u'https://www.codechef.com/PRACTICE/problems/RIGHTRI', u'RIGHTRI', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3283322'), (u'2014-01-23 10:07:29', u'https://www.codechef.com/PRACTICE/problems/RIGHTRI', u'RIGHTRI', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3283325'), (u'2014-01-23 10:19:28', u'https://www.codechef.com/PRACTICE/problems/RIGHTRI', u'RIGHTRI', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3283340'), (u'2014-01-23 10:22:56', u'https://www.codechef.com/PRACTICE/problems/NUKES', u'NUKES', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3283347'), (u'2014-01-23 10:27:39', u'https://www.codechef.com/PRACTICE/problems/NUKES', u'NUKES', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3283353'), (u'2014-01-23 10:30:21', u'https://www.codechef.com/PRACTICE/problems/NUKES', u'NUKES', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3283357'), (u'2014-01-23 10:42:45', u'https://www.codechef.com/PRACTICE/problems/LAPIN', u'LAPIN', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3283378'), (u'2014-01-23 10:50:27', u'https://www.codechef.com/PRACTICE/problems/LAPIN', u'LAPIN', u'AC', u'0', u'C', 
'https://www.codechef.com/viewsolution/3283389'), (u'2014-01-23 10:58:07', u'https://www.codechef.com/PRACTICE/problems/NUKES', u'NUKES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3283393'), (u'2014-02-07 13:56:26', u'https://www.codechef.com/PRACTICE/problems/NUMGAME', u'NUMGAME', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3339806'), (u'2014-02-07 14:04:43', u'https://www.codechef.com/PRACTICE/problems/NUMGAME', u'NUMGAME', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3339834'), (u'2014-02-07 14:07:56', u'https://www.codechef.com/PRACTICE/problems/NUMGAME', u'NUMGAME', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3339845'), (u'2014-02-07 14:12:05', u'https://www.codechef.com/PRACTICE/problems/NUMGAME', u'NUMGAME', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3339853'), (u'2014-02-07 14:43:35', u'https://www.codechef.com/PRACTICE/problems/CIELRCPT', u'CIELRCPT', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3339922'), (u'2014-02-08 18:56:14', u'https://www.codechef.com/FEB14/problems/LCPESY', u'LCPESY', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3359518'), (u'2014-02-08 19:12:55', u'https://www.codechef.com/FEB14/problems/LCPESY', u'LCPESY', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3359744'), (u'2014-02-08 19:39:00', u'https://www.codechef.com/FEB14/problems/SUBMIN', u'SUBMIN', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3360100'), (u'2014-02-11 15:14:10', u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3387212'), (u'2014-02-11 15:20:54', u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3387257'), (u'2014-02-11 15:30:00', u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3387312'), (u'2014-02-11 16:35:28', 
u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3387693'), (u'2014-02-11 16:51:49', u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3387801'), (u'2014-02-11 16:55:47', u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3387826'), (u'2014-02-13 15:27:31', u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3401986'), (u'2014-02-13 16:24:34', u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3402304'), (u'2014-02-13 16:52:47', u'https://www.codechef.com/FEB14/problems/TWODOGS', u'TWODOGS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3402476'), (u'2014-02-22 21:12:12', u'https://www.codechef.com/CDMT2014/problems/MIRRORS', u'MIRRORS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3455971'), (u'2014-02-22 21:14:12', u'https://www.codechef.com/CDMT2014/problems/MIRRORS', u'MIRRORS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3456012'), (u'2014-02-22 21:21:11', u'https://www.codechef.com/CDMT2014/problems/MIRRORS', u'MIRRORS', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3456160'), (u'2014-02-23 00:04:09', u'https://www.codechef.com/CDMT2014/problems/TILE', u'TILE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3460835'), (u'2014-02-23 00:07:15', u'https://www.codechef.com/CDMT2014/problems/TILE0', u'TILE0', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3460874'), (u'2014-02-23 00:23:39', u'https://www.codechef.com/CDNCTR14/problems/QUEST', u'QUEST', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/3461126'), (u'2014-02-23 00:35:48', u'https://www.codechef.com/CDNCTR14/problems/QUEST', u'QUEST', u'AC', u'0', u'C', 
'https://www.codechef.com/viewsolution/3461310'), (u'2014-02-23 01:13:51', u'https://www.codechef.com/CDNCTR14/problems/ARRAY', u'ARRAY', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3461817'), (u'2014-02-23 01:53:29', u'https://www.codechef.com/CDNCTR14/problems/GOT', u'GOT', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3462204'), (u'2014-02-23 02:37:48', u'https://www.codechef.com/CDNCTR14/problems/JADEJA', u'JADEJA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3462594'), (u'2014-02-23 02:42:04', u'https://www.codechef.com/CDNCTR14/problems/JADEJA', u'JADEJA', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/3462619'), (u'2014-02-26 23:33:32', u'https://www.codechef.com/PRACTICE/problems/WCOUNT', u'WCOUNT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3477325'), (u'2014-03-04 16:51:10', u'https://www.codechef.com/PRACTICE/problems/TPRODUCT', u'TPRODUCT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3497768'), (u'2014-03-04 17:08:05', u'https://www.codechef.com/PRACTICE/problems/TPRODUCT', u'TPRODUCT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3497791'), (u'2014-03-04 17:11:05', u'https://www.codechef.com/PRACTICE/problems/TPRODUCT', u'TPRODUCT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/3497796'), (u'2014-05-25 02:14:27', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', 'RE', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/3938402'), (u'2014-05-25 02:16:35', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3938403'), (u'2014-05-25 02:19:23', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3938407'), (u'2014-05-25 02:28:54', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/3938415'), (u'2014-06-08 15:50:16', 
u'https://www.codechef.com/JUNE14/problems/CHEFZOT', u'CHEFZOT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4019362'), (u'2014-06-08 15:52:51', u'https://www.codechef.com/JUNE14/problems/CHEFZOT', u'CHEFZOT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4019398'), (u'2014-06-08 15:57:49', u'https://www.codechef.com/JUNE14/problems/CHEFZOT', u'CHEFZOT', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4019468'), (u'2014-06-08 16:11:10', u'https://www.codechef.com/JUNE14/problems/GUESS', u'GUESS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4019668'), (u'2014-06-08 16:13:49', u'https://www.codechef.com/JUNE14/problems/GUESS', u'GUESS', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4019713'), (u'2014-06-08 17:28:24', u'https://www.codechef.com/JUNE14/problems/FORGETPW', u'FORGETPW', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4020749'), (u'2014-06-09 20:48:17', u'https://www.codechef.com/JUNE14/problems/FORGETPW', u'FORGETPW', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4036865'), (u'2014-06-09 20:51:39', u'https://www.codechef.com/JUNE14/problems/FORGETPW', u'FORGETPW', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4036902'), (u'2014-06-09 20:56:28', u'https://www.codechef.com/JUNE14/problems/FORGETPW', u'FORGETPW', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4036949'), (u'2014-06-11 07:33:23', u'https://www.codechef.com/JUNE14/problems/FORGETPW', u'FORGETPW', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4053523'), (u'2014-06-11 07:54:41', u'https://www.codechef.com/PRACTICE/problems/ALEXNUMB', u'ALEXNUMB', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4053566'), (u'2014-06-11 07:57:12', u'https://www.codechef.com/PRACTICE/problems/ALEXNUMB', u'ALEXNUMB', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4053571'), (u'2014-06-11 07:59:02', u'https://www.codechef.com/PRACTICE/problems/ALEXNUMB', u'ALEXNUMB', u'WA', u'0', 
u'C', 'https://www.codechef.com/viewsolution/4053576'), (u'2014-06-11 08:04:58', u'https://www.codechef.com/PRACTICE/problems/ALEXNUMB', u'ALEXNUMB', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4053599'), (u'2014-06-11 08:08:47', u'https://www.codechef.com/PRACTICE/problems/ALEXNUMB', u'ALEXNUMB', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4053611'), (u'2014-06-11 08:20:27', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/4053646'), (u'2014-06-11 08:21:52', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/4053653'), (u'2014-06-11 08:22:42', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/4053659'), (u'2014-06-11 08:35:28', u'https://www.codechef.com/PRACTICE/problems/MAXDIFF', u'MAXDIFF', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4053715'), (u'2014-06-11 08:41:38', u'https://www.codechef.com/PRACTICE/problems/MAXDIFF', u'MAXDIFF', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4053747'), (u'2014-06-11 09:20:41', u'https://www.codechef.com/PRACTICE/problems/STONES', u'STONES', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4053906'), (u'2014-06-11 09:23:05', u'https://www.codechef.com/PRACTICE/problems/STONES', u'STONES', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4053914'), (u'2014-06-11 09:28:01', u'https://www.codechef.com/PRACTICE/problems/STONES', u'STONES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4053935'), (u'2014-06-11 09:46:27', u'https://www.codechef.com/PRACTICE/problems/SPCANDY', u'SPCANDY', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/4054028'), (u'2014-06-11 09:49:08', u'https://www.codechef.com/PRACTICE/problems/SPCANDY', u'SPCANDY', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4054050'), (u'2014-06-11 
09:50:14', u'https://www.codechef.com/PRACTICE/problems/SPCANDY', u'SPCANDY', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4054056'), (u'2014-06-11 10:13:17', u'https://www.codechef.com/PRACTICE/problems/DIVIDING', u'DIVIDING', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4054186'), (u'2014-06-11 10:17:20', u'https://www.codechef.com/PRACTICE/problems/DIVIDING', u'DIVIDING', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4054200'), (u'2014-06-11 10:21:20', u'https://www.codechef.com/PRACTICE/problems/DIVIDING', u'DIVIDING', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4054222'), (u'2014-06-11 10:46:57', u'https://www.codechef.com/PRACTICE/problems/APPROX', u'APPROX', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4054403'), (u'2014-06-11 11:11:10', u'https://www.codechef.com/PRACTICE/problems/COMPILER', u'COMPILER', 'CE', u'0', u'ADA', 'https://www.codechef.com/viewsolution/4054561'), (u'2014-06-11 11:11:59', u'https://www.codechef.com/PRACTICE/problems/COMPILER', u'COMPILER', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4054571'), (u'2014-06-11 16:59:23', u'https://www.codechef.com/PRACTICE/problems/AMSGAME1', u'AMSGAME1', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4057988'), (u'2014-06-11 17:05:35', u'https://www.codechef.com/PRACTICE/problems/AMSGAME1', u'AMSGAME1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4058067'), (u'2014-06-29 01:44:47', u'https://www.codechef.com/PRACTICE/problems/TREEROOT', u'TREEROOT', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4152751'), (u'2014-06-29 02:02:26', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', u'TLE', u'0', u'PYTH', 'https://www.codechef.com/viewsolution/4152798'), (u'2014-07-04 20:23:15', u'https://www.codechef.com/JULY14/problems/CSUB', u'CSUB', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4188769'), (u'2014-07-04 20:35:55', 
u'https://www.codechef.com/JULY14/problems/CSUB', u'CSUB', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4189092'), (u'2014-07-04 20:42:22', u'https://www.codechef.com/JULY14/problems/CSUB', u'CSUB', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4189260'), (u'2014-07-04 20:56:59', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4189643'), (u'2014-07-04 20:58:35', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4189684'), (u'2014-07-04 21:29:16', u'https://www.codechef.com/JULY14/problems/CSUB', u'CSUB', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4190477'), (u'2014-07-05 03:32:13', u'https://www.codechef.com/PRACTICE/problems/SPOTWO', u'SPOTWO', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4198760'), (u'2014-07-05 04:31:23', u'https://www.codechef.com/PRACTICE/problems/REMISS', u'REMISS', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4199244'), (u'2014-07-05 04:48:17', u'https://www.codechef.com/PRACTICE/problems/POTATOES', u'POTATOES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4199368'), (u'2014-07-05 04:58:55', u'https://www.codechef.com/PRACTICE/problems/SDSQUARE', u'SDSQUARE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4199453'), (u'2014-07-05 05:05:28', u'https://www.codechef.com/PRACTICE/problems/SDSQUARE', u'SDSQUARE', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4199504'), (u'2014-07-05 05:14:54', u'https://www.codechef.com/PRACTICE/problems/SDSQUARE', u'SDSQUARE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4199569'), (u'2014-07-05 05:19:30', u'https://www.codechef.com/PRACTICE/problems/SDSQUARE', u'SDSQUARE', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4199592'), (u'2014-07-05 05:44:04', u'https://www.codechef.com/PRACTICE/problems/NOLOGIC', u'NOLOGIC', u'WA', u'0', u'C', 
'https://www.codechef.com/viewsolution/4199717'), (u'2014-07-12 02:26:44', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4303371'), (u'2014-07-12 03:17:04', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4303603'), (u'2014-07-12 03:17:04', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4303608'), (u'2014-07-12 03:17:04', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4303611'), (u'2014-07-12 03:17:45', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4303624'), (u'2014-07-12 03:22:54', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4303651'), (u'2014-07-12 03:25:18', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4303661'), (u'2014-07-12 03:28:45', u'https://www.codechef.com/JULY14/problems/RETPO', u'RETPO', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4303679'), (u'2014-07-12 15:12:46', u'https://www.codechef.com/JULY14/problems/FROGV', u'FROGV', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/4307292'), (u'2014-07-13 01:07:50', u'https://www.codechef.com/JULY14/problems/FROGV', u'FROGV', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4312732'), (u'2014-07-17 02:00:29', u'https://www.codechef.com/PRACTICE/problems/BINTREE', u'BINTREE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4337506'), (u'2014-07-17 02:02:30', u'https://www.codechef.com/PRACTICE/problems/BINTREE', u'BINTREE', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4337509'), (u'2014-07-17 21:02:13', 
u'https://www.codechef.com/PRACTICE/problems/LUCKYSTR', u'LUCKYSTR', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/4339419'), (u'2014-07-17 21:03:35', u'https://www.codechef.com/PRACTICE/problems/LUCKYSTR', u'LUCKYSTR', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4339420'), (u'2014-07-17 21:49:38', u'https://www.codechef.com/PRACTICE/problems/NOLOGIC', u'NOLOGIC', u'WA', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4339533'), (u'2014-07-17 21:54:01', u'https://www.codechef.com/PRACTICE/problems/NOLOGIC', u'NOLOGIC', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4339548'), (u'2014-07-17 21:55:43', u'https://www.codechef.com/PRACTICE/problems/NOLOGIC', u'NOLOGIC', u'TLE', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4339554'), (u'2014-07-17 21:58:37', u'https://www.codechef.com/PRACTICE/problems/NOLOGIC', u'NOLOGIC', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/4339563'), (u'2014-07-17 21:59:31', u'https://www.codechef.com/PRACTICE/problems/NOLOGIC', u'NOLOGIC', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4339567'), (u'2014-07-18 00:42:33', u'https://www.codechef.com/PRACTICE/problems/VOTERS', u'VOTERS', u'TLE', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340137'), (u'2014-07-18 01:15:31', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'WA', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340237'), (u'2014-07-18 01:17:19', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'WA', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340243'), (u'2014-07-18 01:21:53', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'WA', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340259'), (u'2014-07-18 01:24:29', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'WA', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340266'), (u'2014-07-18 
01:38:21', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340317'), (u'2014-07-18 01:41:49', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340328'), (u'2014-07-18 02:11:22', u'https://www.codechef.com/PRACTICE/problems/COMPILER', u'COMPILER', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4340405'), (u'2014-07-18 02:13:00', u'https://www.codechef.com/PRACTICE/problems/COMPILER', u'COMPILER', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4340412'), (u'2014-07-18 02:15:57', u'https://www.codechef.com/PRACTICE/problems/COMPILER', u'COMPILER', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4340421'), (u'2014-07-18 03:08:59', u'https://www.codechef.com/PRACTICE/problems/WSTRING', u'WSTRING', u'WA', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340523'), (u'2014-07-18 03:18:59', u'https://www.codechef.com/PRACTICE/problems/WSTRING', u'WSTRING', u'WA', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340535'), (u'2014-07-18 04:45:18', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/4340638'), (u'2014-07-18 04:46:15', u'https://www.codechef.com/PRACTICE/problems/RRCODE', u'RRCODE', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4340641'), (u'2014-07-18 04:50:29', u'https://www.codechef.com/PRACTICE/problems/BINTREE', u'BINTREE', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4340644'), (u'2014-07-18 04:55:56', u'https://www.codechef.com/PRACTICE/problems/RETPO', u'RETPO', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4340648'), (u'2014-07-18 04:58:27', u'https://www.codechef.com/PRACTICE/problems/BINTREE', u'BINTREE', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4340649'), (u'2014-07-18 05:04:58', 
u'https://www.codechef.com/PRACTICE/problems/RRMATRIX', u'RRMATRIX', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4340655'), (u'2014-07-18 05:05:52', u'https://www.codechef.com/PRACTICE/problems/RRMATRIX', u'RRMATRIX', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4340657'), (u'2014-07-21 18:05:27', u'https://www.codechef.com/PRACTICE/problems/RRCOPY', u'RRCOPY', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4362844'), (u'2014-07-21 18:24:11', u'https://www.codechef.com/PRACTICE/problems/RRCOPY', u'RRCOPY', u'WA', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4362928'), (u'2014-07-21 18:25:05', u'https://www.codechef.com/PRACTICE/problems/RRCOPY', u'RRCOPY', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4362933'), (u'2014-07-21 18:45:33', u'https://www.codechef.com/PRACTICE/problems/RRSUM', u'RRSUM', u'TLE', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4363040'), (u'2014-07-21 18:49:18', u'https://www.codechef.com/PRACTICE/problems/RRSUM', u'RRSUM', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4363058'), (u'2014-07-21 18:50:51', u'https://www.codechef.com/PRACTICE/problems/RRSUM', u'RRSUM', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4363066'), (u'2014-07-23 00:10:48', u'https://www.codechef.com/PRACTICE/problems/RECTQUER', u'RECTQUER', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4367826'), (u'2014-07-23 01:00:49', u'https://www.codechef.com/PRACTICE/problems/RECTQUER', u'RECTQUER', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4368006'), (u'2014-07-23 01:03:50', u'https://www.codechef.com/PRACTICE/problems/RECTQUER', u'RECTQUER', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4368015'), (u'2014-07-23 01:32:36', u'https://www.codechef.com/PRACTICE/problems/RECTQUER', u'RECTQUER', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4368102'), (u'2014-07-26 00:16:20', 
u'https://www.codechef.com/PRACTICE/problems/DOUBLE', u'DOUBLE', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4377912'), (u'2014-07-26 00:18:23', u'https://www.codechef.com/PRACTICE/problems/DOUBLE', u'DOUBLE', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4377917'), (u'2014-07-26 00:44:31', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4377999'), (u'2014-07-27 02:46:17', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'TLE', u'0', u'PYTH', 'https://www.codechef.com/viewsolution/4382136'), (u'2014-07-27 02:52:14', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4382143'), (u'2014-07-27 02:55:35', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'TLE', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4382152'), (u'2014-07-27 02:56:53', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'TLE', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4382155'), (u'2014-07-27 02:58:43', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4382159'), (u'2014-07-27 02:59:30', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4382160'), (u'2014-07-27 03:01:22', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4382164'), (u'2014-07-27 03:13:49', u'https://www.codechef.com/PRACTICE/problems/INTEST', u'INTEST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4382175'), (u'2014-07-31 22:31:14', u'https://www.codechef.com/PRACTICE/problems/MARBLES', u'MARBLES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4410407'), (u'2014-07-31 22:32:41', 
u'https://www.codechef.com/PRACTICE/problems/MARBLES', u'MARBLES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4410421'), (u'2014-07-31 22:36:40', u'https://www.codechef.com/PRACTICE/problems/MARBLES', u'MARBLES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4410455'), (u'2014-07-31 22:37:34', u'https://www.codechef.com/PRACTICE/problems/MARBLES', u'MARBLES', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4410461'), (u'2014-08-01 16:03:33', u'https://www.codechef.com/AUG14/problems/PRGIFT', u'PRGIFT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4418584'), (u'2014-08-01 16:10:06', u'https://www.codechef.com/AUG14/problems/PRGIFT', u'PRGIFT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4418854'), (u'2014-08-01 16:16:14', u'https://www.codechef.com/AUG14/problems/PRGIFT', u'PRGIFT', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4419068'), (u'2014-08-01 16:28:32', u'https://www.codechef.com/AUG14/problems/PRGIFT', u'PRGIFT', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4419429'), (u'2014-08-01 21:14:20', u'https://www.codechef.com/AUG14/problems/PRGIFT', u'PRGIFT', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4427549'), (u'2014-08-01 22:22:40', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/4428946'), (u'2014-08-01 22:24:47', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4428994'), (u'2014-08-01 22:25:57', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4429019'), (u'2014-08-01 22:26:55', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4429047'), (u'2014-08-02 21:41:49', u'https://www.codechef.com/AUG14/problems/CRAWA', u'CRAWA', u'WA', u'0', u'C', 
'https://www.codechef.com/viewsolution/4448115'), (u'2014-08-02 21:43:44', u'https://www.codechef.com/AUG14/problems/CRAWA', u'CRAWA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4448136'), (u'2014-08-02 21:51:09', u'https://www.codechef.com/AUG14/problems/CRAWA', u'CRAWA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4448237'), (u'2014-08-02 21:58:27', u'https://www.codechef.com/AUG14/problems/CRAWA', u'CRAWA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4448341'), (u'2014-08-02 23:04:07', u'https://www.codechef.com/AUG14/problems/CRAWA', u'CRAWA', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4449507'), (u'2014-08-06 14:47:12', u'https://www.codechef.com/AUG14/problems/CLETAB', u'CLETAB', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4494226'), (u'2014-08-07 22:22:52', u'https://www.codechef.com/AUG14/problems/CLETAB', u'CLETAB', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4508709'), (u'2014-08-07 22:57:57', u'https://www.codechef.com/AUG14/problems/CLETAB', u'CLETAB', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4509134'), (u'2014-08-07 23:22:17', u'https://www.codechef.com/AUG14/problems/CLETAB', u'CLETAB', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4509429'), (u'2014-08-07 23:31:23', u'https://www.codechef.com/AUG14/problems/CLETAB', u'CLETAB', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4509535'), (u'2014-08-10 02:57:09', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4530125'), (u'2014-08-10 03:03:19', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4530154'), (u'2014-08-10 03:14:11', u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4530189'), (u'2014-08-10 03:17:14', 
u'https://www.codechef.com/PRACTICE/problems/PRIME1', u'PRIME1', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4530195'), (u'2014-08-10 14:56:08', u'https://www.codechef.com/AUG14/problems/REVERSE', u'REVERSE', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/4533200'), (u'2014-08-10 15:14:30', u'https://www.codechef.com/AUG14/problems/REVERSE', u'REVERSE', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4533367'), (u'2014-08-10 17:29:15', u'https://www.codechef.com/PRCNSR14/problems/GAME2048', u'GAME2048', 'RE', u'0', u'C', 'https://www.codechef.com/viewsolution/4535341'), (u'2014-08-10 17:30:22', u'https://www.codechef.com/PRCNSR14/problems/GAME2048', u'GAME2048', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4535393'), (u'2014-08-10 17:33:44', u'https://www.codechef.com/PRCNSR14/problems/GAME2048', u'GAME2048', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4535586'), (u'2014-08-10 17:34:51', u'https://www.codechef.com/PRCNSR14/problems/GAME2048', u'GAME2048', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4535650'), (u'2014-08-10 17:37:42', u'https://www.codechef.com/PRCNSR14/problems/GAME2048', u'GAME2048', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4535810'), (u'2014-08-10 17:39:14', u'https://www.codechef.com/PRCNSR14/problems/GAME2048', u'GAME2048', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4535898'), (u'2014-08-10 17:40:19', u'https://www.codechef.com/PRCNSR14/problems/GAME2048', u'GAME2048', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4535965'), (u'2014-08-10 17:47:23', u'https://www.codechef.com/PRCNSR14/problems/HLPSUG', u'HLPSUG', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4536336'), (u'2014-08-10 18:03:45', u'https://www.codechef.com/PRCNSR14/problems/HPYBDAY', u'HPYBDAY', u'TLE', u'0', u'C', 'https://www.codechef.com/viewsolution/4537126'), (u'2014-08-10 18:25:49', u'https://www.codechef.com/PRCNSR14/problems/HPYBDAY', 
u'HPYBDAY', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4538160'), (u'2014-08-10 18:27:37', u'https://www.codechef.com/PRCNSR14/problems/HPYBDAY', u'HPYBDAY', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4538244'), (u'2014-08-10 19:11:26', u'https://www.codechef.com/PRCNSR14/problems/PLTGRP', u'PLTGRP', u'TLE', u'0', u'C++11', 'https://www.codechef.com/viewsolution/4539947'), (u'2014-10-03 19:51:34', u'https://www.codechef.com/OCT14/problems/CHEFGR', u'CHEFGR', u'AC', u'0', u'C++ 4.8.1', 'https://www.codechef.com/viewsolution/4962359'), (u'2014-10-03 19:55:30', u'https://www.codechef.com/OCT14/problems/CHEFGR', u'CHEFGR', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4962494'), (u'2014-10-04 01:01:28', u'https://www.codechef.com/OCT14/problems/PRLADDU', u'PRLADDU', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4970823'), (u'2014-10-04 02:02:38', u'https://www.codechef.com/OCT14/problems/PRLADDU', u'PRLADDU', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/4972114'), (u'2014-10-04 02:05:31', u'https://www.codechef.com/OCT14/problems/PRLADDU', u'PRLADDU', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4972172'), (u'2014-10-04 02:08:04', u'https://www.codechef.com/OCT14/problems/PRLADDU', u'PRLADDU', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4972219'), (u'2014-10-04 02:10:59', u'https://www.codechef.com/OCT14/problems/PRLADDU', u'PRLADDU', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/4972279'), (u'2014-10-05 19:11:22', u'https://www.codechef.com/OCT14/problems/FATCHEF', u'FATCHEF', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/5008560'), (u'2014-10-05 19:46:59', u'https://www.codechef.com/OCT14/problems/PRPOTION', u'PRPOTION', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5009210'), (u'2014-10-05 20:09:50', u'https://www.codechef.com/OCT14/problems/PRPOTION', u'PRPOTION', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/5009564'), 
(u'2014-10-08 01:48:44', u'https://www.codechef.com/OCT14/problems/CHEFSQUA', u'CHEFSQUA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5046189'), (u'2014-10-08 19:42:52', u'https://www.codechef.com/OCT14/problems/CHEFSQUA', u'CHEFSQUA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5056254'), (u'2014-10-08 20:45:51', u'https://www.codechef.com/OCT14/problems/CHEFSQUA', u'CHEFSQUA', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/5057583'), (u'2014-10-08 20:47:41', u'https://www.codechef.com/OCT14/problems/CHEFSQUA', u'CHEFSQUA', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5057620'), (u'2014-10-08 20:49:47', u'https://www.codechef.com/OCT14/problems/CHEFSQUA', u'CHEFSQUA', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/5057673'), (u'2014-11-07 22:42:18', u'https://www.codechef.com/NOV14/problems/DISCHAR', u'DISCHAR', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5286888'), (u'2014-11-08 15:04:37', u'https://www.codechef.com/NOV14/problems/PRPALN', u'PRPALN', 'PS', u'35', u'C', 'https://www.codechef.com/viewsolution/5300598'), (u'2014-11-08 16:15:45', u'https://www.codechef.com/NOV14/problems/PRPALN', u'PRPALN', 'PS', u'35', u'C', 'https://www.codechef.com/viewsolution/5302106'), (u'2014-11-08 16:24:02', u'https://www.codechef.com/NOV14/problems/PRPALN', u'PRPALN', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5302275'), (u'2014-11-08 16:28:35', u'https://www.codechef.com/NOV14/problems/PRPALN', u'PRPALN', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5302355'), (u'2014-11-08 17:36:31', u'https://www.codechef.com/NOV14/problems/CHEFSEG', u'CHEFSEG', 'PS', u'40', u'C', 'https://www.codechef.com/viewsolution/5303576'), (u'2014-11-08 17:49:57', u'https://www.codechef.com/NOV14/problems/CHEFSEG', u'CHEFSEG', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5303832'), (u'2014-11-08 23:45:46', u'https://www.codechef.com/NOV14/problems/RBTREE', u'RBTREE', u'WA', 
u'0', u'C', 'https://www.codechef.com/viewsolution/5310161'), (u'2014-11-09 00:16:54', u'https://www.codechef.com/NOV14/problems/RBTREE', u'RBTREE', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5310716'), (u'2014-11-09 00:22:33', u'https://www.codechef.com/NOV14/problems/RBTREE', u'RBTREE', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5310827'), (u'2014-11-09 20:55:47', u'https://www.codechef.com/NOV14/problems/CHEFWORD', u'CHEFWORD', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5322719'), (u'2014-11-09 21:00:47', u'https://www.codechef.com/NOV14/problems/CHEFWORD', u'CHEFWORD', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5322778'), (u'2014-11-17 01:56:38', u'https://www.codechef.com/CDSM2014/problems/CHFMAX', u'CHFMAX', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/5414098'), (u'2014-11-17 02:10:10', u'https://www.codechef.com/CDSM2014/problems/CHEFTR', u'CHEFTR', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/5414268'), (u'2014-12-06 02:22:06', u'https://www.codechef.com/DEC14/problems/CAPPLE', u'CAPPLE', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5499111'), (u'2014-12-06 02:27:09', u'https://www.codechef.com/DEC14/problems/CAPPLE', u'CAPPLE', 'PS', u'52', u'C', 'https://www.codechef.com/viewsolution/5499146'), (u'2014-12-06 02:28:40', u'https://www.codechef.com/DEC14/problems/CAPPLE', u'CAPPLE', 'PS', u'52', u'C', 'https://www.codechef.com/viewsolution/5499158'), (u'2014-12-06 02:30:42', u'https://www.codechef.com/DEC14/problems/CAPPLE', u'CAPPLE', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5499166'), (u'2015-01-02 15:18:34', u'https://www.codechef.com/JAN15/problems/GCDQ', u'GCDQ', 'PS', u'40', u'C', 'https://www.codechef.com/viewsolution/5679296'), (u'2015-01-02 15:20:33', u'https://www.codechef.com/JAN15/problems/GCDQ', u'GCDQ', 'PS', u'40', u'C', 'https://www.codechef.com/viewsolution/5679371'), (u'2015-01-02 15:37:03', 
u'https://www.codechef.com/JAN15/problems/CHEFSTON', u'CHEFSTON', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5679960'), (u'2015-01-02 16:16:32', u'https://www.codechef.com/JAN15/problems/GCDQ', u'GCDQ', 'PS', u'40', u'C', 'https://www.codechef.com/viewsolution/5681465'), (u'2015-01-03 21:23:57', u'https://www.codechef.com/JAN15/problems/GCDQ', u'GCDQ', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5722527'), (u'2015-01-03 21:36:43', u'https://www.codechef.com/JAN15/problems/SEAVOTE', u'SEAVOTE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5722845'), (u'2015-01-03 21:50:45', u'https://www.codechef.com/JAN15/problems/SEAVOTE', u'SEAVOTE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5723185'), (u'2015-01-06 23:28:39', u'https://www.codechef.com/JAN15/problems/SEAVOTE', u'SEAVOTE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5788244'), (u'2015-01-06 23:44:15', u'https://www.codechef.com/JAN15/problems/SEAVOTE', u'SEAVOTE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5788578'), (u'2015-01-06 23:55:07', u'https://www.codechef.com/JAN15/problems/SEAVOTE', u'SEAVOTE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/5788839'), (u'2015-01-07 00:02:10', u'https://www.codechef.com/JAN15/problems/SEAVOTE', u'SEAVOTE', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/5788999'), (u'2015-03-07 03:45:05', u'https://www.codechef.com/MARCH15/problems/CNOTE', u'CNOTE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/6413565'), (u'2015-03-07 06:18:00', u'https://www.codechef.com/MARCH15/problems/CNOTE', u'CNOTE', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/6414065'), (u'2015-03-09 22:29:34', u'https://www.codechef.com/MARCH15/problems/CNOTE', u'CNOTE', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/6447577'), (u'2015-03-09 22:36:29', u'https://www.codechef.com/MARCH15/problems/CNOTE', u'CNOTE', u'WA', u'0', u'C', 
'https://www.codechef.com/viewsolution/6447698'), (u'2015-03-09 22:38:36', u'https://www.codechef.com/MARCH15/problems/CNOTE', u'CNOTE', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/6447737'), (u'2015-05-12 02:41:11', u'https://www.codechef.com/MAY15/problems/CHEFRP', u'CHEFRP', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/6900569'), (u'2015-05-12 03:05:02', u'https://www.codechef.com/MAY15/problems/CHEFRP', u'CHEFRP', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/6900712'), (u'2015-05-13 15:59:16', u'https://www.codechef.com/MAY15/problems/CHAPD', u'CHAPD', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/6917484'), (u'2015-05-26 03:53:20', u'https://www.codechef.com/PRACTICE/problems/CFRTEST', u'CFRTEST', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/7024771'), (u'2015-05-26 04:46:33', u'https://www.codechef.com/PRACTICE/problems/REARRSTR', u'REARRSTR', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/7024793'), (u'2015-05-26 04:54:59', u'https://www.codechef.com/PRACTICE/problems/CHAPD', u'CHAPD', u'AC', u'100', u'C++ 4.3.2', 'https://www.codechef.com/viewsolution/7024795'), (u'2015-05-30 07:38:40', u'https://www.codechef.com/PRACTICE/problems/PINOCH1', u'PINOCH1', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/7043758'), (u'2015-05-30 07:47:02', u'https://www.codechef.com/PRACTICE/problems/PINOCH1', u'PINOCH1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/7044118'), (u'2015-05-30 07:49:48', u'https://www.codechef.com/PRACTICE/problems/PINOCH1', u'PINOCH1', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/7044235'), (u'2015-05-30 08:04:35', u'https://www.codechef.com/PRACTICE/problems/PINOCH2', u'PINOCH2', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/7044809'), (u'2015-05-30 08:09:02', u'https://www.codechef.com/PRACTICE/problems/PINOCH2', u'PINOCH2', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/7044972'), (u'2015-05-30 08:27:56', 
u'https://www.codechef.com/PRACTICE/problems/RACEWARS', u'RACEWARS', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/7045779'), (u'2015-05-30 08:28:38', u'https://www.codechef.com/PRACTICE/problems/RACEWARS', u'RACEWARS', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7045826'), (u'2015-05-30 08:31:07', u'https://www.codechef.com/PRACTICE/problems/MXZERO', u'MXZERO', u'AC', u'0', u'C', 'https://www.codechef.com/viewsolution/7045937'), (u'2015-05-30 09:22:29', u'https://www.codechef.com/PRACTICE/problems/RACEWARS', u'RACEWARS', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7046383'), (u'2015-05-30 09:34:19', u'https://www.codechef.com/PRACTICE/problems/HOBB', u'HOBB', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/7046431'), (u'2015-05-30 12:48:40', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', u'WA', u'0', u'C', 'https://www.codechef.com/viewsolution/7047261'), (u'2015-05-30 12:50:41', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/7047270'), (u'2015-06-08 22:03:40', u'https://www.codechef.com/JUNE15/problems/CBARG', u'CBARG', 'PS', u'30', u'C', 'https://www.codechef.com/viewsolution/7139999'), (u'2015-06-08 22:10:35', u'https://www.codechef.com/JUNE15/problems/CBARG', u'CBARG', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/7140098'), (u'2015-06-09 17:03:07', u'https://www.codechef.com/JUNE15/problems/CHPLGNS', u'CHPLGNS', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7150141'), (u'2015-06-09 22:09:57', u'https://www.codechef.com/JUNE15/problems/CHPLGNS', u'CHPLGNS', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/7153650'), (u'2015-06-09 22:11:02', u'https://www.codechef.com/JUNE15/problems/CHPLGNS', u'CHPLGNS', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7153663'), (u'2015-06-10 17:52:59', 
u'https://www.codechef.com/JUNE15/problems/CHPLGNS', u'CHPLGNS', 'PS', u'10', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7163596'), (u'2015-06-10 18:02:31', u'https://www.codechef.com/JUNE15/problems/CHPLGNS', u'CHPLGNS', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7163696'), (u'2015-06-10 23:15:58', u'https://www.codechef.com/JUNE15/problems/CHPLGNS', u'CHPLGNS', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7168947'), (u'2015-06-10 23:27:43', u'https://www.codechef.com/PRACTICE/problems/R303', u'R303', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7169121'), (u'2015-06-11 00:01:43', u'https://www.codechef.com/PRACTICE/problems/R303', u'R303', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7169540'), (u'2015-07-04 02:09:01', u'https://www.codechef.com/JULY15/problems/CHCUBE', u'CHCUBE', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7339812'), (u'2015-07-04 02:49:18', u'https://www.codechef.com/JULY15/problems/LCKYST', u'LCKYST', 'PS', u'8', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7340359'), (u'2015-07-04 02:55:39', u'https://www.codechef.com/JULY15/problems/LCKYST', u'LCKYST', 'PS', u'30', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7340422'), (u'2015-07-04 02:57:16', u'https://www.codechef.com/JULY15/problems/LCKYST', u'LCKYST', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7340447'), (u'2015-07-04 02:59:52', u'https://www.codechef.com/JULY15/problems/LCKYST', u'LCKYST', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7340475'), (u'2015-07-06 15:49:58', u'https://www.codechef.com/JULY15/problems/EGBOBRD', u'EGBOBRD', 'PS', u'15', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7381337'), (u'2015-07-06 15:57:35', u'https://www.codechef.com/JULY15/problems/EGBOBRD', u'EGBOBRD', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7381445'), (u'2015-07-07 20:01:02', 
u'https://www.codechef.com/JULY15/problems/EGBOBRD', u'EGBOBRD', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7399011'), (u'2015-07-07 20:05:22', u'https://www.codechef.com/JULY15/problems/EGBOBRD', u'EGBOBRD', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7399073'), (u'2015-07-08 00:31:24', u'https://www.codechef.com/JULY15/problems/ADDMUL', u'ADDMUL', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7402380'), (u'2015-07-08 00:33:00', u'https://www.codechef.com/JULY15/problems/ADDMUL', u'ADDMUL', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7402406'), (u'2015-07-12 10:52:20', u'https://www.codechef.com/JULY15/problems/ADDMUL', u'ADDMUL', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7456100'), (u'2015-08-07 17:28:06', u'https://www.codechef.com/AUG15/problems/COOKMACH', u'COOKMACH', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7640195'), (u'2015-08-10 17:08:30', u'https://www.codechef.com/AUG15/problems/GRGUY', u'GRGUY', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7720771'), (u'2015-08-10 19:18:54', u'https://www.codechef.com/AUG15/problems/ADMAG', u'ADMAG', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/7723401'), (u'2015-08-12 06:04:32', u'https://www.codechef.com/AUG15/problems/WOUT', u'WOUT', 'PS', u'25', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7751317'), (u'2015-08-12 06:10:36', u'https://www.codechef.com/AUG15/problems/WOUT', u'WOUT', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7751339'), (u'2015-08-12 06:14:26', u'https://www.codechef.com/AUG15/problems/WOUT', u'WOUT', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7751353'), (u'2015-08-16 00:04:50', u'https://www.codechef.com/PRACTICE/problems/RRATING', u'RRATING', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7817713'), (u'2015-08-16 00:27:10', 
u'https://www.codechef.com/PRACTICE/problems/RRATING', u'RRATING', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7818066'), (u'2015-08-16 00:37:49', u'https://www.codechef.com/PRACTICE/problems/RRATING', u'RRATING', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7818234'), (u'2015-08-16 00:46:49', u'https://www.codechef.com/PRACTICE/problems/RRATING', u'RRATING', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7818371'), (u'2015-08-16 00:52:48', u'https://www.codechef.com/PRACTICE/problems/RRATING', u'RRATING', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7818462'), (u'2015-08-16 01:06:50', u'https://www.codechef.com/PRACTICE/problems/RRATING', u'RRATING', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7818659'), (u'2015-08-16 01:11:04', u'https://www.codechef.com/PRACTICE/problems/RRATING', u'RRATING', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7818713'), (u'2015-08-16 01:27:22', u'https://www.codechef.com/PRACTICE/problems/RRATING', u'RRATING', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7818980'), (u'2015-08-23 21:36:59', u'https://www.codechef.com/COOK61/problems/CARDLINE', u'CARDLINE', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7898648'), (u'2015-08-23 21:41:10', u'https://www.codechef.com/COOK61/problems/TWOSTR', u'TWOSTR', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7898953'), (u'2015-08-23 21:58:03', u'https://www.codechef.com/COOK61/problems/XORNUBER', u'XORNUBER', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7901142'), (u'2015-08-23 22:06:19', u'https://www.codechef.com/COOK61/problems/XORNUBER', u'XORNUBER', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/7902094'), (u'2015-09-10 02:09:12', u'https://www.codechef.com/SEPT15/problems/MSTEP', u'MSTEP', u'AC', u'100', u'C++ 4.9.2', 
'https://www.codechef.com/viewsolution/8102573'), (u'2015-09-10 02:51:18', u'https://www.codechef.com/SEPT15/problems/DONUTS', u'DONUTS', 'PS', u'30', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8102955'), (u'2015-09-10 20:48:37', u'https://www.codechef.com/SEPT15/problems/DONUTS', u'DONUTS', 'PS', u'10', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8112817'), (u'2015-09-10 21:39:10', u'https://www.codechef.com/SEPT15/problems/DONUTS', u'DONUTS', 'PS', u'40', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8113610'), (u'2015-09-12 08:08:58', u'https://www.codechef.com/SEPT15/problems/DONUTS', u'DONUTS', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8132761'), (u'2015-09-12 08:19:28', u'https://www.codechef.com/SEPT15/problems/DONUTS', u'DONUTS', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8132775'), (u'2015-09-12 22:15:45', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8142069'), (u'2015-09-12 22:23:17', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8142165'), (u'2015-09-12 22:31:16', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8142257'), (u'2015-09-12 22:35:11', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8142304'), (u'2015-09-12 22:52:32', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8142551'), (u'2015-09-12 22:58:28', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8142618'), (u'2015-09-12 23:03:31', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'AC', u'100', u'C++ 4.9.2', 
'https://www.codechef.com/viewsolution/8142689'), (u'2015-09-12 23:06:41', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/8142738'), (u'2015-09-12 23:09:39', u'https://www.codechef.com/SEPT15/problems/BANROB', u'BANROB', u'AC', u'100', u'C', 'https://www.codechef.com/viewsolution/8142768'), (u'2015-09-20 22:05:39', u'https://www.codechef.com/COOK62/problems/FRGTNLNG', u'FRGTNLNG', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8212884'), (u'2015-09-20 22:34:31', u'https://www.codechef.com/COOK62/problems/STACKS', u'STACKS', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8215005'), (u'2015-09-20 23:10:47', u'https://www.codechef.com/COOK62/problems/STACKS', u'STACKS', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8217486'), (u'2015-09-20 23:16:22', u'https://www.codechef.com/COOK62/problems/STACKS', u'STACKS', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8217838'), (u'2015-09-21 13:34:29', u'https://www.codechef.com/PRACTICE/problems/FRGTNLNG', u'FRGTNLNG', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8222436'), (u'2015-09-25 21:08:04', u'https://www.codechef.com/PRACTICE/problems/TPRODUCT', u'TPRODUCT', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8245383'), (u'2015-09-25 21:15:54', u'https://www.codechef.com/PRACTICE/problems/TPRODUCT', u'TPRODUCT', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8245418'), (u'2015-09-25 21:30:38', u'https://www.codechef.com/PRACTICE/problems/TPRODUCT', u'TPRODUCT', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8245472'), (u'2015-09-25 21:37:47', u'https://www.codechef.com/PRACTICE/problems/TPRODUCT', u'TPRODUCT', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8245498'), (u'2015-09-27 19:14:01', u'https://www.codechef.com/PRACTICE/problems/SPALNUM', u'SPALNUM', u'AC', u'100', 
u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8266897'), (u'2015-09-27 19:19:39', u'https://www.codechef.com/PRACTICE/problems/SPALNUM', u'SPALNUM', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8267017'), (u'2015-09-27 19:23:52', u'https://www.codechef.com/PRACTICE/problems/SPALNUM', u'SPALNUM', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8267096'), (u'2015-09-29 21:53:04', u'https://www.codechef.com/PRACTICE/problems/LUCKY', u'LUCKY', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8280451'), (u'2015-10-20 09:59:02', u'https://www.codechef.com/PRACTICE/problems/ASP', u'ASP', u'WA', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8594490'), (u'2015-10-20 10:00:30', u'https://www.codechef.com/PRACTICE/problems/ASP', u'ASP', u'AC', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8594496'), (u'2015-12-14 23:46:01', u'https://www.codechef.com/PRACTICE/problems/CHEFST', u'CHEFST', u'TLE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8959065'), (u'2015-12-14 23:47:46', u'https://www.codechef.com/PRACTICE/problems/CHEFST', u'CHEFST', 'PS', u'30', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8959080'), (u'2015-12-15 00:01:01', u'https://www.codechef.com/PRACTICE/problems/CHEFST', u'CHEFST', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/8959153'), (u'2016-05-14 16:46:03', u'https://www.codechef.com/PRACTICE/problems/KOL1509', u'KOL1509', 'RE', u'0', u'C++14', 'https://www.codechef.com/viewsolution/10082758'), (u'2016-06-05 13:55:56', u'https://www.codechef.com/JUNE16/problems/DEVARRAY', u'DEVARRAY', 'CE', u'0', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/10333457'), (u'2016-06-05 13:59:32', u'https://www.codechef.com/JUNE16/problems/DEVARRAY', u'DEVARRAY', u'AC', u'100', u'C++ 4.9.2', 'https://www.codechef.com/viewsolution/10333552'), (u'2017-11-03 00:35:24', u'https://www.codechef.com/PRACTICE/problems/BLACKCOM', u'BLACKCOM', 
'CE', u'0', u'C++ 6.3', 'https://www.codechef.com/viewsolution/16037895'), (u'2017-11-03 00:41:17', u'https://www.codechef.com/PRACTICE/problems/BLACKCOM', u'BLACKCOM', u'WA', u'0', u'PYTH', 'https://www.codechef.com/viewsolution/16037935'), (u'2017-12-03 19:26:28', u'https://www.codechef.com/PRACTICE/problems/WEICOM', u'WEICOM', u'WA', u'0', u'PYTH', 'https://www.codechef.com/viewsolution/16433447'), (u'2018-10-07 19:12:16', u'https://www.codechef.com/PRACTICE/problems/BLACKCOM', u'BLACKCOM', 'CE', u'0', u'C++14', 'https://www.codechef.com/viewsolution/20545692'), (u'2018-10-23 22:36:07', u'https://www.codechef.com/PRACTICE/problems/SURCHESS', u'SURCHESS', 'CE', u'0', u'C++14', 'https://www.codechef.com/viewsolution/21187090'), (u'2018-11-07 12:50:39', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', 'CE', u'0', u'C', 'https://www.codechef.com/viewsolution/21518903'), (u'2018-11-07 12:51:53', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', u'WA', u'0', u'C++14', 'https://www.codechef.com/viewsolution/21518924'), (u'2018-11-07 12:57:36', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', u'WA', u'0', u'C++14', 'https://www.codechef.com/viewsolution/21519029'), (u'2018-11-07 12:58:22', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', u'WA', u'0', u'C++14', 'https://www.codechef.com/viewsolution/21519043'), (u'2018-11-07 13:00:37', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', u'WA', u'0', u'C++14', 'https://www.codechef.com/viewsolution/21519089'), (u'2018-11-07 13:02:45', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', 'PS', u'50', u'C++14', 'https://www.codechef.com/viewsolution/21519127'), (u'2018-11-07 13:08:22', u'https://www.codechef.com/PRACTICE/problems/TICKETS5', u'TICKETS5', u'AC', u'100', u'C++14', 'https://www.codechef.com/viewsolution/21519248')],
"CodeForces": [('2014-06-20 14:16:29', u'http://www.codeforces.com/problemset/problem/443/A', u'Anton and Letters', 'CE', '0', u'GNU C', 'http://www.codeforces.com/contest/443/submission/6926377'), ('2014-06-20 14:17:29', u'http://www.codeforces.com/problemset/problem/443/A', u'Anton and Letters', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/443/submission/6926384'), ('2014-06-20 15:14:05', u'http://www.codeforces.com/problemset/problem/1/A', u'Theatre Square', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/1/submission/6926712'), ('2014-06-20 15:19:19', u'http://www.codeforces.com/problemset/problem/1/A', u'Theatre Square', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/1/submission/6926744'), ('2014-06-20 15:35:33', u'http://www.codeforces.com/problemset/problem/1/A', u'Theatre Square', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/1/submission/6926822'), ('2014-06-20 15:40:22', u'http://www.codeforces.com/problemset/problem/4/A', u'Watermelon', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/4/submission/6926854'), ('2014-06-20 15:42:27', u'http://www.codeforces.com/problemset/problem/4/A', u'Watermelon', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/4/submission/6926866'), ('2014-06-20 16:19:41', u'http://www.codeforces.com/problemset/problem/158/A', u'Next Round', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/6927039'), ('2014-06-20 16:21:59', u'http://www.codeforces.com/problemset/problem/158/A', u'Next Round', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/6927057'), ('2014-06-20 16:35:40', u'http://www.codeforces.com/problemset/problem/158/A', u'Next Round', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/6927122'), ('2014-06-20 23:33:02', u'http://www.codeforces.com/problemset/problem/158/B', u'Taxi', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/6930033'), ('2014-06-20 
23:46:50', u'http://www.codeforces.com/problemset/problem/158/B', u'Taxi', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/6930628'), ('2014-06-21 00:23:15', u'http://www.codeforces.com/problemset/problem/131/A', u'cAPS lOCK', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6930791'), ('2014-06-21 00:26:44', u'http://www.codeforces.com/problemset/problem/131/A', u'cAPS lOCK', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6930810'), ('2014-06-21 00:28:48', u'http://www.codeforces.com/problemset/problem/131/A', u'cAPS lOCK', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6930817'), ('2014-06-21 00:31:03', u'http://www.codeforces.com/problemset/problem/131/A', u'cAPS lOCK', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6930830'), ('2014-06-21 01:21:34', u'http://www.codeforces.com/problemset/problem/160/A', u'Twins', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/160/submission/6931006'), ('2014-06-21 01:24:10', u'http://www.codeforces.com/problemset/problem/160/A', u'Twins', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/160/submission/6931013'), ('2014-06-21 01:28:28', u'http://www.codeforces.com/problemset/problem/160/A', u'Twins', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/160/submission/6931031'), ('2014-06-21 01:42:08', u'http://www.codeforces.com/problemset/problem/131/C', u'The World is a Theatre', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6931087'), ('2014-06-21 01:55:26', u'http://www.codeforces.com/problemset/problem/131/C', u'The World is a Theatre', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6931137'), ('2014-06-21 01:58:07', u'http://www.codeforces.com/problemset/problem/131/C', u'The World is a Theatre', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6931156'), ('2014-06-21 01:59:17', 
u'http://www.codeforces.com/problemset/problem/131/C', u'The World is a Theatre', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6931160'), ('2014-06-21 02:02:30', u'http://www.codeforces.com/problemset/problem/131/C', u'The World is a Theatre', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6931170'), ('2014-06-21 02:04:53', u'http://www.codeforces.com/problemset/problem/131/C', u'The World is a Theatre', 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6931181'), ('2014-06-21 02:14:48', u'http://www.codeforces.com/problemset/problem/131/C', u'The World is a Theatre', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/131/submission/6931213'), ('2014-06-21 20:42:21', u'http://www.codeforces.com/problemset/problem/160/A', u'Twins', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/160/submission/6938158'), ('2014-06-28 01:04:59', u'http://www.codeforces.com/problemset/problem/268/B', u'Buttons', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/268/submission/6971649'), ('2014-06-28 02:06:43', u'http://www.codeforces.com/problemset/problem/37/A', u'Towers', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/37/submission/6971879'), ('2014-07-17 00:31:42', u'http://www.codeforces.com/problemset/problem/71/A', u'Way Too Long Words', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/71/submission/7118436'), ('2014-07-17 00:46:44', u'http://www.codeforces.com/problemset/problem/43/B', u'Letter', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/43/submission/7118520'), ('2014-07-24 15:36:56', u'http://www.codeforces.com/problemset/problem/447/A', u'DZY Loves Hash', 'CE', '0', u'GNU C++', 'http://www.codeforces.com/contest/447/submission/7215463'), ('2014-07-24 15:39:56', u'http://www.codeforces.com/problemset/problem/447/A', u'DZY Loves Hash', 'CE', '0', u'GNU C', 'http://www.codeforces.com/contest/447/submission/7215478'), ('2014-07-24 
15:42:59', u'http://www.codeforces.com/problemset/problem/447/A', u'DZY Loves Hash', 'AC', '100', u'GNU C', 'http://www.codeforces.com/contest/447/submission/7215497'), ('2014-08-08 17:12:35', u'http://www.codeforces.com/problemset/problem/454/A', u'Little Pony and Crystal Mine', 'AC', '100', u'GNU C', 'http://www.codeforces.com/contest/454/submission/7375767'), ('2014-08-08 22:25:32', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'CE', '0', u'GNU C', 'http://www.codeforces.com/contest/456/submission/7391497'), ('2014-08-08 22:30:29', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'TLE', '0', u'GNU C', 'http://www.codeforces.com/contest/456/submission/7392085'), ('2014-08-10 01:55:39', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/456/submission/7408524'), ('2014-08-10 01:57:55', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/456/submission/7408534'), ('2014-08-10 02:03:27', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/456/submission/7408554'), ('2014-08-10 02:08:35', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/456/submission/7408575'), ('2014-08-10 02:18:38', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/456/submission/7408617'), ('2014-08-10 02:28:59', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/456/submission/7408646'), ('2014-08-31 16:22:26', u'http://www.codeforces.com/problemset/problem/87/A', u'Trains', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/87/submission/7653363'), ('2014-09-28 22:07:52', u'http://www.codeforces.com/problemset/problem/472/A', 
u'Design Tutorial: Learn from Math', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/472/submission/8007179'), ('2014-09-28 22:11:15', u'http://www.codeforces.com/problemset/problem/472/A', u'Design Tutorial: Learn from Math', 'AC', '100', u'GNU C', 'http://www.codeforces.com/contest/472/submission/8007515'), ('2014-09-28 23:07:59', u'http://www.codeforces.com/problemset/problem/472/B', u'Design Tutorial: Learn from Life', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/472/submission/8012494'), ('2014-09-28 23:24:42', u'http://www.codeforces.com/problemset/problem/472/B', u'Design Tutorial: Learn from Life', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/472/submission/8013925'), ('2014-09-28 23:32:59', u'http://www.codeforces.com/problemset/problem/472/B', u'Design Tutorial: Learn from Life', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/472/submission/8014748'), ('2014-09-29 02:27:25', u'http://www.codeforces.com/problemset/problem/472/B', u'Design Tutorial: Learn from Life', 'AC', '100', u'GNU C', 'http://www.codeforces.com/contest/472/submission/8017466'), ('2014-09-29 02:30:15', u'http://www.codeforces.com/problemset/problem/472/B', u'Design Tutorial: Learn from Life', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/472/submission/8017497'), ('2014-10-06 21:28:24', u'http://www.codeforces.com/problemset/problem/474/A', u'Keyboard', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/474/submission/8112225'), ('2014-10-06 21:34:57', u'http://www.codeforces.com/problemset/problem/474/A', u'Keyboard', 'AC', '100', u'GNU C', 'http://www.codeforces.com/contest/474/submission/8113048'), ('2014-10-06 23:10:09', u'http://www.codeforces.com/problemset/problem/474/B', u'Worms', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/474/submission/8120096'), ('2014-10-07 02:58:44', u'http://www.codeforces.com/problemset/problem/474/B', u'Worms', 'AC', '100', u'GNU C', 
'http://www.codeforces.com/contest/474/submission/8123462'), ('2014-10-07 03:55:46', u'http://www.codeforces.com/problemset/problem/474/D', u'Flowers', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/474/submission/8123773'), ('2014-10-07 04:02:21', u'http://www.codeforces.com/problemset/problem/474/D', u'Flowers', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/474/submission/8123802'), ('2015-07-13 19:46:13', u'http://www.codeforces.com/problemset/problem/550/A', u'Two Substrings', 'CE', '0', u'GNU C', 'http://www.codeforces.com/contest/550/submission/12030270'), ('2015-07-13 19:46:47', u'http://www.codeforces.com/problemset/problem/550/A', u'Two Substrings', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/550/submission/12030276'), ('2015-07-13 20:00:28', u'http://www.codeforces.com/problemset/problem/550/A', u'Two Substrings', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/550/submission/12030404'), ('2015-07-13 20:22:36', u'http://www.codeforces.com/problemset/problem/550/B', u'Preparing Olympiad', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/550/submission/12030587'), ('2015-07-13 20:55:12', u'http://www.codeforces.com/problemset/problem/538/A', u'Cutting Banner', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/538/submission/12030895'), ('2015-07-13 20:56:42', u'http://www.codeforces.com/problemset/problem/538/A', u'Cutting Banner', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/538/submission/12030903'), ('2015-07-13 21:17:47', u'http://www.codeforces.com/problemset/problem/538/B', u'Quasi Binary', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/538/submission/12031083'), ('2015-07-13 21:32:43', u'http://www.codeforces.com/problemset/problem/538/B', u'Quasi Binary', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/538/submission/12031229'), ('2015-07-13 23:04:36', u'http://www.codeforces.com/problemset/problem/409/H', u'A + B Strikes Back', 'WA', '0', u'GNU C', 
'http://www.codeforces.com/contest/409/submission/12031995'), ('2015-07-13 23:07:06', u'http://www.codeforces.com/problemset/problem/409/H', u'A + B Strikes Back', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/409/submission/12032008'), ('2015-07-13 23:08:06', u'http://www.codeforces.com/problemset/problem/409/H', u'A + B Strikes Back', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/409/submission/12032015'), ('2015-07-13 23:08:45', u'http://www.codeforces.com/problemset/problem/409/H', u'A + B Strikes Back', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/409/submission/12032021'), ('2015-07-13 23:09:16', u'http://www.codeforces.com/problemset/problem/409/H', u'A + B Strikes Back', 'WA', '0', u'GNU C', 'http://www.codeforces.com/contest/409/submission/12032027'), ('2015-07-13 23:10:05', u'http://www.codeforces.com/problemset/problem/409/H', u'A + B Strikes Back', 'AC', '100', u'GNU C', 'http://www.codeforces.com/contest/409/submission/12032034'), ('2015-08-22 22:26:26', u'http://www.codeforces.com/problemset/problem/572/A', u'Arrays', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/572/submission/12650084'), ('2015-08-22 22:54:57', u'http://www.codeforces.com/problemset/problem/572/B', u'Order Book', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/572/submission/12655042'), ('2015-08-22 23:20:25', u'http://www.codeforces.com/problemset/problem/572/B', u'Order Book', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/572/submission/12658463'), ('2015-08-29 22:25:27', u'http://www.codeforces.com/problemset/problem/574/A', u'Bear and Elections', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/12750171'), ('2015-08-29 22:28:28', u'http://www.codeforces.com/problemset/problem/574/A', u'Bear and Elections', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/12750679'), ('2015-08-29 22:52:25', u'http://www.codeforces.com/problemset/problem/574/C', u'Bear and Poker', 
'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/12754477'), ('2015-08-30 00:49:08', u'http://www.codeforces.com/problemset/problem/574/C', u'Bear and Poker', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/12765492'), ('2015-08-30 00:52:15', u'http://www.codeforces.com/problemset/problem/574/C', u'Bear and Poker', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/12765623'), ('2015-09-02 20:37:01', u'http://www.codeforces.com/problemset/problem/560/A', u'Currency System in Geraldion', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/560/submission/12817055'), ('2015-09-02 20:52:50', u'http://www.codeforces.com/problemset/problem/560/B', u'Gerald is into Art', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/560/submission/12817234'), ('2015-09-02 21:19:30', u'http://www.codeforces.com/problemset/problem/560/B', u'Gerald is into Art', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/560/submission/12817559'), ('2015-09-02 21:23:37', u'http://www.codeforces.com/problemset/problem/560/B', u'Gerald is into Art', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/560/submission/12817612'), ('2015-09-10 22:08:56', u'http://www.codeforces.com/problemset/problem/577/A', u'Multiplication Table', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/577/submission/12928002'), ('2015-09-10 22:57:34', u'http://www.codeforces.com/problemset/problem/577/C', u"Vasya and Petya's Game", 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/577/submission/12937380'), ('2015-09-10 23:24:19', u'http://www.codeforces.com/problemset/problem/577/C', u"Vasya and Petya's Game", 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/577/submission/12941164'), ('2015-09-10 23:35:13', u'http://www.codeforces.com/problemset/problem/577/C', u"Vasya and Petya's Game", 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/577/submission/12942378'), ('2015-09-18 
09:26:35', u'http://www.codeforces.com/problemset/problem/574/B', u'Bear and Three Musketeers', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/13080029'), ('2015-09-18 09:35:11', u'http://www.codeforces.com/problemset/problem/574/B', u'Bear and Three Musketeers', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/13080083'), ('2015-09-18 09:40:54', u'http://www.codeforces.com/problemset/problem/574/B', u'Bear and Three Musketeers', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/13080104'), ('2015-09-18 09:50:57', u'http://www.codeforces.com/problemset/problem/574/B', u'Bear and Three Musketeers', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/13080162'), ('2015-09-18 10:57:39', u'http://www.codeforces.com/problemset/problem/574/B', u'Bear and Three Musketeers', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/574/submission/13080670'), ('2015-09-19 10:04:18', u'http://www.codeforces.com/problemset/problem/158/B', u'Taxi', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/13096185'), ('2015-09-19 10:06:16', u'http://www.codeforces.com/problemset/problem/158/B', u'Taxi', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/13096197'), ('2015-09-19 10:09:39', u'http://www.codeforces.com/problemset/problem/158/B', u'Taxi', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/13096220'), ('2015-09-19 10:13:38', u'http://www.codeforces.com/problemset/problem/158/B', u'Taxi', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/13096250'), ('2015-09-19 10:17:36', u'http://www.codeforces.com/problemset/problem/158/B', u'Taxi', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/158/submission/13096280'), ('2015-09-19 16:27:37', u'http://www.codeforces.com/problemset/problem/160/A', u'Twins', 'AC', '100', u'GNU C++', 
'http://www.codeforces.com/contest/160/submission/13100273'), ('2015-09-19 17:17:56', u'http://www.codeforces.com/problemset/problem/550/C', u'Divisibility by Eight', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/550/submission/13100937'), ('2015-09-19 20:29:07', u'http://www.codeforces.com/problemset/problem/519/B', u'A and B and Compilation Errors', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/519/submission/13103565'), ('2015-09-20 08:58:02', u'http://www.codeforces.com/problemset/problem/204/B', u'Little Elephant and Cards', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/204/submission/13109387'), ('2015-09-20 09:05:26', u'http://www.codeforces.com/problemset/problem/204/B', u'Little Elephant and Cards', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/204/submission/13109421'), ('2015-09-20 09:10:19', u'http://www.codeforces.com/problemset/problem/204/B', u'Little Elephant and Cards', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/204/submission/13109436'), ('2015-09-20 09:15:40', u'http://www.codeforces.com/problemset/problem/204/B', u'Little Elephant and Cards', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/204/submission/13109456'), ('2015-09-20 09:19:16', u'http://www.codeforces.com/problemset/problem/204/B', u'Little Elephant and Cards', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/204/submission/13109467'), ('2015-09-22 22:07:10', u'http://www.codeforces.com/problemset/problem/580/A', u'Kefa and First Steps', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13145925'), ('2015-09-22 22:29:58', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13152519'), ('2015-09-22 23:18:24', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13162731'), 
('2015-09-22 23:24:31', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13163770'), ('2015-09-22 23:25:35', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13163942'), ('2015-09-22 23:29:09', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13164502'), ('2015-09-23 00:49:34', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13171251'), ('2015-09-23 01:03:37', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13171838'), ('2015-09-23 01:38:14', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13172926'), ('2015-09-23 14:55:02', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13181387'), ('2015-09-23 18:14:51', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13185934'), ('2015-09-23 18:16:58', u'http://www.codeforces.com/problemset/problem/580/B', u'Kefa and Company', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13185991'), ('2015-09-23 19:08:23', u'http://www.codeforces.com/problemset/problem/580/C', u'Kefa and Park', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13187242'), ('2015-09-23 19:24:05', u'http://www.codeforces.com/problemset/problem/580/C', u'Kefa and Park', 'WA', '0', u'GNU C++', 
'http://www.codeforces.com/contest/580/submission/13187823'), ('2015-09-23 19:30:09', u'http://www.codeforces.com/problemset/problem/580/C', u'Kefa and Park', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/13187946'), ('2015-09-27 19:40:44', u'http://www.codeforces.com/problemset/problem/4/C', u'Registration System', 'CE', '0', u'GNU C++', 'http://www.codeforces.com/contest/4/submission/13250390'), ('2015-09-27 19:41:55', u'http://www.codeforces.com/problemset/problem/4/C', u'Registration System', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/4/submission/13250410'), ('2015-09-27 21:19:48', u'http://www.codeforces.com/problemset/problem/159/C', u'String Manipulation 1.0', 'MLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/159/submission/13251760'), ('2015-09-28 14:34:58', u'http://www.codeforces.com/problemset/problem/581/A', u'Vasya the Hipster', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13260798'), ('2015-09-28 14:44:20', u'http://www.codeforces.com/problemset/problem/581/B', u'Luxurious Houses', 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13263305'), ('2015-09-28 14:56:03', u'http://www.codeforces.com/problemset/problem/581/C', u'Developing Skills', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13265626'), ('2015-09-28 15:17:41', u'http://www.codeforces.com/problemset/problem/581/C', u'Developing Skills', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13268882'), ('2015-09-29 12:10:51', u'http://www.codeforces.com/problemset/problem/581/B', u'Luxurious Houses', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13292365'), ('2015-09-29 12:22:40', u'http://www.codeforces.com/problemset/problem/581/B', u'Luxurious Houses', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13292509'), ('2015-09-29 12:34:16', 
u'http://www.codeforces.com/problemset/problem/581/C', u'Developing Skills', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13292656'), ('2015-09-29 12:43:38', u'http://www.codeforces.com/problemset/problem/581/C', u'Developing Skills', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13292768'), ('2015-09-29 12:47:20', u'http://www.codeforces.com/problemset/problem/581/B', u'Luxurious Houses', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13292809'), ('2015-09-29 12:48:18', u'http://www.codeforces.com/problemset/problem/581/C', u'Developing Skills', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13292817'), ('2015-09-29 13:10:59', u'http://www.codeforces.com/problemset/problem/581/C', u'Developing Skills', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13293101'), ('2015-09-29 13:32:07', u'http://www.codeforces.com/problemset/problem/581/C', u'Developing Skills', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13293354'), ('2015-09-29 17:43:48', u'http://www.codeforces.com/problemset/problem/581/C', u'Developing Skills', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/581/submission/13297010'), ('2015-09-29 20:59:18', u'http://www.codeforces.com/problemset/problem/263/A', u'Beautiful Matrix', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/263/submission/13300553'), ('2015-09-29 21:14:53', u'http://www.codeforces.com/problemset/problem/118/B', u'Present from Lena', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/118/submission/13300823'), ('2015-09-29 21:29:52', u'http://www.codeforces.com/problemset/problem/118/B', u'Present from Lena', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/118/submission/13301123'), ('2015-10-03 18:44:23', u'http://www.codeforces.com/problemset/problem/268/B', u'Buttons', 'AC', '100', u'GNU C++', 
'http://www.codeforces.com/contest/268/submission/13359900'), ('2015-10-03 20:04:32', u'http://www.codeforces.com/problemset/problem/569/B', u'Inventory', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/569/submission/13360927'), ('2015-10-03 20:06:13', u'http://www.codeforces.com/problemset/problem/569/B', u'Inventory', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/569/submission/13360949'), ('2015-10-03 21:05:23', u'http://www.codeforces.com/problemset/problem/525/C', u'Ilya and Sticks', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/525/submission/13361790'), ('2015-10-03 21:06:58', u'http://www.codeforces.com/problemset/problem/525/C', u'Ilya and Sticks', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/525/submission/13361810'), ('2015-10-03 21:09:02', u'http://www.codeforces.com/problemset/problem/525/C', u'Ilya and Sticks', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/525/submission/13361836'), ('2015-10-03 22:25:41', u'http://www.codeforces.com/problemset/problem/583/A', u'Asphalting Roads', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/583/submission/13365272'), ('2015-10-03 23:30:49', u'http://www.codeforces.com/problemset/problem/583/B', u"Robot's Task", 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/583/submission/13378169'), ('2015-10-06 22:05:41', u'http://www.codeforces.com/problemset/problem/584/A', u'Olesya and Rodion', 'TLE', '0', u'Python 2', 'http://www.codeforces.com/contest/584/submission/13436363'), ('2015-10-06 22:17:59', u'http://www.codeforces.com/problemset/problem/584/A', u'Olesya and Rodion', 'WA', '0', u'Python 2', 'http://www.codeforces.com/contest/584/submission/13440624'), ('2015-10-06 22:24:51', u'http://www.codeforces.com/problemset/problem/584/A', u'Olesya and Rodion', 'CE', '0', u'GNU C++', 'http://www.codeforces.com/contest/584/submission/13442261'), ('2015-10-06 22:25:07', u'http://www.codeforces.com/problemset/problem/584/A', u'Olesya and 
Rodion', 'CE', '0', u'Python 2', 'http://www.codeforces.com/contest/584/submission/13442319'), ('2015-10-06 22:26:42', u'http://www.codeforces.com/problemset/problem/584/A', u'Olesya and Rodion', 'AC', '100', u'Python 2', 'http://www.codeforces.com/contest/584/submission/13442651'), ('2015-10-06 22:52:47', u'http://www.codeforces.com/problemset/problem/584/B', u'Kolya and Tanya ', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/584/submission/13447777'), ('2015-10-06 22:58:59', u'http://www.codeforces.com/problemset/problem/584/B', u'Kolya and Tanya ', 'AC', '100', u'Python 2', 'http://www.codeforces.com/contest/584/submission/13448876'), ('2015-10-06 23:14:57', u'http://www.codeforces.com/problemset/problem/584/C', u'Marina and Vasya', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/584/submission/13451585'), ('2015-10-06 23:35:46', u'http://www.codeforces.com/problemset/problem/584/C', u'Marina and Vasya', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/584/submission/13454813'), ('2015-10-06 23:44:55', u'http://www.codeforces.com/problemset/problem/584/C', u'Marina and Vasya', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/584/submission/13456081'), ('2015-10-07 01:04:27', u'http://www.codeforces.com/problemset/problem/584/B', u'Kolya and Tanya ', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/584/submission/13460503'), ('2015-10-07 18:02:31', u'http://www.codeforces.com/problemset/problem/584/A', u'Olesya and Rodion', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/584/submission/13473005'), ('2015-10-08 21:26:54', u'http://www.codeforces.com/problemset/problem/92/B', u'Binary Number', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/92/submission/13496730'), ('2015-10-09 01:22:57', u'http://www.codeforces.com/problemset/problem/456/A', u'Laptops', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/456/submission/13500243'), ('2015-10-09 01:35:03', 
u'http://www.codeforces.com/problemset/problem/52/A', u'123-sequence', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/52/submission/13500398'), ('2015-10-09 06:38:55', u'http://www.codeforces.com/problemset/problem/266/B', u'Queue at the School', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/266/submission/13502318'), ('2015-10-09 06:45:08', u'http://www.codeforces.com/problemset/problem/479/A', u'Expression', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/479/submission/13502351'), ('2015-10-09 06:46:35', u'http://www.codeforces.com/problemset/problem/479/A', u'Expression', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/479/submission/13502358'), ('2015-10-09 06:50:39', u'http://www.codeforces.com/problemset/problem/61/A', u'Ultra-Fast Mathematician', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/61/submission/13502387'), ('2015-10-09 07:03:29', u'http://www.codeforces.com/problemset/problem/462/B', u'Appleman and Card Game', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/462/submission/13502451'), ('2015-10-09 07:05:19', u'http://www.codeforces.com/problemset/problem/462/B', u'Appleman and Card Game', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/462/submission/13502463'), ('2015-10-09 07:06:54', u'http://www.codeforces.com/problemset/problem/462/B', u'Appleman and Card Game', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/462/submission/13502474'), ('2015-10-09 22:47:48', u'http://www.codeforces.com/problemset/problem/266/B', u'Queue at the School', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/266/submission/13514395'), ('2015-10-09 23:14:22', u'http://www.codeforces.com/problemset/problem/525/B', u'Pasha and String', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/525/submission/13514840'), ('2015-10-09 23:30:20', u'http://www.codeforces.com/problemset/problem/525/B', u'Pasha and String', 'AC', '100', u'GNU C++', 
'http://www.codeforces.com/contest/525/submission/13515120'), ('2015-10-11 04:08:55', u'http://www.codeforces.com/problemset/problem/478/A', u'Initial Bet', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/478/submission/13538926'), ('2015-10-11 04:10:18', u'http://www.codeforces.com/problemset/problem/478/A', u'Initial Bet', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/478/submission/13538931'), ('2015-10-11 04:28:02', u'http://www.codeforces.com/problemset/problem/459/B', u'Pashmak and Flowers', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/459/submission/13538989'), ('2015-10-11 04:29:51', u'http://www.codeforces.com/problemset/problem/459/B', u'Pashmak and Flowers', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/459/submission/13538995'), ('2015-10-11 04:37:27', u'http://www.codeforces.com/problemset/problem/459/B', u'Pashmak and Flowers', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/459/submission/13539018'), ('2015-10-25 14:34:14', u'http://www.codeforces.com/problemset/problem/591/A', u"Wizards' Duel", 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/591/submission/13836193'), ('2015-10-25 14:50:25', u'http://www.codeforces.com/problemset/problem/591/B', u'Rebranding', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/591/submission/13839725'), ('2015-10-25 15:34:56', u'http://www.codeforces.com/problemset/problem/591/C', u'Median Smoothing', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/591/submission/13845641'), ('2015-10-25 15:38:20', u'http://www.codeforces.com/problemset/problem/591/C', u'Median Smoothing', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/591/submission/13846000'), ('2015-10-25 22:51:09', u'http://www.codeforces.com/problemset/problem/591/B', u'Rebranding', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/591/submission/13857177'), ('2015-10-25 23:23:19', u'http://www.codeforces.com/problemset/problem/591/B', 
u'Rebranding', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/591/submission/13857740'), ('2015-10-26 10:46:53', u'http://www.codeforces.com/problemset/problem/591/B', u'Rebranding', 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/591/submission/13866457'), ('2015-10-26 10:53:43', u'http://www.codeforces.com/problemset/problem/591/B', u'Rebranding', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/591/submission/13866518'), ('2015-10-26 19:50:00', u'http://www.codeforces.com/problemset/problem/160/B', u'Unlucky Ticket', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/160/submission/13873974'), ('2015-10-27 02:45:23', u'http://www.codeforces.com/problemset/problem/99/A', u'Help Far Away Kingdom', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/99/submission/13881024'), ('2015-10-27 03:13:34', u'http://www.codeforces.com/problemset/problem/12/B', u'Correct Solution?', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/12/submission/13881211'), ('2015-10-28 06:05:19', u'http://www.codeforces.com/problemset/problem/405/C', u'Unusual Product', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/405/submission/13906955'), ('2015-10-28 08:04:56', u'http://www.codeforces.com/problemset/problem/270/B', u'Multithreading', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/270/submission/13907587'), ('2015-10-28 21:42:49', u'http://www.codeforces.com/problemset/problem/525/C', u'Ilya and Sticks', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/525/submission/13918621'), ('2015-10-28 23:48:03', u'http://www.codeforces.com/problemset/problem/285/C', u'Building Permutation', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/285/submission/13920882'), ('2015-10-28 23:49:59', u'http://www.codeforces.com/problemset/problem/285/C', u'Building Permutation', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/285/submission/13920913'), ('2015-10-30 10:34:56', 
u'http://www.codeforces.com/problemset/problem/245/A', u'System Administrator', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/245/submission/13946807'), ('2015-10-30 10:49:01', u'http://www.codeforces.com/problemset/problem/102/B', u'Sum of Digits', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/102/submission/13946899'), ('2015-10-30 10:53:35', u'http://www.codeforces.com/problemset/problem/102/B', u'Sum of Digits', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/102/submission/13946926'), ('2015-10-31 22:14:30', u'http://www.codeforces.com/problemset/problem/592/A', u'PawnChess', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/592/submission/13975670'), ('2015-10-31 22:29:27', u'http://www.codeforces.com/problemset/problem/592/B', u'The Monster and the Squirrel', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/592/submission/13978806'), ('2015-10-31 22:58:55', u'http://www.codeforces.com/problemset/problem/592/C', u'The Big Race', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/592/submission/13983585'), ('2015-10-31 23:11:05', u'http://www.codeforces.com/problemset/problem/592/C', u'The Big Race', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/592/submission/13985339'), ('2015-11-01 01:46:31', u'http://www.codeforces.com/problemset/problem/592/C', u'The Big Race', 'WA', '0', u'Python 2', 'http://www.codeforces.com/contest/592/submission/13993129'), ('2015-11-01 02:00:03', u'http://www.codeforces.com/problemset/problem/592/C', u'The Big Race', 'WA', '0', u'Python 2', 'http://www.codeforces.com/contest/592/submission/13993447'), ('2015-11-01 02:04:32', u'http://www.codeforces.com/problemset/problem/592/C', u'The Big Race', 'WA', '0', u'Python 2', 'http://www.codeforces.com/contest/592/submission/13993623'), ('2015-11-01 10:48:24', u'http://www.codeforces.com/problemset/problem/592/A', u'PawnChess', 'CE', '0', u'Python 2', 'http://www.codeforces.com/contest/592/submission/14000480'), 
('2015-11-01 10:48:46', u'http://www.codeforces.com/problemset/problem/592/A', u'PawnChess', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/592/submission/14000483'), ('2015-11-03 02:17:02', u'http://www.codeforces.com/problemset/problem/592/C', u'The Big Race', 'AC', '100', u'Python 2', 'http://www.codeforces.com/contest/592/submission/14033816'), ('2015-11-03 02:30:31', u'http://www.codeforces.com/problemset/problem/592/C', u'The Big Race', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/592/submission/14033957'), ('2015-11-04 14:58:56', u'http://www.codeforces.com/problemset/problem/339/B', u'Xenia and Ringroad', 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/339/submission/14054303'), ('2015-11-04 15:00:05', u'http://www.codeforces.com/problemset/problem/339/B', u'Xenia and Ringroad', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/339/submission/14054317'), ('2015-11-04 15:29:08', u'http://www.codeforces.com/problemset/problem/11/A', u'Increasing Sequence', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/11/submission/14054735'), ('2015-11-04 16:30:38', u'http://www.codeforces.com/problemset/problem/567/A', u'Lineland Mail', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/567/submission/14055720'), ('2015-11-05 10:34:36', u'http://www.codeforces.com/problemset/problem/593/A', u'2Char', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/593/submission/14082176'), ('2015-11-06 21:20:07', u'http://www.codeforces.com/problemset/problem/159/C', u'String Manipulation 1.0', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/159/submission/14109516'), ('2015-11-06 21:47:19', u'http://www.codeforces.com/problemset/problem/159/C', u'String Manipulation 1.0', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/159/submission/14109921'), ('2015-11-08 22:05:35', u'http://www.codeforces.com/problemset/problem/595/A', u'Vitaly and Night', 'AC', '100', u'GNU C++', 
'http://www.codeforces.com/contest/595/submission/14145703'), ('2015-11-08 22:44:17', u'http://www.codeforces.com/problemset/problem/595/B', u'Pasha and Phone', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/595/submission/14150515'), ('2015-11-08 23:28:37', u'http://www.codeforces.com/problemset/problem/595/B', u'Pasha and Phone', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/595/submission/14155293'), ('2015-11-16 01:07:14', u'http://www.codeforces.com/problemset/problem/596/A', u'Wilbur and Swimming Pool', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/596/submission/14288508'), ('2015-11-16 01:09:02', u'http://www.codeforces.com/problemset/problem/596/B', u'Wilbur and Array', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/596/submission/14288537'), ('2015-11-16 01:16:40', u'http://www.codeforces.com/problemset/problem/596/A', u'Wilbur and Swimming Pool', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/596/submission/14288651'), ('2015-11-16 01:17:38', u'http://www.codeforces.com/problemset/problem/596/A', u'Wilbur and Swimming Pool', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/596/submission/14288673'), ('2015-12-01 21:15:25', u'http://www.codeforces.com/problemset/problem/604/A', u'Uncowed Forces', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/604/submission/14587410'), ('2015-12-01 21:21:57', u'http://www.codeforces.com/problemset/problem/604/A', u'Uncowed Forces', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/604/submission/14588907'), ('2015-12-01 21:25:25', u'http://www.codeforces.com/problemset/problem/604/A', u'Uncowed Forces', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/604/submission/14589670'), ('2015-12-01 21:50:29', u'http://www.codeforces.com/problemset/problem/604/B', u'More Cowbell', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/604/submission/14593977'), ('2015-12-09 21:53:15', 
u'http://www.codeforces.com/problemset/problem/606/C', u'Sorting Railway Cars', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/606/submission/14718869'), ('2015-12-09 22:14:26', u'http://www.codeforces.com/problemset/problem/606/C', u'Sorting Railway Cars', 'HCK', '-50', u'GNU C++', 'http://www.codeforces.com/contest/606/submission/14722405'), ('2015-12-09 22:44:59', u'http://www.codeforces.com/problemset/problem/606/A', u'Magic Spheres', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/606/submission/14726450'), ('2015-12-09 22:55:27', u'http://www.codeforces.com/problemset/problem/606/A', u'Magic Spheres', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/606/submission/14727619'), ('2015-12-09 22:58:11', u'http://www.codeforces.com/problemset/problem/606/A', u'Magic Spheres', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/606/submission/14727938'), ('2015-12-09 23:00:38', u'http://www.codeforces.com/problemset/problem/606/A', u'Magic Spheres', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/606/submission/14728208'), ('2015-12-15 21:36:55', u'http://www.codeforces.com/problemset/problem/580/A', u'Kefa and First Steps', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/580/submission/14817821'), ('2015-12-17 18:01:21', u'http://www.codeforces.com/problemset/problem/598/B', u'Queries on a String', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/598/submission/14845709'), ('2015-12-17 18:09:23', u'http://www.codeforces.com/problemset/problem/598/B', u'Queries on a String', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/598/submission/14845795'), ('2015-12-17 18:55:21', u'http://www.codeforces.com/problemset/problem/597/A', u'Divisibility', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/597/submission/14846361'), ('2015-12-17 18:56:54', u'http://www.codeforces.com/problemset/problem/597/A', u'Divisibility', 'WA', '0', u'GNU C++', 
'http://www.codeforces.com/contest/597/submission/14846374'), ('2015-12-17 19:02:03', u'http://www.codeforces.com/problemset/problem/597/A', u'Divisibility', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/597/submission/14846436'), ('2015-12-17 19:05:46', u'http://www.codeforces.com/problemset/problem/597/A', u'Divisibility', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/597/submission/14846492'), ('2015-12-22 22:54:31', u'http://www.codeforces.com/problemset/problem/609/B', u'\u041a\u043d\u0438\u0433\u0430 - \u043b\u0443\u0447\u0448\u0438\u0439 \u043f\u043e\u0434\u0430\u0440\u043e\u043a', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/609/submission/14928518'), ('2015-12-23 01:45:32', u'http://www.codeforces.com/problemset/problem/609/C', u'Load Balancing', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/609/submission/14930319'), ('2015-12-23 01:48:44', u'http://www.codeforces.com/problemset/problem/609/C', u'Load Balancing', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/609/submission/14930347'), ('2015-12-23 02:12:32', u'http://www.codeforces.com/problemset/problem/609/C', u'Load Balancing', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/609/submission/14930527'), ('2015-12-23 02:14:12', u'http://www.codeforces.com/problemset/problem/609/C', u'Load Balancing', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/609/submission/14930545'), ('2015-12-24 03:46:52', u'http://www.codeforces.com/problemset/problem/608/A', u'Saitama Destroys Hotel', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/608/submission/14961192'), ('2015-12-24 03:56:12', u'http://www.codeforces.com/problemset/problem/600/B', u'Queries about less or equal elements', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/600/submission/14961257'), ('2015-12-24 04:11:24', u'http://www.codeforces.com/problemset/problem/600/A', u'Extract Numbers', 'AC', '100', u'PyPy 2', 
'http://www.codeforces.com/contest/600/submission/14961343'), ('2015-12-26 00:19:54', u'http://www.codeforces.com/problemset/problem/600/B', u'Queries about less or equal elements', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/600/submission/15021384'), ('2015-12-31 02:06:51', u'http://www.codeforces.com/problemset/problem/611/A', u'New Year and Days', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/611/submission/15129041'), ('2015-12-31 02:07:53', u'http://www.codeforces.com/problemset/problem/611/A', u'New Year and Days', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/611/submission/15129051'), ('2015-12-31 02:39:02', u'http://www.codeforces.com/problemset/problem/611/B', u'New Year and Old Property', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/611/submission/15129360'), ('2016-01-01 00:08:10', u'http://www.codeforces.com/problemset/problem/611/B', u'New Year and Old Property', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/611/submission/15140290'), ('2016-01-02 01:17:28', u'http://www.codeforces.com/problemset/problem/610/A', u'Pasha and Stick', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/610/submission/15152467'), ('2016-01-02 02:05:01', u'http://www.codeforces.com/problemset/problem/610/B', u'Vika and Squares', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/610/submission/15152883'), ('2016-01-05 11:52:15', u'http://www.codeforces.com/problemset/problem/189/A', u'Cut Ribbon', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/189/submission/15187913'), ('2016-01-05 12:26:38', u'http://www.codeforces.com/problemset/problem/489/C', u'Given Length and Sum of Digits...', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/489/submission/15188193'), ('2016-01-06 20:03:28', u'http://www.codeforces.com/problemset/problem/570/C', u'Replacement', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/570/submission/15208011'), ('2016-01-06 20:09:17', 
u'http://www.codeforces.com/problemset/problem/570/C', u'Replacement', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/570/submission/15208096'), ('2016-01-09 14:53:09', u'http://www.codeforces.com/problemset/problem/615/A', u'Bulbs', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/615/submission/15266906'), ('2016-01-14 22:12:10', u'http://www.codeforces.com/problemset/problem/614/A', u'Link/Cut Tree', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15348242'), ('2016-01-14 22:19:51', u'http://www.codeforces.com/problemset/problem/614/A', u'Link/Cut Tree', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15350653'), ('2016-01-14 22:26:04', u'http://www.codeforces.com/problemset/problem/614/A', u'Link/Cut Tree', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15352533'), ('2016-01-14 22:45:52', u'http://www.codeforces.com/problemset/problem/614/B', u"Gena's Code", 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15357739'), ('2016-01-14 22:49:49', u'http://www.codeforces.com/problemset/problem/614/B', u"Gena's Code", 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15358770'), ('2016-01-14 23:13:26', u'http://www.codeforces.com/problemset/problem/614/B', u"Gena's Code", 'TLE', '0', u'PyPy 2', 'http://www.codeforces.com/contest/614/submission/15364083'), ('2016-01-14 23:17:00', u'http://www.codeforces.com/problemset/problem/614/A', u'Link/Cut Tree', 'HCK', '-50', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15364825'), ('2016-01-15 01:46:02', u'http://www.codeforces.com/problemset/problem/614/A', u'Link/Cut Tree', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15376622'), ('2016-01-15 01:50:32', u'http://www.codeforces.com/problemset/problem/614/A', u'Link/Cut Tree', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15376775'), ('2016-01-15 02:04:58', 
u'http://www.codeforces.com/problemset/problem/614/B', u"Gena's Code", 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/614/submission/15377119'), ('2016-01-31 01:01:03', u'http://www.codeforces.com/problemset/problem/618/A', u'Slime Combining', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/618/submission/15684756'), ('2016-01-31 01:44:18', u'http://www.codeforces.com/problemset/problem/618/B', u'Guess the Permutation', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/618/submission/15685235'), ('2016-02-01 07:17:18', u'http://www.codeforces.com/problemset/problem/621/A', u'Wet Shark and Odd and Even', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/621/submission/15722644'), ('2016-02-01 07:40:26', u'http://www.codeforces.com/problemset/problem/621/B', u'Wet Shark and Bishops', 'CE', '0', u'GNU C++', 'http://www.codeforces.com/contest/621/submission/15722848'), ('2016-02-01 07:40:45', u'http://www.codeforces.com/problemset/problem/621/B', u'Wet Shark and Bishops', 'RE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/621/submission/15722852'), ('2016-02-01 07:59:16', u'http://www.codeforces.com/problemset/problem/621/B', u'Wet Shark and Bishops', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/621/submission/15723041'), ('2016-02-01 08:01:58', u'http://www.codeforces.com/problemset/problem/621/B', u'Wet Shark and Bishops', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/621/submission/15723074'), ('2016-02-01 08:05:42', u'http://www.codeforces.com/problemset/problem/621/B', u'Wet Shark and Bishops', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/621/submission/15723107'), ('2016-02-01 08:07:51', u'http://www.codeforces.com/problemset/problem/621/B', u'Wet Shark and Bishops', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/621/submission/15723123'), ('2016-02-22 00:05:38', u'http://www.codeforces.com/problemset/problem/629/A', u'Far Relative\u2019s Birthday 
Cake', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/629/submission/16265987'), ('2016-02-28 19:19:12', u'http://www.codeforces.com/problemset/problem/629/B', u'Far Relative\u2019s Problem', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/629/submission/16404240'), ('2016-02-28 20:35:59', u'http://www.codeforces.com/problemset/problem/630/C', u'Lucky Numbers', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/630/submission/16405407'), ('2016-02-28 20:37:18', u'http://www.codeforces.com/problemset/problem/630/C', u'Lucky Numbers', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/630/submission/16405419'), ('2016-02-28 20:41:06', u'http://www.codeforces.com/problemset/problem/630/C', u'Lucky Numbers', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/630/submission/16405456'), ('2016-07-31 00:44:37', u'http://www.codeforces.com/problemset/problem/699/B', u'One Bomb', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/699/submission/19524584'), ('2016-07-31 00:49:29', u'http://www.codeforces.com/problemset/problem/699/B', u'One Bomb', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/699/submission/19524679'), ('2016-07-31 00:58:14', u'http://www.codeforces.com/problemset/problem/699/B', u'One Bomb', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/699/submission/19524873'), ('2016-07-31 18:30:30', u'http://www.codeforces.com/problemset/problem/699/B', u'One Bomb', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/699/submission/19538526'), ('2016-07-31 18:49:30', u'http://www.codeforces.com/problemset/problem/699/B', u'One Bomb', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/699/submission/19538834'), ('2016-07-31 19:01:53', u'http://www.codeforces.com/problemset/problem/699/A', u'Launch of Collider', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/699/submission/19539062'), ('2016-07-31 20:11:24', 
u'http://www.codeforces.com/problemset/problem/701/A', u'Cards', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/701/submission/19540208'), ('2016-07-31 20:35:26', u'http://www.codeforces.com/problemset/problem/701/B', u'Cells Not Under Attack', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/701/submission/19540595'), ('2016-07-31 20:39:11', u'http://www.codeforces.com/problemset/problem/701/B', u'Cells Not Under Attack', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/701/submission/19540660'), ('2016-08-02 03:12:36', u'http://www.codeforces.com/problemset/problem/702/A', u'Maximum Increase', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/702/submission/19568636'), ('2016-08-02 03:15:28', u'http://www.codeforces.com/problemset/problem/702/A', u'Maximum Increase', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/702/submission/19568664'), ('2016-08-02 03:16:08', u'http://www.codeforces.com/problemset/problem/702/A', u'Maximum Increase', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/702/submission/19568668'), ('2016-08-02 03:23:31', u'http://www.codeforces.com/problemset/problem/702/B', u'Powers of Two', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/702/submission/19568738'), ('2016-08-02 03:25:16', u'http://www.codeforces.com/problemset/problem/702/B', u'Powers of Two', 'TLE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/702/submission/19568745'), ('2016-08-04 20:47:23', u'http://www.codeforces.com/problemset/problem/703/A', u'Mishka and Game', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/703/submission/19617826'), ('2016-08-04 20:49:28', u'http://www.codeforces.com/problemset/problem/703/A', u'Mishka and Game', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/703/submission/19619139'), ('2016-08-04 21:22:13', u'http://www.codeforces.com/problemset/problem/703/B', u'Mishka and trip', 'SK', '0', u'GNU C++11', 
'http://www.codeforces.com/contest/703/submission/19624817'), ('2016-08-04 22:36:40', u'http://www.codeforces.com/problemset/problem/703/B', u'Mishka and trip', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/703/submission/19633551'), ('2016-08-05 01:11:14', u'http://www.codeforces.com/problemset/problem/703/B', u'Mishka and trip', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/703/submission/19638245'), ('2016-08-08 15:49:19', u'http://www.codeforces.com/problemset/problem/705/A', u'Hulk', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/705/submission/19725753'), ('2016-08-08 18:25:13', u'http://www.codeforces.com/problemset/problem/705/B', u'Spider Man', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/705/submission/19728563'), ('2016-08-11 22:10:00', u'http://www.codeforces.com/problemset/problem/706/A', u'Beru-taxi', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/706/submission/19788500'), ('2016-08-11 22:19:02', u'http://www.codeforces.com/problemset/problem/706/B', u'Interesting drink', 'RE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/706/submission/19792085'), ('2016-08-11 22:29:00', u'http://www.codeforces.com/problemset/problem/706/B', u'Interesting drink', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/706/submission/19794671'), ('2016-08-11 22:41:28', u'http://www.codeforces.com/problemset/problem/706/B', u'Interesting drink', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/706/submission/19797228'), ('2016-08-12 01:49:03', u'http://www.codeforces.com/problemset/problem/706/A', u'Beru-taxi', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/706/submission/19812426'), ('2016-08-12 02:19:20', u'http://www.codeforces.com/problemset/problem/706/A', u'Beru-taxi', 'CE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/706/submission/19813299'), ('2016-08-12 02:22:25', u'http://www.codeforces.com/problemset/problem/706/A', u'Beru-taxi', 
'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/706/submission/19813362'), ('2016-08-14 19:27:06', u'http://www.codeforces.com/problemset/problem/702/B', u'Powers of Two', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/702/submission/19869883'), ('2016-08-14 20:27:13', u'http://www.codeforces.com/problemset/problem/706/C', u'Hard problem', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/706/submission/19870767'), ('2016-08-15 04:49:12', u'http://www.codeforces.com/problemset/problem/706/D', u"Vasiliy's Multiset", 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/706/submission/19877506'), ('2016-08-15 04:55:02', u'http://www.codeforces.com/problemset/problem/706/D', u"Vasiliy's Multiset", 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/706/submission/19877543'), ('2016-08-15 06:38:23', u'http://www.codeforces.com/problemset/problem/706/D', u"Vasiliy's Multiset", 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/706/submission/19878193'), ('2016-08-17 22:37:54', u'http://www.codeforces.com/problemset/problem/706/D', u"Vasiliy's Multiset", 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/706/submission/19932138'), ('2016-08-20 15:22:16', u'http://www.codeforces.com/problemset/problem/29/C', u'Mail Stamps', 'CE', '0', u'GNU C++', 'http://www.codeforces.com/contest/29/submission/19979318'), ('2016-08-20 15:22:44', u'http://www.codeforces.com/problemset/problem/29/C', u'Mail Stamps', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/29/submission/19979332'), ('2016-08-20 16:20:32', u'http://www.codeforces.com/problemset/problem/637/B', u'Chat Order', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/637/submission/19980245'), ('2016-08-20 16:22:06', u'http://www.codeforces.com/problemset/problem/637/B', u'Chat Order', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/637/submission/19980267'), ('2016-08-20 16:25:04', 
u'http://www.codeforces.com/problemset/problem/637/B', u'Chat Order', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/637/submission/19980309'), ('2016-08-20 17:25:07', u'http://www.codeforces.com/problemset/problem/622/C', u'Not Equal on a Segment', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/622/submission/19981265'), ('2016-08-20 17:30:50', u'http://www.codeforces.com/problemset/problem/622/C', u'Not Equal on a Segment', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/622/submission/19981354'), ('2016-08-20 18:39:54', u'http://www.codeforces.com/problemset/problem/707/A', u"Brain's Photos", 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/707/submission/19983584'), ('2016-08-20 19:05:41', u'http://www.codeforces.com/problemset/problem/707/B', u'Bakery', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/707/submission/19990875'), ('2016-08-21 02:49:44', u'http://www.codeforces.com/problemset/problem/707/C', u'Pythagorean Triples', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/707/submission/20013751'), ('2016-08-24 06:34:33', u'http://www.codeforces.com/problemset/problem/710/B', u'Optimal Point on a Line', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/710/submission/20096202'), ('2016-08-24 06:44:27', u'http://www.codeforces.com/problemset/problem/710/B', u'Optimal Point on a Line', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/710/submission/20096285'), ('2016-08-24 06:49:56', u'http://www.codeforces.com/problemset/problem/710/A', u'King Moves', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/710/submission/20096337'), ('2016-08-24 06:58:51', u'http://www.codeforces.com/problemset/problem/710/C', u'Magic Odd Square', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/710/submission/20096421'), ('2016-08-24 07:05:26', u'http://www.codeforces.com/problemset/problem/710/C', u'Magic Odd Square', 'WA', '0', u'GNU C++', 
'http://www.codeforces.com/contest/710/submission/20096477'), ('2016-08-24 07:07:46', u'http://www.codeforces.com/problemset/problem/710/C', u'Magic Odd Square', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/710/submission/20096494'), ('2016-08-25 05:52:47', u'http://www.codeforces.com/problemset/problem/709/A', u'Juicer', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/709/submission/20140096'), ('2016-08-25 06:01:00', u'http://www.codeforces.com/problemset/problem/709/C', u'Letters Cyclic Shift', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/709/submission/20140181'), ('2016-08-25 06:04:24', u'http://www.codeforces.com/problemset/problem/709/C', u'Letters Cyclic Shift', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/709/submission/20140220'), ('2016-08-25 06:05:03', u'http://www.codeforces.com/problemset/problem/709/C', u'Letters Cyclic Shift', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/709/submission/20140228'), ('2016-08-25 15:38:47', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'TLE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/705/submission/20150798'), ('2016-08-25 17:26:47', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'RE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/705/submission/20152979'), ('2016-08-25 17:28:05', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'RE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/705/submission/20153009'), ('2016-08-25 17:29:43', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'RE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/705/submission/20153046'), ('2016-08-25 17:33:09', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'RE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/705/submission/20153146'), ('2016-08-25 17:35:27', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'RE', '0', u'GNU C++11', 
'http://www.codeforces.com/contest/705/submission/20153204'), ('2016-08-25 17:40:33', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'RE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/705/submission/20153304'), ('2016-08-25 17:41:24', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20153316'), ('2016-08-25 17:47:30', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20153471'), ('2016-08-25 17:50:56', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20153564'), ('2016-08-25 17:52:06', u'http://www.codeforces.com/problemset/problem/704/A', u'Thor', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/704/submission/20153599'), ('2016-08-25 17:53:50', u'http://www.codeforces.com/problemset/problem/704/A', u'Thor', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/704/submission/20153653'), ('2016-08-25 17:59:43', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20153767'), ('2016-08-25 18:03:16', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20153836'), ('2016-08-25 18:05:03', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20153878'), ('2016-08-25 18:09:01', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20153955'), ('2016-08-25 18:10:53', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20154001'), ('2016-08-25 
18:13:15', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20154058'), ('2016-08-25 18:15:21', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20154102'), ('2016-08-25 18:16:40', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20154129'), ('2016-08-25 18:23:26', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20154295'), ('2016-08-25 18:24:26', u'http://www.codeforces.com/problemset/problem/705/C', u'Thor', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/705/submission/20154322'), ('2016-08-29 10:50:32', u'http://www.codeforces.com/problemset/problem/710/C', u'Magic Odd Square', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/710/submission/20222968'), ('2016-08-29 17:43:28', u'http://www.codeforces.com/problemset/problem/711/A', u'Bus to Udayland', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/711/submission/20230874'), ('2016-08-29 17:48:47', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/711/submission/20232719'), ('2016-08-29 17:55:00', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/711/submission/20234607'), ('2016-08-29 18:08:37', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/711/submission/20238630'), ('2016-08-29 18:11:38', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++11', 
'http://www.codeforces.com/contest/711/submission/20239424'), ('2016-08-29 18:21:50', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/711/submission/20241874'), ('2016-08-29 18:36:36', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/711/submission/20245231'), ('2016-08-29 18:50:27', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'CE', '0', u'GNU C++', 'http://www.codeforces.com/contest/711/submission/20247880'), ('2016-08-29 18:50:49', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/711/submission/20247939'), ('2016-08-29 21:34:54', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/711/submission/20256999'), ('2016-08-29 22:47:30', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/711/submission/20260046'), ('2016-08-29 22:49:03', u'http://www.codeforces.com/problemset/problem/711/B', u'Chris and Magic Square', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/711/submission/20260094'), ('2016-09-03 21:04:35', u'http://www.codeforces.com/problemset/problem/510/B', u'Fox And Two Dots', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/510/submission/20365149'), ('2016-09-03 22:14:53', u'http://www.codeforces.com/problemset/problem/129/B', u'Students and Shoelaces', 'RE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/129/submission/20366343'), ('2016-09-03 22:19:35', u'http://www.codeforces.com/problemset/problem/129/B', u'Students and Shoelaces', 'RE', '0', u'GNU C++', 'http://www.codeforces.com/contest/129/submission/20366460'), ('2016-09-03 
23:04:55', u'http://www.codeforces.com/problemset/problem/129/B', u'Students and Shoelaces', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/129/submission/20367405'), ('2016-09-03 23:09:28', u'http://www.codeforces.com/problemset/problem/129/B', u'Students and Shoelaces', 'TLE', '0', u'GNU C++', 'http://www.codeforces.com/contest/129/submission/20367491'), ('2016-09-03 23:44:56', u'http://www.codeforces.com/problemset/problem/129/B', u'Students and Shoelaces', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/129/submission/20368170'), ('2016-09-03 23:54:28', u'http://www.codeforces.com/problemset/problem/129/B', u'Students and Shoelaces', 'WA', '0', u'GNU C++', 'http://www.codeforces.com/contest/129/submission/20368355'), ('2016-09-03 23:58:44', u'http://www.codeforces.com/problemset/problem/129/B', u'Students and Shoelaces', 'AC', '100', u'GNU C++', 'http://www.codeforces.com/contest/129/submission/20368443'), ('2016-09-04 00:00:06', u'http://www.codeforces.com/problemset/problem/300/B', u'Coach', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/300/submission/20368473'), ('2016-09-04 00:00:23', u'http://www.codeforces.com/problemset/problem/129/B', u'Students and Shoelaces', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/129/submission/20368478'), ('2016-09-04 00:57:23', u'http://www.codeforces.com/problemset/problem/300/B', u'Coach', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/300/submission/20369438'), ('2016-09-04 01:05:04', u'http://www.codeforces.com/problemset/problem/300/B', u'Coach', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/300/submission/20369545'), ('2016-09-04 01:14:44', u'http://www.codeforces.com/problemset/problem/300/B', u'Coach', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/300/submission/20369700'), ('2016-09-05 05:28:45', u'http://www.codeforces.com/problemset/problem/602/C', u'The Two Routes', 'AC', '100', u'GNU C++11', 
'http://www.codeforces.com/contest/602/submission/20391156'), ('2016-09-12 19:52:06', u'http://www.codeforces.com/problemset/problem/712/A', u'Memory and Crow', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/712/submission/20550755'), ('2016-09-12 20:01:01', u'http://www.codeforces.com/problemset/problem/712/B', u'Memory and Trident', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/712/submission/20550916'), ('2016-09-12 20:50:03', u'http://www.codeforces.com/problemset/problem/712/C', u'Memory and De-Evolution', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/712/submission/20551627'), ('2016-09-12 21:16:55', u'http://www.codeforces.com/problemset/problem/712/C', u'Memory and De-Evolution', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/712/submission/20552025'), ('2016-09-17 22:47:03', u'http://www.codeforces.com/problemset/problem/716/A', u'Crazy Computer', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/716/submission/20714780'), ('2016-09-17 23:47:10', u'http://www.codeforces.com/problemset/problem/716/B', u'Complete the Word', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/716/submission/20716899'), ('2016-09-17 23:48:25', u'http://www.codeforces.com/problemset/problem/716/B', u'Complete the Word', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/716/submission/20716935'), ('2016-09-21 18:03:35', u'http://www.codeforces.com/problemset/problem/716/B', u'Complete the Word', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/716/submission/20794436'), ('2016-09-23 18:38:43', u'http://www.codeforces.com/problemset/problem/719/A', u'Vitya in the Countryside', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20837817'), ('2016-09-23 18:40:43', u'http://www.codeforces.com/problemset/problem/719/A', u'Vitya in the Countryside', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20838364'), ('2016-09-23 18:42:38', 
u'http://www.codeforces.com/problemset/problem/719/A', u'Vitya in the Countryside', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20839135'), ('2016-09-23 18:44:24', u'http://www.codeforces.com/problemset/problem/719/A', u'Vitya in the Countryside', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20840020'), ('2016-09-23 18:45:54', u'http://www.codeforces.com/problemset/problem/719/A', u'Vitya in the Countryside', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20840715'), ('2016-09-23 18:56:54', u'http://www.codeforces.com/problemset/problem/719/B', u'Anatoly and Cockroaches', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20845155'), ('2016-09-23 19:24:28', u'http://www.codeforces.com/problemset/problem/719/C', u'Efim and Strange Grade', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20852944'), ('2016-09-23 19:30:10', u'http://www.codeforces.com/problemset/problem/719/C', u'Efim and Strange Grade', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20854218'), ('2016-09-23 19:46:36', u'http://www.codeforces.com/problemset/problem/719/C', u'Efim and Strange Grade', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20857522'), ('2016-09-23 19:49:31', u'http://www.codeforces.com/problemset/problem/719/C', u'Efim and Strange Grade', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20858071'), ('2016-09-23 20:02:28', u'http://www.codeforces.com/problemset/problem/719/C', u'Efim and Strange Grade', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/719/submission/20860429'), ('2016-10-02 00:45:43', u'http://www.codeforces.com/problemset/problem/721/A', u'One-dimensional Japanese Crossword', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/721/submission/21096198'), ('2016-10-02 00:56:47', 
u'http://www.codeforces.com/problemset/problem/721/B', u'Passwords', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/721/submission/21096352'), ('2016-10-02 01:22:26', u'http://www.codeforces.com/problemset/problem/721/B', u'Passwords', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/721/submission/21096748'), ('2016-10-02 16:21:19', u'http://www.codeforces.com/problemset/problem/722/A', u'Broken Clock', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/722/submission/21112277'), ('2016-10-02 16:23:01', u'http://www.codeforces.com/problemset/problem/722/A', u'Broken Clock', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/722/submission/21112319'), ('2016-10-02 16:54:23', u'http://www.codeforces.com/problemset/problem/722/B', u'Verse Pattern', 'WA', '0', u'GNU C++11', 'http://www.codeforces.com/contest/722/submission/21113003'), ('2016-10-02 16:56:42', u'http://www.codeforces.com/problemset/problem/722/B', u'Verse Pattern', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/722/submission/21113067'), ('2016-10-05 02:06:14', u'http://www.codeforces.com/problemset/problem/723/A', u'The New Year: Meeting Friends', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/723/submission/21196305'), ('2016-10-05 02:24:17', u'http://www.codeforces.com/problemset/problem/723/B', u'Text Document Analysis', 'AC', '100', u'Python 2', 'http://www.codeforces.com/contest/723/submission/21196529'), ('2017-03-06 05:02:38', u'http://www.codeforces.com/problemset/problem/723/B', u'Text Document Analysis', 'AC', '100', u'Python 2', 'http://www.codeforces.com/contest/723/submission/25275840'), ('2017-03-06 05:03:10', u'http://www.codeforces.com/problemset/problem/723/B', u'Text Document Analysis', 'WA', '0', u'Python 2', 'http://www.codeforces.com/contest/723/submission/25275845'), ('2017-03-06 05:07:21', u'http://www.codeforces.com/problemset/problem/429/B', u'Working out', 'TLE', '0', u'GNU C++11', 
'http://www.codeforces.com/contest/429/submission/25275894'), ('2017-03-06 05:08:26', u'http://www.codeforces.com/problemset/problem/429/B', u'Working out', 'AC', '100', u'GNU C++11', 'http://www.codeforces.com/contest/429/submission/25275906'), ('2017-03-06 05:15:08', u'http://www.codeforces.com/problemset/problem/429/B', u'Working out', 'CE', '0', u'GNU C++11', 'http://www.codeforces.com/contest/429/submission/25275955'), ('2018-03-01 04:19:28', u'http://www.codeforces.com/problemset/problem/577/A', u'Multiplication Table', 'RE', '0', u'Python 2', 'http://www.codeforces.com/contest/577/submission/35797975'), ('2018-03-01 04:19:47', u'http://www.codeforces.com/problemset/problem/577/A', u'Multiplication Table', 'AC', '100', u'Python 3', 'http://www.codeforces.com/contest/577/submission/35797984')],
"Spoj": [('2013-08-09 16:13:01', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'CE', '0', u'ADA95', ''), ('2013-08-09 16:13:19', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'RE', '0', u'C', ''), ('2013-08-09 16:13:50', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'RE', '0', u'C', ''), ('2013-08-09 16:15:24', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'RE', '0', u'C', ''), ('2013-08-12 10:48:56', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'CE', '0', u'ADA95', ''), ('2013-08-12 10:50:14', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'CE', '0', u'ADA95', ''), ('2013-08-13 19:11:24', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'WA', '0', u'C', ''), ('2013-08-13 19:11:50', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'WA', '0', u'C', ''), ('2015-03-24 05:08:29', 'https://www.spoj.com/problems/TEST/', u'Life, the Universe, and Everything', 'AC', '100', u'C', ''), ('2015-03-28 00:47:43', 'https://www.spoj.com/problems/NSTEPS/', u'Number Steps', 'AC', '100', u'C++', ''), ('2015-06-30 03:38:17', 'https://www.spoj.com/problems/FCTRL/', u'Factorial', 'AC', '100', u'CPP', ''), ('2015-06-30 03:41:12', 'https://www.spoj.com/problems/FCTRL/', u'Factorial', 'AC', '100', u'CPP', ''), ('2015-06-30 03:42:49', 'https://www.spoj.com/problems/FCTRL/', u'Factorial', 'AC', '100', u'CPP', ''), ('2015-06-30 04:00:12', 'https://www.spoj.com/problems/FCTRL2/', u'Small factorials', 'AC', '100', u'C', ''), ('2015-06-30 04:16:14', 'https://www.spoj.com/problems/SAMER08F/', u'Feynman', 'AC', '100', u'CPP', ''), ('2015-06-30 04:58:12', 'https://www.spoj.com/problems/LASTDIG/', u'The last digit', 'AC', '100', u'CPP', ''), ('2015-07-25 17:08:08', 'https://www.spoj.com/problems/FARIDA/', u'Princess Farida', 'WA', '0', u'CPP', ''), 
('2015-07-25 17:11:03', 'https://www.spoj.com/problems/FARIDA/', u'Princess Farida', 'WA', '0', u'CPP', ''), ('2015-07-25 17:15:01', 'https://www.spoj.com/problems/FARIDA/', u'Princess Farida', 'AC', '100', u'CPP', ''), ('2015-09-26 21:01:26', 'https://www.spoj.com/problems/MUL/', u'Fast Multiplication', 'TLE', '0', u'C++', ''), ('2015-09-26 21:04:40', 'https://www.spoj.com/problems/MUL/', u'Fast Multiplication', 'AC', '100', u'PYTHON', ''), ('2015-12-05 08:37:26', 'https://www.spoj.com/problems/PRIME1/', u'Prime Generator', 'WA', '0', u'C', ''), ('2017-05-15 17:07:43', 'https://www.spoj.com/problems/PRIME1/', u'Prime Generator', 'WA', '0', u'C', ''), ('2018-10-02 23:41:30', 'https://www.spoj.com/problems/ONP/', u'Transform the Expression', 'WA', '0', u'CPP', ''), ('2019-05-26 22:58:02', 'https://www.spoj.com/problems/BACTERIA/', u'SPOJ Custom Test', 'OTH', '0', u'PYTHON3', '')],
"HackerEarth": [('2014-06-17 14:50:52', 'https://www.hackerearth.com/practice/data-structures/hash-tables/basics-of-hash-tables/practice-problems/algorithm/mind-palaces-3/', u'Mind Palaces', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/333758'), ('2014-06-17 14:55:06', 'https://www.hackerearth.com/practice/data-structures/hash-tables/basics-of-hash-tables/practice-problems/algorithm/mind-palaces-3/', u'Mind Palaces', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/333766'), ('2014-06-17 14:56:59', 'https://www.hackerearth.com/practice/data-structures/hash-tables/basics-of-hash-tables/practice-problems/algorithm/mind-palaces-3/', u'Mind Palaces', 'PS', '0', u'C', 'https://www.hackerearth.com/submission/333770'), ('2014-06-17 15:38:24', 'https://www.hackerearth.com/practice/data-structures/hash-tables/basics-of-hash-tables/practice-problems/algorithm/mind-palaces-3/', u'Mind Palaces', 'PS', '0', u'C', 'https://www.hackerearth.com/submission/333824'), ('2014-06-17 15:53:23', 'https://www.hackerearth.com/practice/data-structures/hash-tables/basics-of-hash-tables/practice-problems/algorithm/mind-palaces-3/', u'Mind Palaces', 'PS', '0', u'C', 'https://www.hackerearth.com/submission/333833'), ('2014-06-17 16:08:55', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/palindromic-numbers-7/', u'Palindromic Numbers', 'AC', '100', u'Python', 'https://www.hackerearth.com/submission/333846'), ('2014-10-02 04:57:34', 'https://www.hackerearth.com/problem/algorithm/day-1-if-else-conditionslooping/', u"Bajirao's Rescue Operation", 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/789146'), ('2014-10-02 05:00:56', 'https://www.hackerearth.com/problem/algorithm/day-1-if-else-conditionslooping/', u"Bajirao's Rescue Operation", 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/789152'), ('2014-10-02 05:20:08', 
'https://www.hackerearth.com/problem/algorithm/day-1-if-else-conditionslooping/', u"Bajirao's Rescue Operation", 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/789161'), ('2014-10-02 05:40:22', 'https://www.hackerearth.com/problem/algorithm/day-1-if-else-conditionslooping/', u"Bajirao's Rescue Operation", 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/789173'), ('2014-10-02 05:40:22', 'https://www.hackerearth.com/problem/algorithm/day-1-if-else-conditionslooping/', u"Bajirao's Rescue Operation", 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/789173'), ('2014-10-02 05:40:23', 'https://www.hackerearth.com/problem/algorithm/day-1-if-else-conditionslooping/', u"Bajirao's Rescue Operation", 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/789174'), ('2014-10-02 05:43:40', 'https://www.hackerearth.com/problem/algorithm/day-1-if-else-conditionslooping/', u"Bajirao's Rescue Operation", 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/789180'), ('2014-10-02 05:43:40', 'https://www.hackerearth.com/problem/algorithm/day-1-if-else-conditionslooping/', u"Bajirao's Rescue Operation", 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/789181'), ('2014-10-02 05:51:40', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/complete-string-4/', u'Complete String', 'TLE', '0', u'C++', 'https://www.hackerearth.com/submission/789184'), ('2014-10-02 06:01:47', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/complete-string-4/', u'Complete String', 'TLE', '0', u'C++', 'https://www.hackerearth.com/submission/789187'), ('2014-10-02 06:07:25', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/complete-string-4/', u'Complete String', 'AC', '100', u'C', 'https://www.hackerearth.com/submission/789191'), 
('2015-05-30 21:46:15', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/recursive-sums/', u'Recursive Sums', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1866870'), ('2015-05-30 21:47:45', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/recursive-sums/', u'Recursive Sums', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1866905'), ('2015-05-30 21:52:07', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/recursive-sums/', u'Recursive Sums', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1867017'), ('2015-05-30 21:58:10', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/recursive-sums/', u'Recursive Sums', 'AC', '100', u'Python', 'https://www.hackerearth.com/submission/1867183'), ('2015-06-01 21:51:41', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/very-cool-numbers/', u'Very Cool Numbers', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1876428'), ('2015-06-01 22:07:31', 'https://www.hackerearth.com/problem/algorithm/children-love-candies/', u'Children Love Candies', 'PS', '0', u'C', 'https://www.hackerearth.com/submission/1877240'), ('2015-06-01 22:09:05', 'https://www.hackerearth.com/problem/algorithm/children-love-candies/', u'Children Love Candies', 'AC', '100', u'C', 'https://www.hackerearth.com/submission/1877330'), ('2015-06-01 22:18:48', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/very-cool-numbers/', u'Very Cool Numbers', 'PS', '0', u'Python', 'https://www.hackerearth.com/submission/1877835'), ('2015-06-01 22:23:44', 
'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/very-cool-numbers/', u'Very Cool Numbers', 'AC', '100', u'Python', 'https://www.hackerearth.com/submission/1878092'), ('2015-06-01 22:33:08', 'https://www.hackerearth.com/problem/algorithm/andrew-and-max/', u'Andrew and Max', 'AC', '100', u'C', 'https://www.hackerearth.com/submission/1878567'), ('2015-06-01 22:55:56', 'https://www.hackerearth.com/problem/algorithm/zeroshark/', u'ZeroShark', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1879759'), ('2015-06-01 23:11:57', 'https://www.hackerearth.com/problem/algorithm/zeroshark/', u'ZeroShark', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1880558'), ('2015-06-01 23:17:34', 'https://www.hackerearth.com/problem/algorithm/zeroshark/', u'ZeroShark', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1880825'), ('2015-06-04 21:02:21', 'https://www.hackerearth.com/practice/algorithms/string-algorithm/basics-of-string-manipulation/practice-problems/algorithm/terrible-chandu/', u'Terrible Chandu', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/1894925'), ('2015-06-04 21:02:21', 'https://www.hackerearth.com/practice/algorithms/string-algorithm/basics-of-string-manipulation/practice-problems/algorithm/terrible-chandu/', u'Terrible Chandu', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/1894925'), ('2015-06-04 21:06:29', 'https://www.hackerearth.com/practice/algorithms/greedy/basics-of-greedy-algorithms/practice-problems/algorithm/chandu-and-consecutive-letters/', u'Chandu and Consecutive Letters', 'AC', '100', u'C', 'https://www.hackerearth.com/submission/1895133'), ('2015-06-04 21:10:59', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/prateek-and-his-friends/', u'Prateek and his Friends', 'AC', '100', u'C', 'https://www.hackerearth.com/submission/1895359'), 
('2015-06-09 21:03:35', 'https://www.hackerearth.com/practice/algorithms/sorting/merge-sort/practice-problems/algorithm/chandu-and-his-girlfriend/', u'Chandu and his Girlfriend', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/1919932'), ('2015-06-09 21:07:37', 'https://www.hackerearth.com/practice/algorithms/sorting/merge-sort/practice-problems/algorithm/chandu-and-his-girlfriend-returns/', u'Chandu and his Girlfriend Returns', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1920040'), ('2015-06-09 21:12:29', 'https://www.hackerearth.com/practice/algorithms/sorting/merge-sort/practice-problems/algorithm/chandu-and-his-girlfriend-returns/', u'Chandu and his Girlfriend Returns', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1920191'), ('2015-06-09 21:18:14', 'https://www.hackerearth.com/practice/algorithms/sorting/merge-sort/practice-problems/algorithm/chandu-and-his-girlfriend-returns/', u'Chandu and his Girlfriend Returns', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/1920367'), ('2015-06-11 21:05:52', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/discover-the-monk/', u'Discover the Monk', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1930370'), ('2015-06-11 21:09:45', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/discover-the-monk/', u'Discover the Monk', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1930499'), ('2015-06-11 21:15:07', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/discover-the-monk/', u'Discover the Monk', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/1930694'), ('2015-06-11 21:28:20', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/monks-encounter-with-polynomial/', u"Monk's Encounter with Polynomial", 'WA', '0', u'C', 
'https://www.hackerearth.com/submission/1931189'), ('2015-06-11 21:28:38', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/monks-encounter-with-polynomial/', u"Monk's Encounter with Polynomial", 'PS', '0', u'C', 'https://www.hackerearth.com/submission/1931196'), ('2015-06-11 21:29:06', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/monks-encounter-with-polynomial/', u"Monk's Encounter with Polynomial", 'PS', '0', u'C', 'https://www.hackerearth.com/submission/1931215'), ('2015-06-11 21:30:47', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/monks-encounter-with-polynomial/', u"Monk's Encounter with Polynomial", 'PS', '0', u'C', 'https://www.hackerearth.com/submission/1931281'), ('2015-06-11 21:32:24', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/monks-encounter-with-polynomial/', u"Monk's Encounter with Polynomial", 'PS', '0', u'C', 'https://www.hackerearth.com/submission/1931332'), ('2015-06-11 21:34:35', 'https://www.hackerearth.com/practice/algorithms/searching/binary-search/practice-problems/algorithm/monks-encounter-with-polynomial/', u"Monk's Encounter with Polynomial", 'PS', '0', u'C', 'https://www.hackerearth.com/submission/1931416'), ('2015-07-01 21:36:39', 'https://www.hackerearth.com/practice/algorithms/sorting/insertion-sort/practice-problems/algorithm/the-rise-of-the-weird-things-1/', u'The rise of the weird... things [1]', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2037234'), ('2015-07-01 21:39:00', 'https://www.hackerearth.com/practice/algorithms/sorting/insertion-sort/practice-problems/algorithm/the-rise-of-the-weird-things-1/', u'The rise of the weird... 
things [1]', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2037359'), ('2015-07-01 22:06:20', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/the-savior-3/', u'The savior? [3]', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2038727'), ('2015-07-01 22:14:10', 'https://www.hackerearth.com/practice/basic-programming/implementation/basics-of-implementation/practice-problems/algorithm/the-savior-3/', u'The savior? [3]', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2039043'), ('2015-07-01 23:06:28', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/2-dimensional/practice-problems/algorithm/supernatural-squad-2/', u'Supernatural Squad [2]', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2040873'), ('2015-07-01 23:06:28', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/2-dimensional/practice-problems/algorithm/supernatural-squad-2/', u'Supernatural Squad [2]', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2040873'), ('2015-07-01 23:08:23', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/2-dimensional/practice-problems/algorithm/supernatural-squad-2/', u'Supernatural Squad [2]', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2040928'), ('2015-07-01 23:10:56', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/2-dimensional/practice-problems/algorithm/supernatural-squad-2/', u'Supernatural Squad [2]', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2041005'), ('2015-07-03 18:28:59', 'https://www.hackerearth.com/problem/algorithm/valentine-shopping-4/', u'Valentine Shopping', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2053959'), ('2015-07-03 18:48:11', 'https://www.hackerearth.com/challenges/hiring/bookmyshowhiringchallenge/algorithm/marut-and-girls/', u'Marut and Girls', 'PS', '0', u'Python', 
'https://www.hackerearth.com/submission/2054041'), ('2015-07-03 18:48:11', 'https://www.hackerearth.com/challenges/hiring/bookmyshowhiringchallenge/algorithm/marut-and-girls/', u'Marut and Girls', 'PS', '0', u'Python', 'https://www.hackerearth.com/submission/2054042'), ('2015-07-03 18:51:55', 'https://www.hackerearth.com/challenges/hiring/bookmyshowhiringchallenge/algorithm/marut-and-girls/', u'Marut and Girls', 'PS', '0', u'Python', 'https://www.hackerearth.com/submission/2054062'), ('2015-07-03 18:57:12', 'https://www.hackerearth.com/challenges/hiring/bookmyshowhiringchallenge/algorithm/marut-and-girls/', u'Marut and Girls', 'AC', '100', u'Python', 'https://www.hackerearth.com/submission/2054105'), ('2015-07-03 18:57:12', 'https://www.hackerearth.com/challenges/hiring/bookmyshowhiringchallenge/algorithm/marut-and-girls/', u'Marut and Girls', 'AC', '100', u'Python', 'https://www.hackerearth.com/submission/2054106'), ('2015-07-03 21:37:13', 'https://www.hackerearth.com/problem/algorithm/beta-testing/', u'Beta Testing', 'WA', '0', u'Python', 'https://www.hackerearth.com/submission/2055210'), ('2015-07-03 22:22:51', 'https://www.hackerearth.com/problem/algorithm/beta-testing/', u'Beta Testing', 'AC', '100', u'Python', 'https://www.hackerearth.com/submission/2055901'), ('2015-07-04 12:55:07', 'https://www.hackerearth.com/practice/algorithms/graphs/graph-representation/practice-problems/algorithm/monk-in-the-real-estate/', u'Monk in the real estate', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2059508'), ('2015-07-06 22:30:59', 'https://www.hackerearth.com/problem/algorithm/beta-testing/', u'Beta Testing', 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/2071774'), ('2015-07-06 22:48:05', 'https://www.hackerearth.com/problem/algorithm/beta-testing/', u'Beta Testing', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2071820'), ('2015-07-06 23:04:59', 'https://www.hackerearth.com/problem/algorithm/to-be-changed-choosing-a-project/', 
u'Side Projects', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2071872'), ('2015-07-06 23:30:34', 'https://www.hackerearth.com/problem/algorithm/to-be-changed-compile-time-fun/', u"It's Compiling!", 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2071940'), ('2015-07-08 23:20:31', 'https://www.hackerearth.com/problem/algorithm/monk-and-the-collision/', u'Monk and the Collision', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2082091'), ('2015-07-08 23:21:06', 'https://www.hackerearth.com/problem/algorithm/monk-and-the-collision/', u'Monk and the Collision', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2082114'), ('2015-07-08 23:36:27', 'https://www.hackerearth.com/problem/algorithm/monk-in-the-land-of-pokemons/', u'Monk in the land of Pokemons!', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2082452'), ('2015-07-08 23:38:45', 'https://www.hackerearth.com/problem/algorithm/monk-in-the-land-of-pokemons/', u'Monk in the land of Pokemons!', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2082465'), ('2015-07-08 23:50:39', 'https://www.hackerearth.com/problem/algorithm/monk-in-the-land-of-pokemons/', u'Monk in the land of Pokemons!', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2082564'), ('2015-07-08 23:50:39', 'https://www.hackerearth.com/problem/algorithm/monk-in-the-land-of-pokemons/', u'Monk in the land of Pokemons!', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2082564'), ('2015-07-18 07:29:31', 'https://www.hackerearth.com/problem/algorithm/will-you-be-my-friend-pledge-easy/', u'Will you be my friend?', 'CE', '0', u'Java', 'https://www.hackerearth.com/submission/2144171'), ('2015-07-18 07:54:12', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/intelligent-girl-1/', u'Intelligent Girl ', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2144180'), ('2015-07-23 
12:47:19', 'https://www.hackerearth.com/practice/data-structures/trees/heapspriority-queues/practice-problems/algorithm/monk-and-multiplication/', u'Monk and Multiplication', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2181397'), ('2015-07-23 12:48:32', 'https://www.hackerearth.com/practice/data-structures/trees/heapspriority-queues/practice-problems/algorithm/monk-and-multiplication/', u'Monk and Multiplication', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2181405'), ('2015-07-23 13:45:20', 'https://www.hackerearth.com/challenges/competitive/code-monk-heaps-and-priority-queues/algorithm/monk-and-some-queries/', u'Monk And Some Queries', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2181589'), ('2015-07-23 13:52:48', 'https://www.hackerearth.com/challenges/competitive/code-monk-heaps-and-priority-queues/algorithm/monk-and-some-queries/', u'Monk And Some Queries', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2181611'), ('2015-07-23 14:01:15', 'https://www.hackerearth.com/challenges/competitive/code-monk-heaps-and-priority-queues/algorithm/monk-and-some-queries/', u'Monk And Some Queries', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2181643'), ('2015-07-23 14:08:45', 'https://www.hackerearth.com/challenges/competitive/code-monk-heaps-and-priority-queues/algorithm/monk-and-some-queries/', u'Monk And Some Queries', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2181659'), ('2015-07-23 14:12:17', 'https://www.hackerearth.com/challenges/competitive/code-monk-heaps-and-priority-queues/algorithm/monk-and-some-queries/', u'Monk And Some Queries', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2181670'), ('2015-07-23 14:16:03', 'https://www.hackerearth.com/challenges/competitive/code-monk-heaps-and-priority-queues/algorithm/monk-and-some-queries/', u'Monk And Some Queries', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2181686'), ('2015-07-23 14:17:49', 
'https://www.hackerearth.com/challenges/competitive/code-monk-heaps-and-priority-queues/algorithm/monk-and-some-queries/', u'Monk And Some Queries', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2181696'), ('2015-08-15 19:54:58', 'https://www.hackerearth.com/practice/algorithms/graphs/graph-representation/practice-problems/algorithm/monk-in-the-real-estate/', u'Monk in the real estate', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2326114'), ('2015-08-15 20:05:30', 'https://www.hackerearth.com/practice/algorithms/graphs/graph-representation/practice-problems/algorithm/monk-at-the-graph-factory/', u'Monk at the Graph Factory', 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/2326217'), ('2015-08-15 20:07:06', 'https://www.hackerearth.com/practice/algorithms/graphs/graph-representation/practice-problems/algorithm/monk-at-the-graph-factory/', u'Monk at the Graph Factory', 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/2326232'), ('2015-08-15 20:17:21', 'https://www.hackerearth.com/practice/algorithms/graphs/graph-representation/practice-problems/algorithm/monk-at-the-graph-factory/', u'Monk at the Graph Factory', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2326300'), ('2015-08-15 20:57:56', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/2326601'), ('2015-08-15 21:10:36', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2326699'), ('2015-08-15 21:13:03', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2326714'), ('2015-08-15 
21:15:52', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2326727'), ('2015-08-15 21:20:43', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2326762'), ('2015-08-15 21:20:43', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2326762'), ('2015-08-15 21:27:49', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'RE', '0', u'C++', 'https://www.hackerearth.com/submission/2326799'), ('2015-08-15 21:28:47', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2326811'), ('2015-08-15 21:42:24', 'https://www.hackerearth.com/practice/algorithms/graphs/depth-first-search/practice-problems/algorithm/kingdom-of-monkeys/', u'Kingdom Of Monkeys', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2326907'), ('2015-08-28 01:03:17', 'https://www.hackerearth.com/practice/data-structures/disjoint-data-strutures/basics-of-disjoint-data-structures/practice-problems/algorithm/city-and-flood-1/', u'City and Flood', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/2400169'), ('2015-09-03 19:34:56', 'https://www.hackerearth.com/problem/algorithm/guess-the-triangle/', u'Guess the triangle', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/2449157'), ('2015-12-18 12:28:32', 'https://www.hackerearth.com/problem/algorithm/prime-probablity-1/', 
u'Prime Probablity', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/3031761'), ('2015-12-18 12:33:00', 'https://www.hackerearth.com/problem/algorithm/prime-probablity-1/', u'Prime Probablity', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/3031774'), ('2015-12-18 12:46:11', 'https://www.hackerearth.com/problem/algorithm/prime-probablity-1/', u'Prime Probablity', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/3031821'), ('2015-12-18 12:54:19', 'https://www.hackerearth.com/problem/algorithm/prime-probablity-1/', u'Prime Probablity', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/3031840'), ('2015-12-18 22:25:48', 'https://www.hackerearth.com/problem/algorithm/special-subarray-1/', u'Special Subarray', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/3035335'), ('2015-12-18 22:31:43', 'https://www.hackerearth.com/problem/algorithm/special-subarray-1/', u'Special Subarray', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/3035367'), ('2015-12-20 10:59:00', 'https://www.hackerearth.com/problem/algorithm/prime-probablity-1/', u'Prime Probablity', 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/3050348'), ('2016-01-06 23:37:02', 'https://www.hackerearth.com/problem/algorithm/digital-numbers/', u'Digital Numbers', 'WA', '0', u'C', 'https://www.hackerearth.com/submission/3120602'), ('2016-09-14 22:25:52', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/xsquare-and-two-arrays/', u'Xsquare And Two Arrays', 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/5167117'), ('2016-09-14 22:26:45', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/xsquare-and-two-arrays/', u'Xsquare And Two Arrays', 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/5167122'), ('2016-09-14 22:46:04', 
'https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/xsquare-and-two-arrays/', u'Xsquare And Two Arrays', 'WA', '0', u'C++', 'https://www.hackerearth.com/submission/5167266'), ('2016-09-14 22:50:24', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/xsquare-and-two-arrays/', u'Xsquare And Two Arrays', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/5167320'), ('2016-09-29 21:25:56', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/choosing-the-judges-7/', u'Choosing the Judges', 'AC', '100', u'C++', 'https://www.hackerearth.com/submission/5421843'), ('2016-09-29 22:05:06', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/rhezo-and-prime-problems/', u'Rhezo and Prime Problems', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/5422329'), ('2016-09-29 22:16:01', 'https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/rhezo-and-prime-problems/', u'Rhezo and Prime Problems', 'PS', '0', u'C++', 'https://www.hackerearth.com/submission/5422459')],
"HackerRank": [('2014-06-09 22:53:13', u'https://www.hackerrank.com/challenges/solve-me-first', u'Solve Me First', 'AC', '100', '-', ''), ('2014-06-09 23:03:21', u'https://www.hackerrank.com/challenges/find-point', u'Find the Point', 'AC', '100', '-', ''), ('2014-06-09 23:40:25', u'https://www.hackerrank.com/challenges/lonely-integer', u'Lonely Integer', 'AC', '100', '-', ''), ('2014-06-10 00:08:01', u'https://www.hackerrank.com/challenges/the-love-letter-mystery', u'The Love-Letter Mystery', 'AC', '100', '-', ''), ('2014-07-17 02:38:05', u'https://www.hackerrank.com/challenges/utopian-tree', u'Utopian Tree', 'AC', '100', '-', ''), ('2014-07-17 03:11:48', u'https://www.hackerrank.com/contests/w7/challenges/die-hard-3', u'Die Hard 3', 'AC', '100', '-', ''), ('2014-07-17 03:24:54', u'https://www.hackerrank.com/challenges/runningtime', u'Running Time of Algorithms', 'AC', '100', '-', ''), ('2014-07-17 03:49:56', u'https://www.hackerrank.com/contests/w7/challenges/string-function-calculation', u'String Function Calculation', 'AC', '100', '-', ''), ('2014-07-22 01:29:21', u'https://www.hackerrank.com/challenges/gem-stones', u'Gemstones', 'AC', '100', '-', ''), ('2014-08-08 17:24:20', u'https://www.hackerrank.com/contests/w8/challenges/counter-game', u'Counter game', 'AC', '100', '-', ''), ('2014-09-24 01:29:10', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler052', u'Project Euler #52: Permuted multiples', 'AC', '100', '-', ''), ('2014-09-27 20:48:27', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler001', u'Project Euler #1: Multiples of 3 and 5', 'AC', '100', '-', ''), ('2014-09-27 22:39:27', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler002', u'Project Euler #2: Even Fibonacci numbers', 'AC', '100', '-', ''), ('2014-09-28 00:53:48', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler016', u'Project Euler #16: Power digit sum', 'AC', '100', '-', ''), ('2014-09-28 03:59:31', 
u'https://www.hackerrank.com/contests/projecteuler/challenges/euler034', u'Project Euler #34: Digit factorials', 'AC', '100', '-', ''), ('2014-10-01 19:47:25', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler042', u'Project Euler #42: Coded triangle numbers', 'AC', '100', '-', ''), ('2014-10-01 20:06:36', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler030', u'Project Euler #30: Digit Nth powers', 'AC', '100', '-', ''), ('2014-10-02 22:39:43', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler048', u'Project Euler #48: Self powers', 'AC', '100', '-', ''), ('2014-10-02 22:55:27', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler020', u'Project Euler #20: Factorial digit sum', 'AC', '100', '-', ''), ('2014-10-04 00:35:02', u'https://www.hackerrank.com/challenges/bigger-is-greater', u'Bigger is Greater', 'AC', '100', '-', ''), ('2014-10-04 05:36:38', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler005', u'Project Euler #5: Smallest multiple', 'AC', '100', '-', ''), ('2014-10-04 05:45:06', u'https://www.hackerrank.com/contests/projecteuler/challenges/euler007', u'Project Euler #7: 10001st prime', 'AC', '100', '-', ''), ('2014-12-08 06:00:42', u'https://www.hackerrank.com/challenges/find-hackerrank', u'Find HackerRank', 'AC', '100', '-', ''), ('2014-12-08 06:08:01', u'https://www.hackerrank.com/challenges/valid-pan-format', u'Valid PAN format', 'AC', '100', '-', ''), ('2014-12-08 06:17:05', u'https://www.hackerrank.com/challenges/hackerrank-tweets', u'HackerRank Tweets', 'AC', '100', '-', ''), ('2014-12-08 06:31:09', u'https://www.hackerrank.com/challenges/split-number', u'Split the Phone Numbers', 'AC', '100', '-', ''), ('2015-05-29 07:50:36', u'https://www.hackerrank.com/challenges/select-all-sql', u'Select All', 'AC', '100', '-', ''), ('2015-05-29 07:52:08', u'https://www.hackerrank.com/challenges/select-by-id', u'Select By ID', 'AC', '100', '-', ''), ('2015-05-29 07:53:21', 
u'https://www.hackerrank.com/challenges/japanese-cities-attributes', u"Japanese Cities' Attributes", 'AC', '100', '-', ''), ('2015-05-29 07:54:43', u'https://www.hackerrank.com/challenges/japanese-cities-name', u"Japanese Cities' Names", 'AC', '100', '-', ''), ('2015-05-29 07:57:45', u'https://www.hackerrank.com/challenges/average-population', u'Average Population', 'AC', '100', '-', ''), ('2015-05-29 07:59:00', u'https://www.hackerrank.com/challenges/japan-population', u'Japan Population', 'AC', '100', '-', ''), ('2015-05-30 09:47:34', u'https://www.hackerrank.com/challenges/py-hello-world', u'Say "Hello, World!" With Python', 'AC', '100', '-', ''), ('2015-05-30 09:48:41', u'https://www.hackerrank.com/challenges/python-raw-input', u'Reading Raw Input', 'AC', '100', '-', ''), ('2015-05-30 09:50:03', u'https://www.hackerrank.com/challenges/python-arithmetic-operators', u'Arithmetic Operators', 'AC', '100', '-', ''), ('2015-05-30 09:53:02', u'https://www.hackerrank.com/challenges/python-division', u'Python: Division', 'AC', '100', '-', ''), ('2015-05-30 09:55:01', u'https://www.hackerrank.com/challenges/python-mod-divmod', u'Mod Divmod', 'AC', '100', '-', ''), ('2015-05-30 22:23:33', u'https://www.hackerrank.com/contests/code-cpp-may-2015/challenges/redundant-or-not', u'Redundant or Not?', 'AC', '100', '-', ''), ('2015-05-30 22:31:57', u'https://www.hackerrank.com/contests/code-cpp-may-2015/challenges/string-transformations', u'String Transformations', 'AC', '100', '-', ''), ('2015-05-31 08:52:13', u'https://www.hackerrank.com/contests/code-cpp-may-2015/challenges/linked-list-to-binary', u'Linked List to Binary', 'AC', '100', '-', ''), ('2015-05-31 09:20:17', u'https://www.hackerrank.com/contests/code-cpp-may-2015/challenges/polygon-inheritance', u'Polygon Inheritance', 'AC', '100', '-', ''), ('2015-06-01 06:19:47', u'https://www.hackerrank.com/challenges/print-the-elements-of-a-linked-list', u'Print the Elements of a Linked List', 'AC', '100', '-', ''), ('2015-06-01 
06:22:43', u'https://www.hackerrank.com/challenges/insert-a-node-at-the-tail-of-a-linked-list', u'Insert a Node at the Tail of a Linked List', 'AC', '100', '-', ''), ('2015-06-01 06:24:34', u'https://www.hackerrank.com/challenges/insert-a-node-at-the-head-of-a-linked-list', u'Insert a node at the head of a linked list', 'AC', '100', '-', ''), ('2015-06-01 06:45:45', u'https://www.hackerrank.com/challenges/insert-a-node-at-a-specific-position-in-a-linked-list', u'Insert a node at a specific position in a linked list', 'AC', '100', '-', ''), ('2015-06-01 06:49:29', u'https://www.hackerrank.com/challenges/delete-a-node-from-a-linked-list', u'Delete a Node', 'AC', '100', '-', ''), ('2015-06-01 06:51:09', u'https://www.hackerrank.com/challenges/print-the-elements-of-a-linked-list-in-reverse', u'Print in Reverse', 'AC', '100', '-', ''), ('2015-06-01 06:56:24', u'https://www.hackerrank.com/challenges/reverse-a-linked-list', u'Reverse a linked list', 'AC', '100', '-', ''), ('2015-06-01 06:59:39', u'https://www.hackerrank.com/challenges/compare-two-linked-lists', u'Compare two linked lists', 'AC', '100', '-', ''), ('2015-06-01 07:07:07', u'https://www.hackerrank.com/challenges/merge-two-sorted-linked-lists', u'Merge two sorted linked lists', 'AC', '100', '-', ''), ('2015-06-01 07:12:02', u'https://www.hackerrank.com/challenges/get-the-value-of-the-node-at-a-specific-position-from-the-tail', u'Get Node Value', 'AC', '100', '-', ''), ('2015-06-01 07:18:57', u'https://www.hackerrank.com/challenges/delete-duplicate-value-nodes-from-a-sorted-linked-list', u'Delete duplicate-value nodes from a sorted linked list', 'AC', '100', '-', ''), ('2015-06-01 07:25:20', u'https://www.hackerrank.com/challenges/detect-whether-a-linked-list-contains-a-cycle', u'Cycle Detection', 'AC', '100', '-', ''), ('2015-06-01 07:39:03', u'https://www.hackerrank.com/challenges/find-the-merge-point-of-two-joined-linked-lists', u'Find Merge Point of Two Lists', 'AC', '100', '-', ''), ('2015-06-01 07:55:58', 
u'https://www.hackerrank.com/challenges/insert-a-node-into-a-sorted-doubly-linked-list', u'Inserting a Node Into a Sorted Doubly Linked List', 'AC', '100', '-', ''), ('2015-06-01 08:05:55', u'https://www.hackerrank.com/challenges/reverse-a-doubly-linked-list', u'Reverse a doubly linked list', 'AC', '100', '-', ''), ('2015-06-01 08:07:24', u'https://www.hackerrank.com/challenges/tree-preorder-traversal', u'Tree: Preorder Traversal', 'AC', '100', '-', ''), ('2015-06-01 08:09:21', u'https://www.hackerrank.com/challenges/tree-postorder-traversal', u'Tree: Postorder Traversal', 'AC', '100', '-', ''), ('2015-06-01 08:10:09', u'https://www.hackerrank.com/challenges/tree-inorder-traversal', u'Tree: Inorder Traversal', 'AC', '100', '-', ''), ('2015-06-03 03:08:32', u'https://www.hackerrank.com/challenges/connecting-towns', u'Connecting Towns', 'AC', '100', '-', ''), ('2015-06-03 03:13:31', u'https://www.hackerrank.com/challenges/handshake', u'Handshake', 'AC', '100', '-', ''), ('2015-06-03 03:17:17', u'https://www.hackerrank.com/challenges/correctness-invariant', u'Correctness and the Loop Invariant', 'AC', '100', '-', ''), ('2015-06-03 03:22:14', u'https://www.hackerrank.com/challenges/tutorial-intro', u'Intro to Tutorial Challenges', 'AC', '100', '-', ''), ('2015-06-10 11:27:13', u'https://www.hackerrank.com/contests/the-linux-bash-fest/challenges/text-processing-in-linux-the-grep-command-4', u"'Grep' - A", 'AC', '100', '-', ''), ('2015-06-10 11:32:34', u'https://www.hackerrank.com/contests/the-linux-bash-fest/challenges/paste-1', u'Paste - 1', 'AC', '100', '-', ''), ('2015-06-10 11:52:57', u'https://www.hackerrank.com/contests/the-linux-bash-fest/challenges/awk-1', u"'Awk' - 1", 'AC', '100', '-', ''), ('2015-06-10 11:56:28', u'https://www.hackerrank.com/contests/the-linux-bash-fest/challenges/awk-2', u"'Awk' - 2", 'AC', '100', '-', ''), ('2015-06-10 12:10:10', 
u'https://www.hackerrank.com/contests/the-linux-bash-fest/challenges/text-processing-in-linux-the-grep-command-5', u"'Grep' - B", 'AC', '100', '-', ''), ('2015-06-27 21:35:13', u'https://www.hackerrank.com/contests/segfault/challenges/three-loops', u'Three Loops', 'AC', '100', '-', ''), ('2015-06-27 22:25:24', u'https://www.hackerrank.com/contests/segfault/challenges/count-the-divisors', u'Count the Divisors', 'AC', '100', '-', ''), ('2015-08-01 21:58:15', u'https://www.hackerrank.com/contests/countercode/challenges/imba', u'Imba', 'AC', '100', '-', ''), ('2015-08-01 22:46:04', u'https://www.hackerrank.com/contests/countercode/challenges/campers', u'Campers', 'AC', '100', '-', ''), ('2015-10-30 02:51:27', u'https://www.hackerrank.com/contests/codestorm/challenges/emmas-notebook', u"Emma's Notebook", 'AC', '100', '-', ''), ('2016-08-06 21:37:21', u'https://www.hackerrank.com/contests/morgan-stanley-2016/challenges/jesse-and-profit', u'Jesse and Profit', 'AC', '100', '-', ''), ('2016-08-24 06:14:46', u'https://www.hackerrank.com/challenges/30-hello-world', u'Day 0: Hello, World.', 'AC', '100', '-', ''), ('2017-11-03 00:51:08', u'https://www.hackerrank.com/challenges/30-data-types', u'Day 1: Data Types', 'AC', '100', '-', '')],
"UVa": [('2016-12-11 20:21:23', 'https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=38', 'Ecological Bin Packing', 'WA', '0', 'C++', ''), ('2016-12-14 05:23:40', 'https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=38', 'Ecological Bin Packing', 'CE', '0', 'C++', '')],
"Timus": [('2018-07-01 01:41:04', 'http://acm.timus.ru/problem.aspx?space=1&num=1285&locale=en', u'1285. Thread in a Hyperspace', 'CE', '0', u'G++ 7.1', '')],
"AtCoder": [('2020-05-16 19:04:34', u'https://atcoder.jp/contests/abc135/tasks/abc135_d', 'D. Digits Parade', u'WA', 0.0, u'Python2 (2.7.6)', u'https://atcoder.jp/contests/abc135/submissions/13262993'), ('2020-05-18 12:04:47', u'https://atcoder.jp/contests/abc135/tasks/abc135_d', 'D. Digits Parade', u'WA', 0.0, u'Python2 (2.7.6)', u'https://atcoder.jp/contests/abc135/submissions/13368979'), ('2020-05-18 12:58:01', u'https://atcoder.jp/contests/agc010/tasks/agc010_a', 'A. Addition', u'RE', 0.0, u'Python2 (2.7.6)', u'https://atcoder.jp/contests/agc010/submissions/13370205')]
}
uva_problem_dict = utilities.get_problem_mappings(uvadb, uvadb.problem,
["problem_id",
"title"])
atcoder_problem_dict = utilities.get_problem_mappings(db,
db.atcoder_problems,
["problem_identifier",
"name"])
last_retrieved = time.strptime(str(current.INITIAL_DATE), "%Y-%m-%d %H:%M:%S")
for site in handles:
Profile = getattr(sites, site.lower()).Profile
if Profile.is_website_down():
continue
site_method = Profile(handles[site]).get_submissions
if site == "UVa":
submissions = site_method(last_retrieved, uva_problem_dict, False)
elif site == "AtCoder":
submissions = site_method(last_retrieved, atcoder_problem_dict, False)
else:
submissions = site_method(last_retrieved, False)
submissions = sorted(submissions)
if submissions != expected_result[site]:
raise RuntimeError("Submissions list does not match for " + site)
# ------------------------------------------------------------------------------
def test_retrieval(retrieval_object, method_name):
error_message = ""
for i in xrange(1):
try:
getattr(retrieval_object, method_name)()
return "Success"
except Exception as e:
error_message = method_name + " " + e.message
time.sleep(2)
return error_message
# Run each retrieval smoke test once and accumulate any failure messages.
rt = RetrievalTest()
pushover_message = ""
for method_name in [
    "test_tag_retrieval",
    "test_editorial_retrieval",
    "test_invalid_handle",
    "test_download_submission",
    "test_rating_graph",
    "test_submissions",
    "test_problem_setters_retrieval"
]:
    res = test_retrieval(rt, method_name)
    if res != "Success":
        # Collect every failing method's message into one alert body.
        pushover_message += res + "\n"
# If anything failed, send a single aggregated alert via the Pushover API.
if pushover_message != "":
    print "pushover_message", pushover_message
    # NOTE(review): verify=False disables TLS certificate verification —
    # presumably deliberate for this environment, but worth confirming.
    response = requests.post("https://api.pushover.net/1/messages.json",
                             data={"token": current.pushover_api_token,
                                   "user": current.pushover_user_token,
                                   "message": pushover_message.strip(),
                                   "title": "Extras retrieval failure",
                                   "priority": 1},
                             verify=False).json()
| 612.135135
| 77,973
| 0.671394
| 37,884
| 226,490
| 4.006678
| 0.065727
| 0.070624
| 0.103723
| 0.123171
| 0.809051
| 0.76659
| 0.752044
| 0.730784
| 0.686868
| 0.661069
| 0
| 0.147039
| 0.077646
| 226,490
| 369
| 77,974
| 613.794038
| 0.579558
| 0.004468
| 0
| 0.204778
| 0
| 0.433447
| 0.773313
| 0.009853
| 0
| 0
| 0
| 0
| 0.061433
| 0
| null | null | 0.003413
| 0.020478
| null | null | 0.013652
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e3c2f844c2087ae92aae657651ec04883f4a7f20
| 1,680
|
py
|
Python
|
Leetcode/Python/_832.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | 1
|
2021-11-28T15:03:32.000Z
|
2021-11-28T15:03:32.000Z
|
Leetcode/Python/_832.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
Leetcode/Python/_832.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
class Solution:
    def flipAndInvertImage(self, image: List[List[int]]) -> List[List[int]]:
        """Reverse each row and invert every bit, mutating *image* in place.

        Args:
            image: Binary matrix (rows of 0/1 values, all the same width).

        Returns:
            The same list object, with each row horizontally flipped and
            every bit inverted (0 -> 1, 1 -> 0).
        """
        for row in image:
            # Slice assignment mutates the existing row object, preserving
            # the original in-place contract while replacing the manual
            # flag / temp-swap bookkeeping for odd-width rows.
            row[:] = [1 ^ bit for bit in reversed(row)]
        return image
class Solution:
    def flipAndInvertImage(self, image: List[List[int]]) -> List[List[int]]:
        """Flip each row horizontally, then invert all bits (two passes)."""
        invert = {0: 1, 1: 0}
        height = len(image)
        width = len(image[0])
        # Pass 1: mirror every row in place by swapping symmetric cells.
        for r in range(height):
            for c in range(width // 2):
                mirror = width - c - 1
                image[r][c], image[r][mirror] = image[r][mirror], image[r][c]
        # Pass 2: invert every bit through the lookup table.
        for r in range(height):
            for c in range(width):
                image[r][c] = invert[image[r][c]]
        return image
class Solution:
    def flipAndInvertImage(self, image: List[List[int]]) -> List[List[int]]:
        """Mirror and invert each row in a single pass over half the row."""
        flipped = {0: 1, 1: 0}
        n_cols = len(image[0])
        has_middle = n_cols % 2 != 0
        for row in image:
            if has_middle:
                # An odd-width row has a centre cell that only needs inverting.
                mid = n_cols // 2
                row[mid] = flipped[row[mid]]
            for left in range(n_cols // 2):
                right = n_cols - left - 1
                # Swap the symmetric pair while inverting both bits.
                row[left], row[right] = flipped[row[right]], flipped[row[left]]
        return image
| 35
| 76
| 0.48631
| 219
| 1,680
| 3.730594
| 0.118721
| 0.176255
| 0.146879
| 0.110159
| 0.948592
| 0.948592
| 0.908201
| 0.906977
| 0.906977
| 0.906977
| 0
| 0.029245
| 0.369048
| 1,680
| 47
| 77
| 35.744681
| 0.741509
| 0
| 0
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3c838bad514972b2d06743d0769fdd8d16b43c5
| 8,612
|
py
|
Python
|
tests/concurrency_oracle_test.py
|
AutomatedProcessImprovement/start-time-estimator
|
ec9884a03169ab8db4bf29799b2cebcf085aff48
|
[
"Apache-2.0"
] | null | null | null |
tests/concurrency_oracle_test.py
|
AutomatedProcessImprovement/start-time-estimator
|
ec9884a03169ab8db4bf29799b2cebcf085aff48
|
[
"Apache-2.0"
] | null | null | null |
tests/concurrency_oracle_test.py
|
AutomatedProcessImprovement/start-time-estimator
|
ec9884a03169ab8db4bf29799b2cebcf085aff48
|
[
"Apache-2.0"
] | null | null | null |
from datetime import datetime
from estimate_start_times.concurrency_oracle import AlphaConcurrencyOracle, HeuristicsConcurrencyOracle, \
NoConcurrencyOracle, DeactivatedConcurrencyOracle
from estimate_start_times.config import Configuration, HeuristicsThresholds
from estimate_start_times.utils import read_csv_log
def test_deactivated_concurrency_oracle():
    config = Configuration()
    oracle = DeactivatedConcurrencyOracle(config)
    # The oracle keeps the configuration it was built with.
    assert oracle.config == config
    # No concurrency relations are recorded by default.
    assert oracle.concurrency == {}
    # A deactivated oracle answers every query with the sentinel
    # [non_estimated_time], regardless of the event passed in.
    assert oracle.enabled_since(None, datetime.now()) == config.non_estimated_time
    assert oracle.enabled_since(None, datetime.fromisoformat('2012-11-07T10:00:00.000+02:00')) == config.non_estimated_time
    assert oracle.enabled_since(None, datetime.fromisoformat('2006-07-20T22:03:11.000+02:00')) == config.non_estimated_time
def test_no_concurrency_oracle():
    config = Configuration()
    log = read_csv_log('./tests/assets/test_event_log_1.csv', config)
    oracle = NoConcurrencyOracle(log, config)
    # Every activity starts with an empty concurrency set.
    assert oracle.concurrency == {
        'A': set(), 'B': set(), 'C': set(), 'D': set(), 'E': set(),
        'F': set(), 'G': set(), 'H': set(), 'I': set()
    }
    # The oracle keeps the configuration it was built with.
    assert oracle.config == config
    # Without concurrency, an event is enabled when its predecessor ends.
    trace_1 = log[log[config.log_ids.case] == 'trace-01']
    assert oracle.enabled_since(trace_1, trace_1.iloc[4]) == trace_1.iloc[3][config.log_ids.end_time]
    trace_3 = log[log[config.log_ids.case] == 'trace-03']
    assert oracle.enabled_since(trace_3, trace_3.iloc[3]) == trace_3.iloc[2][config.log_ids.end_time]
    # The first event of a trace gets the [non_estimated_time] sentinel.
    trace_4 = log[log[config.log_ids.case] == 'trace-04']
    assert oracle.enabled_since(trace_4, trace_4.iloc[0]) == config.non_estimated_time
def test_alpha_concurrency_oracle():
    config = Configuration()
    log = read_csv_log('./tests/assets/test_event_log_1.csv', config)
    oracle = AlphaConcurrencyOracle(log, config)
    # Only C and D are observed in both orders, so only they are concurrent.
    assert oracle.concurrency == {
        'A': set(), 'B': set(), 'C': {'D'}, 'D': {'C'}, 'E': set(),
        'F': set(), 'G': set(), 'H': set(), 'I': set()
    }
    # The oracle keeps the configuration it was built with.
    assert oracle.config == config
    # Non-concurrent events are enabled when the previous event ends.
    trace_1 = log[log[config.log_ids.case] == 'trace-01']
    assert oracle.enabled_since(trace_1, trace_1.iloc[6]) == trace_1.iloc[5][config.log_ids.end_time]
    trace_3 = log[log[config.log_ids.case] == 'trace-03']
    assert oracle.enabled_since(trace_3, trace_3.iloc[5]) == trace_3.iloc[4][config.log_ids.end_time]
    # When the previous event is concurrent, the causal input enables instead.
    trace_2 = log[log[config.log_ids.case] == 'trace-02']
    assert oracle.enabled_since(trace_2, trace_2.iloc[3]) == trace_2.iloc[1][config.log_ids.end_time]
    trace_4 = log[log[config.log_ids.case] == 'trace-04']
    assert oracle.enabled_since(trace_4, trace_4.iloc[3]) == trace_4.iloc[1][config.log_ids.end_time]
    # The first event of a trace gets the [non_estimated_time] sentinel.
    assert oracle.enabled_since(trace_4, trace_4.iloc[0]) == config.non_estimated_time
def test_heuristics_concurrency_oracle_simple():
    config = Configuration()
    log = read_csv_log('./tests/assets/test_event_log_1.csv', config)
    oracle = HeuristicsConcurrencyOracle(log, config)
    # Activities observed in both orders (C and D) are flagged concurrent.
    assert oracle.concurrency == {
        'A': set(), 'B': set(), 'C': {'D'}, 'D': {'C'}, 'E': set(),
        'F': set(), 'G': set(), 'H': set(), 'I': set()
    }
    # The oracle keeps the configuration it was built with.
    assert oracle.config == config
    # Non-concurrent events are enabled when the previous event ends.
    trace_1 = log[log[config.log_ids.case] == 'trace-01']
    assert oracle.enabled_since(trace_1, trace_1.iloc[6]) == trace_1.iloc[5][config.log_ids.end_time]
    trace_3 = log[log[config.log_ids.case] == 'trace-03']
    assert oracle.enabled_since(trace_3, trace_3.iloc[5]) == trace_3.iloc[4][config.log_ids.end_time]
    # When the previous event is concurrent, the causal input enables instead.
    trace_2 = log[log[config.log_ids.case] == 'trace-02']
    assert oracle.enabled_since(trace_2, trace_2.iloc[3]) == trace_2.iloc[1][config.log_ids.end_time]
    trace_4 = log[log[config.log_ids.case] == 'trace-04']
    assert oracle.enabled_since(trace_4, trace_4.iloc[3]) == trace_4.iloc[1][config.log_ids.end_time]
    # The first event of a trace gets the [non_estimated_time] sentinel.
    assert oracle.enabled_since(trace_4, trace_4.iloc[0]) == config.non_estimated_time
def test_heuristics_concurrency_oracle_multi_parallel():
    config = Configuration()
    log = read_csv_log('./tests/assets/test_event_log_3.csv', config)
    oracle = HeuristicsConcurrencyOracle(log, config)
    # The oracle keeps the configuration it was built with.
    assert oracle.config == config
    # Activities observed in both orders are flagged as concurrent.
    expected = {
        'A': set(),
        'B': set(),
        'C': {'D', 'F', 'G'},
        'D': {'C', 'E'},
        'E': {'D', 'F', 'G'},
        'F': {'C', 'E'},
        'G': {'C', 'E'},
        'H': set(),
        'I': set()
    }
    assert oracle.concurrency == expected
def test_heuristics_concurrency_oracle_multi_parallel_noise():
    config = Configuration()
    log = read_csv_log('./tests/assets/test_event_log_3_noise.csv', config)
    oracle = HeuristicsConcurrencyOracle(log, config)
    # The oracle keeps the configuration it was built with.
    assert oracle.config == config
    # With default thresholds the noise does not change the concurrency
    # discovered among C/D/E/F/G.
    assert oracle.concurrency == {
        'A': set(),
        'B': set(),
        'C': {'D', 'F', 'G'},
        'D': {'C', 'E'},
        'E': {'D', 'F', 'G'},
        'F': {'C', 'E'},
        'G': {'C', 'E'},
        'H': set(),
        'I': set()
    }
    # With maximal thresholds, directly-follows and length-2-loop relations
    # must hold every time the activities appear; the noisy H/I pair is
    # then reported as concurrent too.
    strict_config = Configuration(heuristics_thresholds=HeuristicsThresholds(df=1.0, l2l=1.0))
    strict_oracle = HeuristicsConcurrencyOracle(log, strict_config)
    assert strict_oracle.config == strict_config
    assert strict_oracle.concurrency == {
        'A': set(),
        'B': set(),
        'C': {'D', 'F', 'G'},
        'D': {'C', 'E'},
        'E': {'D', 'F', 'G'},
        'F': {'C', 'E'},
        'G': {'C', 'E'},
        'H': {'I'},
        'I': {'H'}
    }
| 57.413333
| 135
| 0.701115
| 1,164
| 8,612
| 4.975945
| 0.110825
| 0.129144
| 0.11913
| 0.082873
| 0.856526
| 0.853936
| 0.852728
| 0.812155
| 0.792818
| 0.785394
| 0
| 0.013856
| 0.178704
| 8,612
| 149
| 136
| 57.798658
| 0.805033
| 0.245239
| 0
| 0.711538
| 0
| 0
| 0.065409
| 0.036957
| 0
| 0
| 0
| 0
| 0.288462
| 1
| 0.057692
| false
| 0
| 0.038462
| 0
| 0.096154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3cc74cc84188443c3bd83aed436a62986ff8518
| 133
|
py
|
Python
|
vedasal/criteria/builder.py
|
Kuro96/vedasal
|
3c5588bf12059af5bd7bc779fd5f9dc0b2901cb2
|
[
"Apache-2.0"
] | 2
|
2020-11-06T06:39:04.000Z
|
2020-11-11T03:39:22.000Z
|
vedasal/criteria/builder.py
|
Kuro96/vedasal
|
3c5588bf12059af5bd7bc779fd5f9dc0b2901cb2
|
[
"Apache-2.0"
] | null | null | null |
vedasal/criteria/builder.py
|
Kuro96/vedasal
|
3c5588bf12059af5bd7bc779fd5f9dc0b2901cb2
|
[
"Apache-2.0"
] | null | null | null |
from vedacore.misc import registry, build_from_cfg
def build_criterion(cfg):
    """Instantiate a criterion module from *cfg* via the shared registry."""
    criterion = build_from_cfg(cfg, registry, 'criterion')
    return criterion
| 22.166667
| 53
| 0.789474
| 19
| 133
| 5.263158
| 0.526316
| 0.18
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12782
| 133
| 5
| 54
| 26.6
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0.067669
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
584c5ab652482b808aa02756a227b4da76b05849
| 5,556
|
py
|
Python
|
ptsemseg/models/__init__.py
|
PhyllisH/MultiAgentPerception
|
f73b8131cd4314c42f09d3502bd63d434a4ec219
|
[
"MIT"
] | 30
|
2020-06-16T00:19:19.000Z
|
2022-03-18T19:37:48.000Z
|
ptsemseg/models/__init__.py
|
hyzcn/MultiAgentPerception
|
f73b8131cd4314c42f09d3502bd63d434a4ec219
|
[
"MIT"
] | 2
|
2021-11-08T01:46:18.000Z
|
2021-11-18T06:45:06.000Z
|
ptsemseg/models/__init__.py
|
hyzcn/MultiAgentPerception
|
f73b8131cd4314c42f09d3502bd63d434a4ec219
|
[
"MIT"
] | 8
|
2020-07-09T03:08:45.000Z
|
2022-03-24T16:07:29.000Z
|
import copy
import torchvision.models as models
from ptsemseg.models.agent import Single_agent, All_agents, LearnWho2Com, LearnWhen2Com, MIMOcom,MIMO_All_agents, MIMOcomWho
def get_model(model_dict, n_classes, version=None):
    """Instantiate the model architecture named in the configuration.

    Args:
        model_dict: Parsed config dict; reads model_dict["model"] (arch name
            and hyper-parameters) and, for the communication variants,
            model_dict["data"]["img_rows"].
        n_classes: Number of output classes for the model head.
        version: Unused; kept for backward compatibility with callers.

    Returns:
        The instantiated model object.
    """
    model_cfg = model_dict["model"]
    name = model_cfg["arch"]
    model_cls = _get_model_instance(name)
    in_channels = 3
    # Arguments shared by every architecture; the per-arch branches below
    # only add their extras, removing the original copy-pasted call sites.
    kwargs = {
        'n_classes': n_classes,
        'in_channels': in_channels,
        'enc_backbone': model_cfg['enc_backbone'],
        'dec_backbone': model_cfg['dec_backbone'],
    }
    if name == "Single_agent":
        kwargs.update(feat_squeezer=model_cfg['feat_squeezer'],
                      feat_channel=model_cfg['feat_channel'])
    elif name in ("All_agents", "MIMO_All_agents"):
        kwargs.update(aux_agent_num=model_cfg['agent_num'],
                      shuffle_flag=model_cfg['shuffle_features'],
                      feat_squeezer=model_cfg['feat_squeezer'],
                      feat_channel=model_cfg['feat_channel'])
    elif name in ("LearnWho2Com", "LearnWhen2Com", "MIMOcom", "MIMOcomWho"):
        # The communication models take identical arguments except for the
        # name of the agent-count keyword (aux_agent_num vs agent_num).
        agent_key = ('aux_agent_num'
                     if name in ("LearnWho2Com", "LearnWhen2Com")
                     else 'agent_num')
        kwargs.update({
            'attention': model_cfg['attention'],
            'has_query': model_cfg['query'],
            'sparse': model_cfg['sparse'],
            agent_key: model_cfg['agent_num'],
            'shared_img_encoder': model_cfg["shared_img_encoder"],
            'image_size': model_dict["data"]["img_rows"],
            'query_size': model_cfg["query_size"],
            'key_size': model_cfg["key_size"],
        })
    # Any other name falls through with just the shared arguments, matching
    # the original else branch (in practice _get_model_instance raises for
    # names outside its mapping before we get here).
    return model_cls(**kwargs)
def _get_model_instance(name):
    """Map an architecture name to its model class.

    Args:
        name: Architecture identifier from the config's "arch" field.

    Returns:
        The model class registered under *name*.

    Raises:
        ValueError: If *name* is not a known architecture. (The original
            code did ``raise "Model ... not available"``, which raising a
            plain string actually surfaced as a TypeError.)
    """
    models_by_name = {
        "Single_agent": Single_agent,
        "All_agents": All_agents,
        "MIMO_All_agents": MIMO_All_agents,
        'LearnWho2Com': LearnWho2Com,
        'LearnWhen2Com': LearnWhen2Com,
        'MIMOcom': MIMOcom,
        'MIMOcomWho': MIMOcomWho,
    }
    try:
        return models_by_name[name]
    except KeyError:
        # Catch only the missing-key case instead of the original bare
        # except, which silently swallowed any other error.
        raise ValueError("Model {} not available".format(name))
| 54.470588
| 124
| 0.567495
| 588
| 5,556
| 4.979592
| 0.107143
| 0.184426
| 0.262978
| 0.120219
| 0.826161
| 0.814208
| 0.814208
| 0.814208
| 0.814208
| 0.814208
| 0
| 0.002313
| 0.299676
| 5,556
| 101
| 125
| 55.009901
| 0.750193
| 0
| 0
| 0.626374
| 0
| 0
| 0.195104
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021978
| false
| 0
| 0.032967
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5852ed6dde129e90e4c080dd55f9b986f7a66de9
| 12,608
|
py
|
Python
|
timepiece/crm/tests/test_relationships.py
|
sha-red/django-timepiece
|
52515dec027664890efbc535429e1ba1ee152f40
|
[
"MIT"
] | 244
|
2015-01-08T11:06:52.000Z
|
2022-03-24T14:59:26.000Z
|
timepiece/crm/tests/test_relationships.py
|
skampala1/django-timepiece
|
52515dec027664890efbc535429e1ba1ee152f40
|
[
"MIT"
] | 80
|
2015-01-23T13:45:02.000Z
|
2021-11-25T09:17:05.000Z
|
timepiece/crm/tests/test_relationships.py
|
anfema/django-timepiece
|
c4594ae053bdb14c1dd7f17f32c6ef1bf0f3b453
|
[
"MIT"
] | 109
|
2015-01-24T06:52:56.000Z
|
2022-03-29T09:35:06.000Z
|
from django.contrib.auth.models import Permission
from django.core.urlresolvers import reverse
from django.test import TestCase
from timepiece.tests import factories
from timepiece.tests.base import ViewTestMixin
from timepiece.crm.models import ProjectRelationship
class RelationshipTestBase(TestCase):
    """Shared fixture: a logged-in user holding perm_names, plus a project."""

    def setUp(self):
        super(RelationshipTestBase, self).setUp()
        self.user = factories.User()
        self.permissions = [
            Permission.objects.get(codename=name) for name in self.perm_names
        ]
        self.user.user_permissions.add(*self.permissions)
        self.login_user(self.user)
        self.project = factories.Project()
class TestAddProjectToUser(ViewTestMixin, RelationshipTestBase):
    """Tests for creating a relationship starting from a user."""

    url_name = 'create_relationship'
    perm_names = ['add_projectrelationship']

    @property
    def get_kwargs(self):
        return {'user_id': self.user.pk}

    def _data(self):
        return {'project_1': self.project.pk}

    def test_other_methods(self):
        """Only POST is accepted; every other verb gets a 405."""
        disallowed = (self.client.get, self.client.head, self.client.put,
                      self.client.delete)
        for method in disallowed:
            resp = method(self._url())
            failure_msg = ('{method} request '
                           'did not have expected code: {actual} instead of '
                           '{expected}'.format(method=method,
                                               actual=resp.status_code,
                                               expected=405))
            self.assertEquals(resp.status_code, 405, failure_msg)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_permission(self):
        """Without the permission the view redirects and adds nothing."""
        self.user.user_permissions.remove(*self.permissions)
        resp = self._post()
        self.assertEquals(resp.status_code, 302)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_bad_user_id(self):
        """An unknown user id yields a 404 and creates nothing."""
        resp = self._post(get_kwargs={'user_id': '12345'})
        self.assertEquals(resp.status_code, 404)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_bad_project_id(self):
        """An unknown project id redirects without creating anything."""
        resp = self._post(data={'project_1': '12345'})
        self.assertEquals(resp.status_code, 302)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_add_again(self):
        """Re-adding an existing project relationship is a no-op."""
        factories.ProjectRelationship(project=self.project, user=self.user)
        resp = self._post(data=self._data())
        self.assertEquals(resp.status_code, 302)
        created = ProjectRelationship.objects.get()
        self.assertEquals(created.project, self.project)
        self.assertEquals(created.user, self.user)

    def test_redirect_to_dashboard(self):
        """With no next url, success redirects to the dashboard."""
        resp = self._post(data=self._data())
        self.assertRedirectsNoFollow(resp, reverse('dashboard'))
        created = ProjectRelationship.objects.get()
        self.assertEquals(created.project, self.project)
        self.assertEquals(created.user, self.user)

    def test_redirect_to_next(self):
        """A next parameter wins over the default redirect target."""
        kwargs = self.get_kwargs
        kwargs.update({'next': '/hello'})
        resp = self._post(data=self._data(), get_kwargs=kwargs)
        self.assertRedirectsNoFollow(resp, '/hello')
        created = ProjectRelationship.objects.get()
        self.assertEquals(created.project, self.project)
        self.assertEquals(created.user, self.user)
class TestAddUserToProject(ViewTestMixin, RelationshipTestBase):
    """Tests for creating a relationship starting from a project."""

    url_name = 'create_relationship'
    perm_names = ['change_projectrelationship', 'add_projectrelationship']

    @property
    def get_kwargs(self):
        return {'project_id': self.project.pk}

    def _data(self):
        return {'user_1': self.user.pk}

    def test_other_methods(self):
        """Only POST is accepted; every other verb gets a 405."""
        disallowed = (self.client.get, self.client.head, self.client.put,
                      self.client.delete)
        for method in disallowed:
            resp = method(self._url())
            failure_msg = ('{method} request did not have '
                           'expected code: {actual} instead of '
                           '{expected}'.format(method=method,
                                               actual=resp.status_code,
                                               expected=405))
            self.assertEquals(resp.status_code, 405, failure_msg)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_permission(self):
        """Without the permission the view redirects and adds nothing."""
        self.user.user_permissions.remove(*self.permissions)
        resp = self._post()
        self.assertEquals(resp.status_code, 302)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_bad_project_id(self):
        """An unknown project id yields a 404 and creates nothing."""
        resp = self._post(get_kwargs={'project_id': '12345'})
        self.assertEquals(resp.status_code, 404)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_bad_user_id(self):
        """An unknown user id redirects without creating anything."""
        resp = self._post(data={'user_1': '12345'})
        self.assertEquals(resp.status_code, 302)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_add_again(self):
        """Re-adding an existing user relationship is a no-op."""
        factories.ProjectRelationship(project=self.project, user=self.user)
        resp = self._post(data=self._data())
        self.assertEquals(resp.status_code, 302)
        created = ProjectRelationship.objects.get()
        self.assertEquals(created.project, self.project)
        self.assertEquals(created.user, self.user)

    def test_redirect_to_dashboard(self):
        """With no next url, success redirects to the dashboard."""
        resp = self._post(data=self._data())
        self.assertRedirectsNoFollow(resp, reverse('dashboard'))
        created = ProjectRelationship.objects.get()
        self.assertEquals(created.project, self.project)
        self.assertEquals(created.user, self.user)

    def test_redirect_to_next(self):
        """A next parameter wins over the default redirect target."""
        kwargs = self.get_kwargs
        kwargs.update({'next': '/hello'})
        resp = self._post(data=self._data(), get_kwargs=kwargs)
        self.assertRedirectsNoFollow(resp, '/hello')
        created = ProjectRelationship.objects.get()
        self.assertEquals(created.project, self.project)
        self.assertEquals(created.user, self.user)
class TestEditRelationship(ViewTestMixin, RelationshipTestBase):
    """Tests for editing an existing project/user relationship."""

    url_name = 'edit_relationship'
    perm_names = ['change_projectrelationship']

    @property
    def get_kwargs(self):
        return {'project_id': self.project.pk, 'user_id': self.user.pk}

    def _data(self):
        return {'types': [self.rel_type1.pk, self.rel_type2.pk]}

    def setUp(self):
        super(TestEditRelationship, self).setUp()
        self.relationship = factories.ProjectRelationship(
            project=self.project, user=self.user)
        self.rel_type1 = factories.RelationshipType()
        self.rel_type2 = factories.RelationshipType()

    def test_permission(self):
        """Both GET and POST redirect when the permission is missing."""
        self.user.user_permissions.remove(*self.permissions)
        for method in (self._get, self._post):
            resp = method()
            self.assertEquals(resp.status_code, 302)

    def test_bad_user_id(self):
        """An unknown user id yields a 404 and leaves the data unchanged."""
        kwargs = {'user_id': '12345', 'project_id': self.project.pk}
        for method in (self._get, self._post):
            resp = method(get_kwargs=kwargs)
            self.assertEquals(resp.status_code, 404)
        self.assertEquals(ProjectRelationship.objects.get(), self.relationship)

    def test_bad_project_id(self):
        """An unknown project id yields a 404 and leaves the data unchanged."""
        kwargs = {'user_id': self.user.pk, 'project_id': '12345'}
        for method in (self._get, self._post):
            resp = method(get_kwargs=kwargs)
            self.assertEquals(resp.status_code, 404)
        self.assertEquals(ProjectRelationship.objects.get(), self.relationship)

    def test_non_existant_relationship(self):
        """A deleted relationship yields a 404 for both GET and POST."""
        self.relationship.delete()
        for method in (self._get, self._post):
            resp = method()
            self.assertEquals(resp.status_code, 404)
        self.assertEquals(ProjectRelationship.objects.count(), 0)

    def test_get(self):
        """GET returns an unbound form pre-populated with the instance."""
        resp = self._get()
        self.assertEquals(resp.status_code, 200)
        form = resp.context['form']
        self.assertEquals(ProjectRelationship.objects.get(), self.relationship)
        self.assertEqual(resp.context['object'], self.relationship)
        self.assertFalse(form.is_bound)
        self.assertEquals(form.instance, self.relationship)

    def test_redirect_to_project_page(self):
        """Successful edits redirect to the project page by default."""
        project_url = reverse('view_project', args=(self.project.pk,))
        resp = self._post(data=self._data())
        self.assertRedirectsNoFollow(resp, project_url)
        edited = ProjectRelationship.objects.get()
        self.assertEquals(edited.project, self.project)
        self.assertEquals(edited.user, self.user)
        self.assertEquals(edited.types.count(), 2)
        self.assertTrue(self.rel_type1 in edited.types.all())
        self.assertTrue(self.rel_type2 in edited.types.all())

    def test_redirect_to_next(self):
        """A next parameter wins over the default redirect target."""
        kwargs = self.get_kwargs
        kwargs.update({'next': '/hello'})
        resp = self._post(data=self._data(), get_kwargs=kwargs)
        self.assertRedirectsNoFollow(resp, '/hello')
        edited = ProjectRelationship.objects.get()
        self.assertEquals(edited.project, self.project)
        self.assertEquals(edited.user, self.user)
        self.assertEquals(edited.types.count(), 2)
        self.assertTrue(self.rel_type1 in edited.types.all())
        self.assertTrue(self.rel_type2 in edited.types.all())
class TestDeleteRelationship(ViewTestMixin, RelationshipTestBase):
    """Tests for removing a project/user relationship."""

    url_name = 'delete_relationship'
    perm_names = ['delete_projectrelationship']

    def setUp(self):
        super(TestDeleteRelationship, self).setUp()
        self.relationship = factories.ProjectRelationship(
            project=self.project, user=self.user)

    @property
    def get_kwargs(self):
        return {'project_id': self.project.pk, 'user_id': self.user.pk}

    def test_get_no_delete(self):
        """GET renders a confirmation page without deleting anything."""
        resp = self._get()
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(ProjectRelationship.objects.count(), 1)

    def test_permission(self):
        """Without the permission the view redirects and deletes nothing."""
        self.user.user_permissions.remove(*self.permissions)
        resp = self._post()
        self.assertEquals(resp.status_code, 302)
        self.assertEquals(ProjectRelationship.objects.count(), 1)

    def test_bad_user_id(self):
        """An unknown user id yields a 404 and deletes nothing."""
        kwargs = {'user_id': '12345', 'project_id': self.project.pk}
        resp = self._post(get_kwargs=kwargs)
        self.assertEquals(resp.status_code, 404)
        self.assertEquals(ProjectRelationship.objects.count(), 1)

    def test_bad_project_id(self):
        """An unknown project id yields a 404 and deletes nothing."""
        kwargs = {'user_id': self.user.pk, 'project_id': '12345'}
        resp = self._post(get_kwargs=kwargs)
        self.assertEquals(resp.status_code, 404)
        self.assertEquals(ProjectRelationship.objects.count(), 1)

    def test_non_existant_relationship(self):
        """POST for a relationship that no longer exists yields a 404."""
        self.relationship.delete()
        resp = self._post()
        self.assertEquals(resp.status_code, 404)
        self.assertEquals(ProjectRelationship.objects.count(), 0)
| 41.337705
| 94
| 0.669575
| 1,440
| 12,608
| 5.711806
| 0.095833
| 0.105046
| 0.048146
| 0.069301
| 0.808389
| 0.793921
| 0.778967
| 0.772036
| 0.736657
| 0.715137
| 0
| 0.016
| 0.216767
| 12,608
| 304
| 95
| 41.473684
| 0.816911
| 0.103823
| 0
| 0.742081
| 0
| 0
| 0.057933
| 0.011103
| 0
| 0
| 0
| 0
| 0.307692
| 1
| 0.162896
| false
| 0
| 0.027149
| 0.031674
| 0.280543
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
544bad771e884b603b1c23799fb3c65949424aa0
| 14,862
|
py
|
Python
|
nemo/collections/nlp/modules/common/megatron/retrieval_transformer.py
|
entn-at/NeMo
|
5ed583ce5a26667bd154e8fb13f324274476d261
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/nlp/modules/common/megatron/retrieval_transformer.py
|
entn-at/NeMo
|
5ed583ce5a26667bd154e8fb13f324274476d261
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/nlp/modules/common/megatron/retrieval_transformer.py
|
entn-at/NeMo
|
5ed583ce5a26667bd154e8fb13f324274476d261
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Retrival Transformer."""
from einops import rearrange, repeat
from nemo.collections.nlp.modules.common.megatron.module import MegatronModule
from nemo.collections.nlp.modules.common.megatron.rotary_pos_embedding import RotaryEmbedding
from nemo.collections.nlp.modules.common.megatron.transformer import ParallelTransformer
from nemo.collections.nlp.modules.common.megatron.utils import ApexGuardDefaults, build_attention_mask_3d
# Apex is an optional dependency: when it is missing, replace the enum classes
# with ApexGuardDefaults placeholders (every attribute resolves to None) so the
# module can still be imported, and record availability in HAVE_APEX.
try:
    from apex.transformer.enums import AttnMaskType, ModelType
    HAVE_APEX = True
except (ImportError, ModuleNotFoundError):
    # fake missing classes with None attributes
    AttnMaskType = ApexGuardDefaults()
    ModelType = ApexGuardDefaults()
    HAVE_APEX = False
class MegatronRetrievalTransformerEncoderModule(MegatronModule):
    """Retrieval transformer encoder.

    Encodes retrieved neighbor chunks with a ``ParallelTransformer`` while
    letting them cross-attend to the matching chunks of the decoder hidden
    states (RETRO-style encoder).
    """

    def __init__(
        self,
        init_method,
        output_layer_init_method,
        hidden_size,
        ffn_hidden_size,
        num_layers,
        num_attention_heads,
        apply_query_key_layer_scaling=True,
        kv_channels=None,
        layer_type=None,
        pre_process=True,
        post_process=True,
        use_cpu_initialization=False,
        attn_mask_type=AttnMaskType.padding,
        hidden_dropout=0.1,
        attention_dropout=0.1,
        precision=16,
        fp32_residual_connection=False,
        activations_checkpoint_method=None,
        activations_checkpoint_num_layers=1,
        layernorm_epsilon=1e-5,
        bias_gelu_fusion=True,
        bias_dropout_add_fusion=True,
        masked_softmax_fusion=True,
        persist_layer_norm=False,
        openai_gelu=False,
        onnx_safe=False,
        activation='gelu',
        bias=True,
        parent_model_type=ModelType.encoder_or_decoder,
        chunk_size=64,
    ):
        super(MegatronRetrievalTransformerEncoderModule, self).__init__()
        # Bug fix: the default used to be the mutable literal `[]`, which is
        # created once and shared by every instance of this class. A `None`
        # sentinel preserves the observable behavior without the sharing.
        if layer_type is None:
            layer_type = []
        self.pre_process = pre_process
        self.post_process = post_process
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.init_method = init_method
        self.model_attn_mask_type = attn_mask_type
        self.hidden_dropout = hidden_dropout
        self.output_layer_init_method = output_layer_init_method
        self.parent_model_type = parent_model_type

        if kv_channels is None:
            assert (
                hidden_size % num_attention_heads == 0
            ), 'hidden_size must be divisible by num_attention_heads if kv_channels is None'
            kv_channels = hidden_size // num_attention_heads

        # Transformer.
        self.model = ParallelTransformer(
            init_method=self.init_method,
            output_layer_init_method=self.output_layer_init_method,
            num_layers=self.num_layers,
            hidden_size=self.hidden_size,
            num_attention_heads=num_attention_heads,
            apply_query_key_layer_scaling=apply_query_key_layer_scaling,
            kv_channels=kv_channels,
            layer_type=layer_type,
            ffn_hidden_size=ffn_hidden_size,
            self_attn_mask_type=self.model_attn_mask_type,
            pre_process=self.pre_process,
            post_process=self.post_process,
            precision=precision,
            fp32_residual_connection=fp32_residual_connection,
            activations_checkpoint_method=activations_checkpoint_method,
            activations_checkpoint_num_layers=activations_checkpoint_num_layers,
            layernorm_epsilon=layernorm_epsilon,
            hidden_dropout=hidden_dropout,
            attention_dropout=attention_dropout,
            use_cpu_initialization=use_cpu_initialization,
            bias_gelu_fusion=bias_gelu_fusion,
            bias_dropout_fusion=bias_dropout_add_fusion,
            masked_softmax_fusion=masked_softmax_fusion,
            persist_layer_norm=persist_layer_norm,
            openai_gelu=openai_gelu,
            onnx_safe=onnx_safe,
            activation=activation,
            bias=bias,
            model_type=parent_model_type,
            chunk_size=chunk_size,
        )

        # NOTE(review): at this point kv_channels has already been filled in
        # above when it was None, so the conditional always picks kv_channels;
        # kept for parity with the original expression.
        rot_dim = hidden_size // num_attention_heads if kv_channels is None else kv_channels
        self.rotary_pos_emb = RotaryEmbedding(rot_dim)
        self.chunk_size = chunk_size
        self._model_key = 'model'

    def set_input_tensor(self, input_tensor):
        """ See megatron.model.transformer.set_input_tensor()"""
        self.model.set_input_tensor(input_tensor)

    def forward(
        self,
        enc_input,
        enc_attn_mask,
        context_attn_mask=None,
        encoder_output=None,
        layer_past=None,
        get_key_value=False,
    ):
        """Encode the retrieved neighbor chunks.

        Flattens (batch, chunks, neighbors) into one batch axis, runs the
        transformer with cross-attention into the matching decoder chunks,
        then restores the original layout.
        """
        # expected enc_input shape [batch, num_chunks, num_neighbors, retrieval_seq_len, dim]
        # expected enc_attn_mask shape [batch, num_chunks, num_neighbors, retrieval_seq_len]
        # expected encoder_output shape [batch, seq_len, dim]
        b, k, r, rn, dim = enc_input.shape

        # batch, seq_len, dim
        _, n, _ = encoder_output.shape
        num_seq_chunks = n // self.chunk_size
        assert k == num_seq_chunks, f'sequence requires {num_seq_chunks} retrieved chunks, but only {k} passed in'
        seq_index = num_seq_chunks * self.chunk_size

        # Fold chunk and neighbor dims into the batch so each neighbor chunk is
        # an independent sequence for the transformer.
        retrieved = rearrange(enc_input, 'b k r n d -> (b k r) n d')
        enc_attn_mask = rearrange(enc_attn_mask, 'b k r n -> (b k r) n')

        # Each neighbor of chunk k cross-attends to decoder chunk k; replicate
        # the decoder chunks (and their mask) across the r neighbors.
        embed_as_context = repeat(encoder_output[:, :seq_index], 'b (k n) d -> (b k r) n d', n=self.chunk_size, r=r)
        context_attn_mask = repeat(context_attn_mask[:, :seq_index], 'b (k n) -> (b k r) n', n=self.chunk_size, r=r)

        # need to add extra chunk size, since it will be shifted
        cross_attn_q_pos_emb = self.rotary_pos_emb(rn, offset=0)
        cross_attn_k_pos_emb = self.rotary_pos_emb(self.chunk_size)
        attn_pos_emb = (cross_attn_q_pos_emb, cross_attn_q_pos_emb, cross_attn_k_pos_emb)

        # convert to Megatron 3D masks (broadcast head dim with [:, None]).
        enc_attn_mask_3d = build_attention_mask_3d(
            source_mask=enc_attn_mask, target_mask=enc_attn_mask, attn_mask_type=self.model_attn_mask_type,
        )
        enc_attn_mask_3d = enc_attn_mask_3d[:, None, :, :]
        enc_dec_attn_mask_3d = build_attention_mask_3d(
            source_mask=enc_attn_mask, target_mask=context_attn_mask, attn_mask_type=AttnMaskType.padding,
        )
        enc_dec_attn_mask_3d = enc_dec_attn_mask_3d[:, None, :, :]

        # transformer encoder
        enc_output = self.model(
            retrieved,
            enc_attn_mask_3d,
            layer_past=layer_past,
            get_key_value=get_key_value,
            encoder_output=embed_as_context,
            enc_dec_attn_mask=enc_dec_attn_mask_3d,
            rotary_pos_emb=attn_pos_emb,
        )

        # revert back to original retrieved shape
        enc_output = rearrange(enc_output, '(b k r) n d -> b k r n d', b=b, k=k)
        return enc_output

    def state_dict_for_save_checkpoint(self, destination=None, prefix='', keep_vars=False):
        """For easy load."""
        state_dict_ = {}
        state_dict_[self._model_key] = self.model.state_dict_for_save_checkpoint(destination, prefix, keep_vars)
        return state_dict_

    def load_state_dict(self, state_dict, strict=True):
        """Customized load."""
        # Encoder.
        if self._model_key in state_dict:
            state_dict_ = state_dict[self._model_key]
            self.model.load_state_dict(state_dict_, strict=strict)
class MegatronRetrievalTransformerDecoderModule(MegatronModule):
    """Retrieval transformer decoder.

    Runs a ``ParallelTransformer`` over the decoder input and (optionally)
    cross-attends to pre-encoded retrieved neighbor chunks (RETRO-style
    decoder).
    """

    def __init__(
        self,
        init_method,
        output_layer_init_method,
        hidden_size,
        ffn_hidden_size,
        num_layers,
        num_attention_heads,
        apply_query_key_layer_scaling=True,
        kv_channels=None,
        layer_type=None,
        pre_process=True,
        post_process=True,
        use_cpu_initialization=False,
        attn_mask_type=AttnMaskType.causal,
        hidden_dropout=0.1,
        attention_dropout=0.1,
        precision=16,
        fp32_residual_connection=False,
        activations_checkpoint_method=None,
        activations_checkpoint_num_layers=1,
        layernorm_epsilon=1e-5,
        bias_gelu_fusion=True,
        bias_dropout_add_fusion=True,
        masked_softmax_fusion=True,
        persist_layer_norm=False,
        openai_gelu=False,
        onnx_safe=False,
        activation='gelu',
        bias=True,
        parent_model_type=ModelType.encoder_or_decoder,
        chunk_size=64,
    ):
        super(MegatronRetrievalTransformerDecoderModule, self).__init__()
        # Bug fix: the default used to be the mutable literal `[]`, which is
        # created once and shared by every instance of this class. A `None`
        # sentinel preserves the observable behavior without the sharing.
        if layer_type is None:
            layer_type = []
        self.pre_process = pre_process
        self.post_process = post_process
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.init_method = init_method
        self.model_attn_mask_type = attn_mask_type
        self.hidden_dropout = hidden_dropout
        self.output_layer_init_method = output_layer_init_method
        self.parent_model_type = parent_model_type

        if kv_channels is None:
            assert (
                hidden_size % num_attention_heads == 0
            ), 'hidden_size must be divisible by num_attention_heads if kv_channels is None'
            kv_channels = hidden_size // num_attention_heads

        # Transformer.
        self.model = ParallelTransformer(
            init_method=self.init_method,
            output_layer_init_method=self.output_layer_init_method,
            num_layers=self.num_layers,
            hidden_size=self.hidden_size,
            num_attention_heads=num_attention_heads,
            apply_query_key_layer_scaling=apply_query_key_layer_scaling,
            kv_channels=kv_channels,
            layer_type=layer_type,
            ffn_hidden_size=ffn_hidden_size,
            self_attn_mask_type=self.model_attn_mask_type,
            pre_process=self.pre_process,
            post_process=self.post_process,
            precision=precision,
            fp32_residual_connection=fp32_residual_connection,
            activations_checkpoint_method=activations_checkpoint_method,
            activations_checkpoint_num_layers=activations_checkpoint_num_layers,
            layernorm_epsilon=layernorm_epsilon,
            hidden_dropout=hidden_dropout,
            attention_dropout=attention_dropout,
            use_cpu_initialization=use_cpu_initialization,
            bias_gelu_fusion=bias_gelu_fusion,
            bias_dropout_fusion=bias_dropout_add_fusion,
            masked_softmax_fusion=masked_softmax_fusion,
            persist_layer_norm=persist_layer_norm,
            openai_gelu=openai_gelu,
            onnx_safe=onnx_safe,
            activation=activation,
            bias=bias,
            model_type=parent_model_type,
            chunk_size=chunk_size,
        )

        # NOTE(review): at this point kv_channels has already been filled in
        # above when it was None, so the conditional always picks kv_channels;
        # kept for parity with the original expression.
        rot_dim = hidden_size // num_attention_heads if kv_channels is None else kv_channels
        self.rotary_pos_emb = RotaryEmbedding(rot_dim)
        self.chunk_size = chunk_size
        self._model_key = 'model'

    def set_input_tensor(self, input_tensor):
        """ See megatron.model.transformer.set_input_tensor()"""
        self.model.set_input_tensor(input_tensor)

    def forward(
        self,
        dec_input,
        dec_attn_mask,
        retrieved_attn_mask=None,
        retrieved_emb=None,
        layer_past=None,
        get_key_value=False,
    ):
        """Decode with optional cross-attention to retrieved chunks.

        When ``retrieved_emb`` is None the cross-attention inputs (key rotary
        embedding and encoder-decoder mask) are omitted and the transformer
        runs self-attention only.
        """
        # expected dec_input shape [batch, seq_len, dim]
        # expected dec_attn_mask shape [batch, seq_len]
        # expected retrieved_emb shape [batch, num_chunks, num_neighbors, retrieval_seq_len, dim]
        # expected retrieved_attn_mask shape [batch, num_chunks, num_neighbors, retrieval_seq_len]

        # batch, seq_len, dim
        _, n, _ = dec_input.shape
        num_seq_chunks = n // self.chunk_size

        if retrieved_emb is not None:
            b, k, r, rn, dim = retrieved_emb.shape
            assert (
                k == num_seq_chunks
            ), f'sequence requires {num_seq_chunks} retrieved chunks, but only {k} passed in'

        self_attn_emb = self.rotary_pos_emb(n)
        # need to add extra chunk size, since it will be shifted
        cross_attn_q_pos_emb = self.rotary_pos_emb(self.chunk_size * 2 - 1)
        if retrieved_emb is not None:
            cross_attn_k_pos_emb = self.rotary_pos_emb(rn, offset=0)
            attn_pos_emb = (self_attn_emb, cross_attn_q_pos_emb, cross_attn_k_pos_emb)
        else:
            attn_pos_emb = (self_attn_emb, cross_attn_q_pos_emb, None)

        # convert to Megatron 3D masks (broadcast head dim with [:, None]).
        dec_attn_mask_3d = build_attention_mask_3d(
            source_mask=dec_attn_mask, target_mask=dec_attn_mask, attn_mask_type=self.model_attn_mask_type,
        )
        dec_attn_mask_3d = dec_attn_mask_3d[:, None, :, :]

        if retrieved_emb is not None:
            # Fold the chunk dim into the batch so every decoder chunk attends
            # to its own set of flattened (neighbor x token) retrieved keys.
            # NOTE(review): assumes seq_len is a multiple of chunk_size here —
            # the 'b (k n)' rearrange fails otherwise; confirm against callers.
            dec_attn_mask = rearrange(dec_attn_mask, 'b (k n) -> (b k) n', k=k)
            retrieved_attn_mask = rearrange(retrieved_attn_mask, 'b k r n -> (b k) (r n)')
            enc_dec_attn_mask_3d = build_attention_mask_3d(
                source_mask=dec_attn_mask, target_mask=retrieved_attn_mask, attn_mask_type=AttnMaskType.padding,
            )
            enc_dec_attn_mask_3d = enc_dec_attn_mask_3d[:, None, :, :]
        else:
            enc_dec_attn_mask_3d = None

        # transformer decoder stack
        enc_output = self.model(
            dec_input,
            dec_attn_mask_3d,
            layer_past=layer_past,
            get_key_value=get_key_value,
            encoder_output=None,
            retrieved_emb=retrieved_emb,
            enc_dec_attn_mask=enc_dec_attn_mask_3d,
            rotary_pos_emb=attn_pos_emb,
        )
        return enc_output

    def state_dict_for_save_checkpoint(self, destination=None, prefix='', keep_vars=False):
        """For easy load."""
        state_dict_ = {}
        state_dict_[self._model_key] = self.model.state_dict_for_save_checkpoint(destination, prefix, keep_vars)
        return state_dict_

    def load_state_dict(self, state_dict, strict=True):
        """Customized load."""
        # Decoder weights live under the single 'model' key.
        if self._model_key in state_dict:
            state_dict_ = state_dict[self._model_key]
            self.model.load_state_dict(state_dict_, strict=strict)
| 38.502591
| 151
| 0.671646
| 1,890
| 14,862
| 4.856085
| 0.125926
| 0.050556
| 0.026367
| 0.018414
| 0.803443
| 0.786664
| 0.77032
| 0.750817
| 0.744062
| 0.714317
| 0
| 0.006348
| 0.258041
| 14,862
| 385
| 152
| 38.602597
| 0.825973
| 0.113646
| 0
| 0.761246
| 0
| 0
| 0.035905
| 0
| 0
| 0
| 0
| 0
| 0.013841
| 1
| 0.034602
| false
| 0.00692
| 0.024221
| 0
| 0.079585
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
545c827be357dc5f796bb22fa5998ef872976680
| 39
|
py
|
Python
|
oknlp/auto_config/__init__.py
|
PLNUHT/ink
|
3542bdfbd7281118c62e0b1ab965dc7ddf279a6d
|
[
"MIT"
] | 6
|
2021-09-01T02:47:48.000Z
|
2021-12-21T18:40:05.000Z
|
oknlp/auto_config/__init__.py
|
PLNUHT/ink
|
3542bdfbd7281118c62e0b1ab965dc7ddf279a6d
|
[
"MIT"
] | 1
|
2021-08-10T06:06:23.000Z
|
2021-08-10T06:06:23.000Z
|
oknlp/auto_config/__init__.py
|
thunlp/oknlp
|
3542bdfbd7281118c62e0b1ab965dc7ddf279a6d
|
[
"MIT"
] | 2
|
2020-08-10T15:13:41.000Z
|
2021-02-01T03:01:37.000Z
|
from .get_provider import get_provider
| 19.5
| 38
| 0.871795
| 6
| 39
| 5.333333
| 0.666667
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
54a28c087e61e13ad37b8a631a5cf37dbe90252e
| 206
|
py
|
Python
|
auglichem/crystal/models/__init__.py
|
BaratiLab/AugLiChem
|
37258b5ce2c653436b3e819b58d2659052d6edcc
|
[
"MIT"
] | 16
|
2021-12-01T06:32:41.000Z
|
2022-03-26T18:11:49.000Z
|
auglichem/crystal/models/__init__.py
|
BaratiLab/AugLiChem
|
37258b5ce2c653436b3e819b58d2659052d6edcc
|
[
"MIT"
] | 1
|
2022-02-14T20:59:45.000Z
|
2022-02-14T20:59:45.000Z
|
auglichem/crystal/models/__init__.py
|
BaratiLab/AugLiChem
|
37258b5ce2c653436b3e819b58d2659052d6edcc
|
[
"MIT"
] | null | null | null |
from auglichem.crystal.models.cgcnn import CrystalGraphConvNet
from auglichem.crystal.models.schnet import SchNet
from auglichem.crystal.models.gin import GINet
from auglichem.crystal.models.gcn import GCN
| 41.2
| 62
| 0.864078
| 28
| 206
| 6.357143
| 0.392857
| 0.292135
| 0.449438
| 0.58427
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07767
| 206
| 4
| 63
| 51.5
| 0.936842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
54c3306608c1bf6b188efefbbd2180c23520b7cf
| 618
|
py
|
Python
|
training/utils/data_logs.py
|
ristea/arim
|
1ca94f9b83c38e3c115c16c5d7b62e626303f7c8
|
[
"MIT"
] | 10
|
2020-08-06T08:03:08.000Z
|
2021-11-15T21:37:17.000Z
|
training/utils/data_logs.py
|
ristea/arim
|
1ca94f9b83c38e3c115c16c5d7b62e626303f7c8
|
[
"MIT"
] | null | null | null |
training/utils/data_logs.py
|
ristea/arim
|
1ca94f9b83c38e3c115c16c5d7b62e626303f7c8
|
[
"MIT"
] | 2
|
2020-03-02T08:14:25.000Z
|
2020-09-25T14:34:50.000Z
|
def save_logs_train(path_to_history, details):
    """Append *details* as one line to the training history log.

    Args:
        path_to_history: directory holding the log files.
        details: text of the log entry (a trailing newline is added here).
    """
    # 'with' guarantees the handle is closed even if write() raises; the
    # original opened/closed manually and leaked the handle on error.
    # (File name spelling is kept as-is: other tools may read it.)
    with open(path_to_history + '/__hystoryTrain__.txt', "a") as history:
        history.write(details + '\n')
def save_logs_eval(path_to_history, details):
    """Append *details* as one line to the evaluation history log.

    Args:
        path_to_history: directory holding the log files.
        details: text of the log entry (a trailing newline is added here).
    """
    # 'with' guarantees the handle is closed even if write() raises; the
    # original opened/closed manually and leaked the handle on error.
    with open(path_to_history + '/__hystoryEval__.txt', "a") as history:
        history.write(details + '\n')
def save_best_stats(path_to_history, details):
    """Append *details* as one line to the best-statistics log.

    Args:
        path_to_history: directory holding the log files.
        details: text of the log entry (a trailing newline is added here).
    """
    # 'with' guarantees the handle is closed even if write() raises; the
    # original opened/closed manually and leaked the handle on error.
    with open(path_to_history + '/__best_stats__.txt', "a") as history:
        history.write(details + '\n')
| 30.9
| 63
| 0.711974
| 85
| 618
| 4.670588
| 0.223529
| 0.18136
| 0.392947
| 0.151134
| 0.836272
| 0.836272
| 0.836272
| 0.836272
| 0.836272
| 0.836272
| 0
| 0
| 0.161812
| 618
| 20
| 64
| 30.9
| 0.766409
| 0
| 0
| 0.6
| 0
| 0
| 0.11165
| 0.033981
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b708e3a27e50fa7e1ad35579b24f1f47554218ce
| 109
|
py
|
Python
|
doc/sphinx/source/user_manual/__init__.py
|
tneele/mCRL2
|
8f2d730d650ffec15130d6419f69c50f81e5125c
|
[
"BSL-1.0"
] | null | null | null |
doc/sphinx/source/user_manual/__init__.py
|
tneele/mCRL2
|
8f2d730d650ffec15130d6419f69c50f81e5125c
|
[
"BSL-1.0"
] | null | null | null |
doc/sphinx/source/user_manual/__init__.py
|
tneele/mCRL2
|
8f2d730d650ffec15130d6419f69c50f81e5125c
|
[
"BSL-1.0"
] | null | null | null |
import tools
def generate_rst(temppath, outpath, binpath):
    """Thin wrapper delegating reST generation to the sibling ``tools`` module.

    All three arguments are forwarded unchanged; see ``tools.generate_rst``
    for their semantics (presumably temp dir, output dir, binaries dir —
    TODO confirm against the tools module).
    """
    tools.generate_rst(temppath, outpath, binpath)
| 21.8
| 48
| 0.798165
| 14
| 109
| 6.071429
| 0.571429
| 0.258824
| 0.447059
| 0.611765
| 0.776471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110092
| 109
| 4
| 49
| 27.25
| 0.876289
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
b73d34ed064dd90a468d2b9f8730147470d2aae7
| 115
|
py
|
Python
|
fcis/datasets/__init__.py
|
knorth55/chainer-fcis
|
a3dcebf5c31395dbd4b596509707bc9fe91a06e0
|
[
"MIT"
] | 45
|
2017-10-06T12:24:17.000Z
|
2021-11-10T05:28:31.000Z
|
fcis/datasets/__init__.py
|
knorth55/chainer-fcis
|
a3dcebf5c31395dbd4b596509707bc9fe91a06e0
|
[
"MIT"
] | 14
|
2017-10-13T11:03:54.000Z
|
2018-12-12T04:48:35.000Z
|
fcis/datasets/__init__.py
|
knorth55/chainer-fcis
|
a3dcebf5c31395dbd4b596509707bc9fe91a06e0
|
[
"MIT"
] | 10
|
2017-10-13T09:24:15.000Z
|
2020-07-12T09:05:47.000Z
|
from fcis.datasets import coco # NOQA
from fcis.datasets import sbd # NOQA
from fcis.datasets import voc # NOQA
| 28.75
| 38
| 0.765217
| 18
| 115
| 4.888889
| 0.444444
| 0.272727
| 0.545455
| 0.75
| 0.590909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182609
| 115
| 3
| 39
| 38.333333
| 0.93617
| 0.121739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3f7a7627381f09b8ba1d17fdc004cbb8f53d492b
| 128
|
py
|
Python
|
galry/visuals/fontmaps/__init__.py
|
fiath/test
|
b50898dafa90e93da48f573e0b3feb1bb6acd8de
|
[
"MIT",
"BSD-3-Clause"
] | 55
|
2015-01-12T06:08:36.000Z
|
2021-08-13T17:24:50.000Z
|
galry/visuals/fontmaps/__init__.py
|
fiath/test
|
b50898dafa90e93da48f573e0b3feb1bb6acd8de
|
[
"MIT",
"BSD-3-Clause"
] | 2
|
2017-03-08T12:04:22.000Z
|
2017-07-27T07:13:00.000Z
|
galry/visuals/fontmaps/__init__.py
|
fiath/test
|
b50898dafa90e93da48f573e0b3feb1bb6acd8de
|
[
"MIT",
"BSD-3-Clause"
] | 10
|
2015-01-01T10:51:38.000Z
|
2021-12-10T02:53:45.000Z
|
from tools import load_png, load_fnt, get_text_map, load_font
__all__ = ["load_png", "load_fnt", "get_text_map", "load_font"]
| 25.6
| 63
| 0.75
| 22
| 128
| 3.727273
| 0.5
| 0.170732
| 0.268293
| 0.341463
| 0.780488
| 0.780488
| 0.780488
| 0.780488
| 0.780488
| 0
| 0
| 0
| 0.117188
| 128
| 4
| 64
| 32
| 0.725664
| 0
| 0
| 0
| 0
| 0
| 0.291339
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
3fadf81e4dc2b84aac8b0ba70e3f718453dbf85e
| 14,525
|
py
|
Python
|
test/gspace/test_restrict_isometry.py
|
QUVA-Lab/escnn
|
59ed6b96f61f8616f87b3f25aa2f8abdb6f1a882
|
[
"BSD-3-Clause"
] | 4
|
2022-03-16T22:51:39.000Z
|
2022-03-18T18:45:49.000Z
|
test/gspace/test_restrict_isometry.py
|
QUVA-Lab/escnn
|
59ed6b96f61f8616f87b3f25aa2f8abdb6f1a882
|
[
"BSD-3-Clause"
] | null | null | null |
test/gspace/test_restrict_isometry.py
|
QUVA-Lab/escnn
|
59ed6b96f61f8616f87b3f25aa2f8abdb6f1a882
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from unittest import TestCase
from escnn.gspaces import *
from escnn.group import *
import numpy as np
class TestRestrictGSpace(TestCase):
    """Tests for GSpace.restrict.

    Each test restricts a gspace to a subgroup id, checks the returned gspace
    and fiber group type/order, and then verifies via check_restriction that
    feature-field actions commute with the restriction.
    """

    ####################################################################################################################
    ###   2D isometries
    ####################################################################################################################

    def test_restrict_rotations(self):
        space = rot2dOnR2(-1, maximum_frequency=10)
        subspace, mapping, _ = space.restrict(4)
        self.assertIsInstance(subspace, GSpace2D)
        self.assertEqual(subspace.fibergroup.order(), 4)
        self.check_restriction(space, 4)

    def test_restrict_rotations_to_trivial(self):
        space = rot2dOnR2(-1, maximum_frequency=10)
        subspace, mapping, _ = space.restrict(1)
        self.assertIsInstance(subspace, GSpace2D)
        self.assertEqual(subspace.fibergroup.order(), 1)
        self.check_restriction(space, 1)

    def test_restrict_flipsrotations(self):
        space = flipRot2dOnR2(-1, maximum_frequency=10)
        N = 10
        for axis in range(13):
            axis = axis * np.pi / (13 * N)
            assert axis <= np.pi / N
            subspace, mapping, _ = space.restrict((axis, N))
            self.assertIsInstance(subspace, GSpace2D)
            self.assertEqual(subspace.fibergroup.order(), 2 * N)
            self.check_restriction(space, (axis, N))

    def test_restrict_flipsrotations_to_rotations(self):
        space = flipRot2dOnR2(-1, maximum_frequency=10)
        subspace, mapping, _ = space.restrict((None, -1))
        self.assertIsInstance(subspace, GSpace2D)
        self.assertEqual(subspace.fibergroup.order(), -1)
        self.check_restriction(space, (None, -1))

    def test_restrict_flipsrotations_to_flips(self):
        space = flipRot2dOnR2(-1, maximum_frequency=10)
        for axis in range(13):
            axis = axis * np.pi / 13.
            subspace, mapping, _ = space.restrict((axis, 1))
            self.assertIsInstance(subspace, GSpace2D)
            self.assertEqual(subspace.fibergroup.order(), 2)
            self.check_restriction(space, (axis, 1))

    def test_restrict_fliprotations_to_trivial(self):
        space = flipRot2dOnR2(-1, maximum_frequency=10)
        subspace, mapping, _ = space.restrict((None, 1))
        self.assertIsInstance(subspace, GSpace2D)
        self.assertEqual(subspace.fibergroup.order(), 1)
        self.check_restriction(space, (None, 1))

    def test_restrict_flips_to_trivial(self):
        space = flip2dOnR2()
        subspace, mapping, _ = space.restrict(1)
        self.assertIsInstance(subspace, GSpace2D)
        self.assertEqual(subspace.fibergroup.order(), 1)
        self.check_restriction(space, 1)

    ####################################################################################################################
    ###   3D isometries
    ####################################################################################################################

    ####### SO(3) ######################################################################################################

    def test_restrict_3d_rotations_to_ico(self):
        space = rot3dOnR3(maximum_frequency=2)
        subspace, mapping, _ = space.restrict('ico')
        self.assertIsInstance(subspace, GSpace3D)
        self.assertIsInstance(subspace.fibergroup, Icosahedral)
        self.check_restriction(space, 'ico')

    def test_restrict_3d_rotations_to_octa(self):
        space = rot3dOnR3(maximum_frequency=2)
        subspace, mapping, _ = space.restrict('octa')
        self.assertIsInstance(subspace, GSpace3D)
        self.assertIsInstance(subspace.fibergroup, Octahedral)
        self.check_restriction(space, 'octa')

    def test_restrict_3d_rotations_to_2d_rotations(self):
        space = rot3dOnR3(maximum_frequency=2)
        # n == -1 means the continuous rotation group SO(2)
        for n in [-1, 1, 2, 4, 7]:
            sg_id = False, n
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            if n > 0:
                self.assertIsInstance(subspace.fibergroup, CyclicGroup)
            else:
                self.assertIsInstance(subspace.fibergroup, SO2)
            self.assertEqual(subspace.fibergroup.order(), n)
            self.check_restriction(space, sg_id)

    def test_restrict_3d_rotations_to_2d_rotations_reflections(self):
        space = rot3dOnR3(maximum_frequency=2)
        for n in [2, 4]:
            for axis in [0., np.pi / 2, np.pi / 4]:
                sg_id = axis, n
                subspace, mapping, _ = space.restrict(sg_id)
                self.assertIsInstance(subspace, GSpace3D)
                self.assertIsInstance(subspace.fibergroup, DihedralGroup)
                self.assertEqual(subspace.fibergroup.order(), 2 * n)
                self.check_restriction(space, sg_id)

    def test_restrict_3d_rotations_to_2d_reflection(self):
        space = rot3dOnR3(maximum_frequency=2)
        for axis in [0., np.pi / 2, np.pi / 4]:
            sg_id = axis, 1
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            self.assertIsInstance(subspace.fibergroup, CyclicGroup)
            self.assertEqual(subspace.fibergroup.order(), 2)
            self.check_restriction(space, sg_id)

    ####### O(3) #######################################################################################################

    def test_restrict_3d_rotationinversion_so3(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        sg_id = 'so3'
        subspace, mapping, _ = space.restrict(sg_id)
        self.assertIsInstance(subspace, GSpace3D)
        self.assertIsInstance(subspace.fibergroup, SO3)
        self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_ico(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        sg_id = False, 'ico'
        subspace, mapping, _ = space.restrict(sg_id)
        self.assertIsInstance(subspace, GSpace3D)
        self.assertIsInstance(subspace.fibergroup, Icosahedral)
        self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_dih_o2(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        for axis in [0., np.pi / 2, np.pi / 3, 2 * np.pi]:
            for n in [2, 4]:
                sg_id = (False, axis, n)
                subspace, mapping, _ = space.restrict(sg_id)
                self.assertIsInstance(subspace, GSpace3D)
                self.assertIsInstance(subspace.fibergroup, DihedralGroup)
                self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_so2(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        for n in [2, 4]:
            sg_id = (False, False, n)
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            self.assertIsInstance(subspace.fibergroup, CyclicGroup)
            self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_flip(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        for axis in [0., np.pi / 2, np.pi / 3, 2 * np.pi]:
            sg_id = (False, 2 * axis, 1)
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            self.assertIsInstance(subspace.fibergroup, CyclicGroup)
            self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_cone_o2(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        for n in [2, 4]:
            # Cone aligned along Z axis
            # i.e., rotation along Z axis
            # on XY plane, mirror wrt Y axis (i.e. flip along X axis)
            sg_id = ('cone', 0., n)
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            self.assertIsInstance(subspace.fibergroup, DihedralGroup)
            self.check_restriction(space, sg_id)

            # Cone aligned along Z axis
            # i.e., rotation along Z axis
            # on XY plane, mirror wrt X axis (i.e. flip along Y axis)
            sg_id = ('cone', np.pi, n)
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            self.assertIsInstance(subspace.fibergroup, DihedralGroup)
            self.check_restriction(space, sg_id)

            # quaternion for a rotation moving the Z axis onto the (1,1,1) diagonal
            xyz = space.fibergroup.element(
                (
                    0,
                    np.array([
                        np.sqrt(1. / 3.) * np.sin(np.pi * 2 / 3),
                        np.sqrt(1. / 3.) * np.sin(np.pi * 2 / 3),
                        np.sqrt(1. / 3.) * np.sin(np.pi * 2 / 3),
                        np.cos(np.pi * 2 / 3),
                    ])
                ),
                'Q'
            )
            # adjoint elements conjugate the subgroup; all four should yield a valid restriction
            for adj in [xyz, xyz @ xyz, xyz @ space.fibergroup.inversion, xyz @ xyz @ space.fibergroup.inversion]:
                sg_id = ('cone', 0., n, adj)
                subspace, mapping, _ = space.restrict(sg_id)
                self.assertIsInstance(subspace, GSpace3D)
                self.assertIsInstance(subspace.fibergroup, DihedralGroup)
                self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_mir(self):
        space = flipRot3dOnR3(maximum_frequency=2)

        # mirror wrt Y axis (i.e. flip along X axis)
        sg_id = ('cone', 0., 1)
        subspace, mapping, _ = space.restrict(sg_id)
        self.assertIsInstance(subspace, GSpace3D)
        self.assertIsInstance(subspace.fibergroup, CyclicGroup)
        self.check_restriction(space, sg_id)

        # mirror wrt X axis (i.e. flip along Y axis)
        sg_id = ('cone', np.pi, 1)
        subspace, mapping, _ = space.restrict(sg_id)
        self.assertIsInstance(subspace, GSpace3D)
        self.assertIsInstance(subspace.fibergroup, CyclicGroup)
        self.check_restriction(space, sg_id)

        # quaternion for a rotation moving the Z axis onto the (1,1,1) diagonal
        xyz = space.fibergroup.element(
            (
                0,
                np.array([
                    np.sqrt(1. / 3.) * np.sin(np.pi * 2 / 3),
                    np.sqrt(1. / 3.) * np.sin(np.pi * 2 / 3),
                    np.sqrt(1. / 3.) * np.sin(np.pi * 2 / 3),
                    np.cos(np.pi * 2 / 3),
                ])
            ),
            'Q'
        )
        for adj in [xyz, xyz @ xyz, xyz @ space.fibergroup.inversion, xyz @ xyz @ space.fibergroup.inversion]:
            sg_id = ('cone', 0., 1, adj)
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            self.assertIsInstance(subspace.fibergroup, CyclicGroup)
            self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_fullcylinder_c2xo2(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        for n in [2, 4]:
            sg_id = (True, True, n)
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            self.assertIsInstance(subspace.fibergroup, DirectProductGroup)
            # Bug fix: the original wrote `self.assertIsInstance(...) and <order check>`,
            # which discards the order comparison (assertIsInstance returns None, so the
            # right-hand side of `and` never asserts anything). Assert explicitly instead.
            self.assertIsInstance(subspace.fibergroup.G1, CyclicGroup)
            self.assertEqual(subspace.fibergroup.G1.order(), 2)
            self.assertIsInstance(subspace.fibergroup.G2, DihedralGroup)
            self.assertEqual(subspace.fibergroup.G2.order(), 2 * n)
            self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_cylinder_c2xso2(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        for n in [2, 4]:
            sg_id = (True, False, n)
            subspace, mapping, _ = space.restrict(sg_id)
            self.assertIsInstance(subspace, GSpace3D)
            self.assertIsInstance(subspace.fibergroup, DirectProductGroup)
            # Bug fix: same discarded-`and` assertion pattern as in the fullcylinder
            # test above; the order checks are now real assertions.
            self.assertIsInstance(subspace.fibergroup.G1, CyclicGroup)
            self.assertEqual(subspace.fibergroup.G1.order(), 2)
            self.assertIsInstance(subspace.fibergroup.G2, CyclicGroup)
            self.assertEqual(subspace.fibergroup.G2.order(), n)
            self.check_restriction(space, sg_id)

    def test_restrict_3d_rotationinversion_inv(self):
        space = flipRot3dOnR3(maximum_frequency=2)
        # Inversion wrt the origin
        sg_id = (True, False, 1)
        subspace, mapping, _ = space.restrict(sg_id)
        self.assertIsInstance(subspace, GSpace3D)
        self.assertIsInstance(subspace.fibergroup, CyclicGroup)
        self.check_restriction(space, sg_id)

    ####################################################################################################################

    def check_restriction(self, space: GSpace, subgroup_id):
        """Verify that acting with a subgroup element on a feature field gives the
        same result whether computed in the parent gspace (via the parent-embedding
        map) or in the restricted gspace with the restricted representation."""
        subspace, parent, child = space.restrict(subgroup_id)
        irreps = space.fibergroup.irreps()
        for rho in irreps:
            sub_rho = rho.restrict(subgroup_id)
            x = np.random.randn(1, rho.size, 3, 3, 3)
            for e in subspace.testing_elements:
                y1 = space.featurefield_action(x, rho, parent(e))
                y2 = subspace.featurefield_action(x, sub_rho, e)
                self.assertTrue(np.allclose(y1, y2), msg=f"{space.name} -> {subgroup_id}: {parent(e)} -> {e}")
# Allow running this test module directly (python test_restrict_isometry.py)
# in addition to test-runner discovery.
if __name__ == '__main__':
    unittest.main()
| 35.687961
| 120
| 0.52716
| 1,393
| 14,525
| 5.32448
| 0.099785
| 0.027504
| 0.188756
| 0.098153
| 0.867467
| 0.835243
| 0.816637
| 0.798975
| 0.767157
| 0.746124
| 0
| 0.024798
| 0.292048
| 14,525
| 406
| 121
| 35.775862
| 0.696489
| 0.033666
| 0
| 0.605932
| 0
| 0
| 0.008142
| 0
| 0
| 0
| 0
| 0
| 0.262712
| 1
| 0.097458
| false
| 0
| 0.021186
| 0
| 0.122881
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3fecc810968be5cc1aec13f51597818e52cab0fc
| 8,315
|
py
|
Python
|
guillotina/component/tests/test_globalregistry.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 173
|
2017-03-10T18:26:12.000Z
|
2022-03-03T06:48:56.000Z
|
guillotina/component/tests/test_globalregistry.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 921
|
2017-03-08T14:04:43.000Z
|
2022-03-30T10:28:56.000Z
|
guillotina/component/tests/test_globalregistry.py
|
rboixaderg/guillotina
|
fcae65c2185222272f3b8fee4bc2754e81e0e983
|
[
"BSD-2-Clause"
] | 60
|
2017-03-16T19:59:44.000Z
|
2022-03-03T06:48:59.000Z
|
##############################################################################
#
# Copyright (c) 2012 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
# flake8: noqa
import unittest
class Test_get_global_components(unittest.TestCase):
    """Tests for ``guillotina.component.globalregistry.get_global_components``."""

    def _callFUT(self):
        # Import inside the helper so an import failure surfaces as a test
        # error rather than a module-collection error.
        from guillotina.component.globalregistry import get_global_components

        return get_global_components()

    def test_gsm_is_IComponentLookup(self):
        """The global registry is the module-level ``base`` and provides IComponentLookup."""
        from guillotina.component.globalregistry import base
        from guillotina.component.interfaces import IComponentLookup

        gsm = self._callFUT()
        self.assertTrue(gsm is base)
        self.assertTrue(IComponentLookup.providedBy(gsm))

    def test_gsm_is_singleton(self):
        """Repeated calls return the identical registry object.

        Bug fix: the original body fetched the registry once and made no
        assertion at all, so the test could never fail.  Fetch it twice and
        assert both calls yield the same object.
        """
        gsm = self._callFUT()
        self.assertTrue(self._callFUT() is gsm)
class Test_provide_utility(unittest.TestCase):
    """Tests for ``guillotina.component.globalregistry.provide_utility``."""

    # Bind the shared registry fixtures as this TestCase's setUp/tearDown.
    from guillotina.component.testing import setUp, tearDown

    def _callFUT(self, *args, **kw):
        from guillotina.component.globalregistry import provide_utility

        return provide_utility(*args, **kw)

    def test_anonymous_no_provides(self):
        # No name and no explicit interface: the provided interface is taken
        # from the factory's @implementer declaration.
        from guillotina.component.globalregistry import get_global_components
        from zope.interface import Interface, implementer

        class IFoo(Interface):
            pass

        @implementer(IFoo)
        class Foo(object):
            pass

        utility = Foo()
        self._callFUT(utility)
        registry = get_global_components()
        self.assertTrue(registry.getUtility(IFoo, "") is utility)

    def test_named_w_provides(self):
        # Explicit interface plus a registration name: the class itself does
        # not need to declare that it implements the interface.
        from guillotina.component.globalregistry import get_global_components
        from zope.interface import Interface

        class IFoo(Interface):
            pass

        class Foo(object):
            pass

        utility = Foo()
        self._callFUT(utility, IFoo, "named")
        registry = get_global_components()
        self.assertTrue(registry.getUtility(IFoo, "named") is utility)
class Test_provide_adapter(unittest.TestCase):
    """Tests for ``guillotina.component.globalregistry.provide_adapter``."""

    # Bind the shared registry fixtures as this TestCase's setUp/tearDown.
    from guillotina.component.testing import setUp, tearDown

    def _callFUT(self, *args, **kw):
        from guillotina.component.globalregistry import provide_adapter

        return provide_adapter(*args, **kw)

    def test_anonymous_no_provides_no_adapts(self):
        # With no explicit arguments, both the adapted and the provided
        # interfaces come from the factory's own @adapter/@implementer
        # declarations.
        from guillotina.component._declaration import adapter
        from guillotina.component.globalregistry import get_global_components
        from zope.interface import Interface, implementer

        class IFoo(Interface):
            pass

        class IBar(Interface):
            pass

        @implementer(IFoo)
        class Foo(object):
            pass

        @adapter(IFoo)
        @implementer(IBar)
        class Bar(object):
            def __init__(self, context):
                self.context = context

        self._callFUT(Bar)
        registry = get_global_components()
        source = Foo()
        result = registry.getAdapter(source, IBar)
        self.assertTrue(isinstance(result, Bar))
        self.assertTrue(result.context is source)

    def test_named_w_provides_w_adapts(self):
        # Explicit (adapts, provides, name) arguments stand in for any
        # declarations on the factory class.
        from guillotina.component.globalregistry import get_global_components
        from zope.interface import Interface, implementer

        class IFoo(Interface):
            pass

        class IBar(Interface):
            pass

        @implementer(IFoo)
        class Foo(object):
            pass

        class Bar(object):
            def __init__(self, context):
                self.context = context

        self._callFUT(Bar, (IFoo,), IBar, "test")
        registry = get_global_components()
        source = Foo()
        result = registry.getAdapter(source, IBar, name="test")
        self.assertTrue(isinstance(result, Bar))
        self.assertTrue(result.context is source)
class Test_provide_subscription_adapter(unittest.TestCase):
    """Tests for ``guillotina.component.globalregistry.provide_subscription_adapter``."""

    # Bind the shared registry fixtures as this TestCase's setUp/tearDown.
    from guillotina.component.testing import setUp, tearDown

    def _callFUT(self, *args, **kw):
        from guillotina.component.globalregistry import provide_subscription_adapter

        return provide_subscription_adapter(*args, **kw)

    def test_no_provides_no_adapts(self):
        # Registration driven entirely by the factory's @adapter/@implementer
        # declarations; lookup goes through the subscriber machinery.
        from guillotina.component._declaration import adapter
        from guillotina.component.globalregistry import get_global_components
        from zope.interface import Interface, implementer

        class IFoo(Interface):
            pass

        class IBar(Interface):
            pass

        @implementer(IFoo)
        class Foo(object):
            pass

        @adapter(IFoo)
        @implementer(IBar)
        class Bar(object):
            def __init__(self, context):
                self.context = context

        self._callFUT(Bar)
        registry = get_global_components()
        source = Foo()
        subscribers = registry.subscribers((source,), IBar)
        self.assertEqual(len(subscribers), 1)
        self.assertTrue(isinstance(subscribers[0], Bar))
        self.assertTrue(subscribers[0].context is source)

    def test_w_provides_w_adapts(self):
        # Explicit (adapts, provides) arguments replace declarations on the
        # factory class.
        from guillotina.component.globalregistry import get_global_components
        from zope.interface import Interface, implementer

        class IFoo(Interface):
            pass

        class IBar(Interface):
            pass

        @implementer(IFoo)
        class Foo(object):
            pass

        class Bar(object):
            def __init__(self, context):
                self.context = context

        self._callFUT(Bar, (IFoo,), IBar)
        registry = get_global_components()
        source = Foo()
        subscribers = registry.subscribers((source,), IBar)
        self.assertEqual(len(subscribers), 1)
        self.assertTrue(isinstance(subscribers[0], Bar))
        self.assertTrue(subscribers[0].context is source)
class Test_provide_handler(unittest.TestCase):
    """Tests for ``guillotina.component.globalregistry.provide_handler``."""

    # Bind the shared registry fixtures as this TestCase's setUp/tearDown.
    from guillotina.component.testing import setUp, tearDown

    def _callFUT(self, *args, **kw):
        from guillotina.component.globalregistry import provide_handler

        return provide_handler(*args, **kw)

    def test_no_adapts(self):
        # Handled interfaces are read from the handler's @adapter declaration.
        # The handler body must never run during registration.
        from guillotina.component._declaration import adapter
        from guillotina.component.globalregistry import get_global_components
        from zope.interface import Interface, implementer, providedBy

        class IFoo(Interface):
            pass

        @implementer(IFoo)
        class Foo(object):
            pass

        @adapter(IFoo)
        def _handler(context):
            assert 0, "DON'T GO HERE"

        self._callFUT(_handler)
        registry = get_global_components()
        registrations = list(registry.registeredHandlers())
        self.assertEqual(len(registrations), 1)
        registration = registrations[0]
        self.assertEqual(list(registration.required), list(providedBy(Foo())))
        self.assertEqual(registration.name, "")
        self.assertTrue(registration.factory is _handler)

    def test_w_adapts(self):
        # Explicitly passed interfaces take the place of a declaration.
        from guillotina.component.globalregistry import get_global_components
        from zope.interface import Interface

        class IFoo(Interface):
            pass

        def _handler(context):
            assert 0, "DON'T GO HERE"

        self._callFUT(_handler, (IFoo,))
        registry = get_global_components()
        registrations = list(registry.registeredHandlers())
        self.assertEqual(len(registrations), 1)
        registration = registrations[0]
        self.assertEqual(list(registration.required), [IFoo])
        self.assertEqual(registration.name, "")
        self.assertTrue(registration.factory is _handler)
def test_suite():
    """Aggregate every TestCase in this module into a single suite."""
    cases = (
        Test_get_global_components,
        Test_provide_utility,
        Test_provide_adapter,
        Test_provide_subscription_adapter,
        Test_provide_handler,
    )
    return unittest.TestSuite([unittest.makeSuite(case) for case in cases])
| 30.126812
| 84
| 0.638244
| 886
| 8,315
| 5.826185
| 0.141084
| 0.059667
| 0.098024
| 0.066835
| 0.810151
| 0.766757
| 0.750678
| 0.735955
| 0.720845
| 0.688687
| 0
| 0.003103
| 0.26362
| 8,315
| 275
| 85
| 30.236364
| 0.839948
| 0.056284
| 0
| 0.723958
| 0
| 0
| 0.005731
| 0
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.114583
| false
| 0.098958
| 0.197917
| 0.005208
| 0.489583
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
3ff95e8064490e102d1489543906b829bfd408c2
| 152,113
|
py
|
Python
|
src/openprocurement/tender/limited/tests/award_blanks.py
|
pontostroy/api
|
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
|
[
"Apache-2.0"
] | 3
|
2020-03-13T06:44:23.000Z
|
2020-11-05T18:25:29.000Z
|
src/openprocurement/tender/limited/tests/award_blanks.py
|
pontostroy/api
|
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
|
[
"Apache-2.0"
] | 2
|
2021-03-25T23:29:58.000Z
|
2022-03-21T22:18:37.000Z
|
src/openprocurement/tender/limited/tests/award_blanks.py
|
pontostroy/api
|
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
|
[
"Apache-2.0"
] | 3
|
2020-10-16T16:25:14.000Z
|
2021-05-22T12:26:20.000Z
|
# -*- coding: utf-8 -*-
from copy import deepcopy
from datetime import timedelta
import dateutil
from mock import patch
from openprocurement.api.constants import RELEASE_2020_04_19
from openprocurement.api.utils import get_now
from openprocurement.tender.core.tests.base import change_auth
from openprocurement.tender.core.tests.cancellation import activate_cancellation_after_2020_04_19
from openprocurement.tender.belowthreshold.tests.base import (
test_organization,
test_author,
test_cancellation,
test_complaint,
test_claim,
test_draft_claim,
test_draft_complaint,
)
# TenderAwardResourceTest
def create_tender_award_invalid(self):
    """Blank for TenderAwardResourceTest: award-creation validation errors.

    Posts a series of malformed award payloads and checks that the API
    answers with the matching 4xx status and a structured ``errors`` body;
    finally verifies awards cannot be created once the tender is complete.
    """
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    # Non-JSON content type is rejected before the body is parsed.
    response = self.app.post(request_path, "data", status=415)
    self.assertEqual(response.status, "415 Unsupported Media Type")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"],
        [
            {
                u"description": u"Content-Type header should be one of ['application/json']",
                u"location": u"header",
                u"name": u"Content-Type",
            }
        ],
    )
    # Correct content type but a body that is not valid JSON.
    response = self.app.post(request_path, "data", content_type="application/json", status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"],
        [{u"description": u"No JSON object could be decoded", u"location": u"body", u"name": u"data"}],
    )
    # Valid JSON, but a bare string instead of an object.
    response = self.app.post_json(request_path, "data", status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Data not available", u"location": u"body", u"name": u"data"}]
    )
    # JSON object without the required top-level "data" key.
    response = self.app.post_json(request_path, {"not_data": {}}, status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Data not available", u"location": u"body", u"name": u"data"}]
    )
    # Unknown field inside "data" is rejected as a rogue field.
    response = self.app.post_json(request_path, {"data": {"invalid_field": "invalid_value"}}, status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Rogue field", u"location": u"body", u"name": u"invalid_field"}]
    )
    # Schema error nested inside a supplier: identifier must be a mapping.
    response = self.app.post_json(request_path, {"data": {"suppliers": [{"identifier": "invalid_value"}]}}, status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"],
        [
            {
                u"description": {
                    u"identifier": [u"Please use a mapping for this field or Identifier instance instead of unicode."]
                },
                u"location": u"body",
                u"name": u"suppliers",
            }
        ],
    )
    # Unknown tender id in the URL -> 404 for POST and GET alike.
    response = self.app.post_json(
        "/tenders/some_id/awards", {"data": {"suppliers": [test_organization], "bid_id": "some_id"}}, status=404
    )
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )
    response = self.app.get("/tenders/some_id/awards", status=404)
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )
    # Once the tender is complete, award creation is forbidden.
    self.set_status("complete")
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "status": "pending"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't create award in current (complete) tender status"
    )
def create_tender_award(self):
    """Blank for TenderAwardResourceTest: happy-path award lifecycle.

    Creates a pending award, checks the echoed fields (items are accepted
    on input but not echoed back), patches description/subcontracting
    details, then walks the status transitions pending -> active ->
    cancelled, verifying each response.
    """
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {
            "data": {
                "suppliers": [test_organization],
                "subcontractingDetails": "Details",
                "items": self.test_tender_data_local["items"],
                "status": "pending",
                "qualified": True,
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.assertEqual(award["suppliers"][0]["name"], test_organization["name"])
    self.assertIn("id", award)
    # "items" was sent in the payload but must not appear on the award.
    self.assertNotIn("items", award)
    self.assertIn(award["id"], response.headers["Location"])
    self.assertEqual(response.json["data"]["subcontractingDetails"], "Details")
    # "qualified" is only meaningful outside of reporting procedures.
    if self.initial_data["procurementMethodType"] == "reporting":
        self.assertNotIn("qualified", award)
    else:
        self.assertEqual(award["qualified"], True)
    # The award collection lists the new award last.
    response = self.app.get(request_path)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"][-1], award)
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"description": "description data"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"subcontractingDetails": "subcontractingDetails"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["subcontractingDetails"], "subcontractingDetails")
    # Activate the award; the tender status endpoint then reports "active".
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"active")
    response = self.app.get("/tenders/{}".format(self.tender_id))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"active")
    # An active award can still be cancelled.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"cancelled")
def canceling_created_award_and_create_new_one(self):
    """Blank: only one pending/active award may exist at a time.

    Creates an award, shows that creating a second one is forbidden while
    the first is pending or active, cancels the first, then successfully
    creates a replacement, attaches a document to it, and activates it.
    """
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path, {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending"}}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.assertEqual(award["suppliers"][0]["name"], test_organization["name"])
    self.assertIn("id", award)
    self.assertIn(award["id"], response.headers["Location"])
    response = self.app.get(request_path)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"][-1], award)
    # A second award is rejected while the first is still pending.
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "status": "pending"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't create new award while any (pending) award exists"
    )
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"active")
    # ... and also while it is active.
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "status": "pending"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't create new award while any (active) award exists"
    )
    # Cancelling the first award unblocks creation of a new one.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"cancelled")
    # Create new award
    response = self.app.post_json(
        request_path, {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending"}}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    new_award = response.json["data"]
    self.assertEqual(new_award["suppliers"][0]["name"], test_organization["name"])
    self.assertIn("id", new_award)
    self.assertIn(new_award["id"], response.headers["Location"])
    # Add document to new award
    response = self.app.post(
        "/tenders/{}/awards/{}/documents?acc_token={}".format(self.tender_id, new_award["id"], self.tender_token),
        upload_files=[("file", "name.doc", "content")],
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    doc_id = response.json["data"]["id"]
    self.assertIn(doc_id, response.headers["Location"])
    self.assertEqual("name.doc", response.json["data"]["title"])
    response = self.app.get("/tenders/{}/awards/{}/documents".format(self.tender_id, new_award["id"]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(doc_id, response.json["data"][0]["id"])
    # patch new award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, new_award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"active")
def patch_tender_award(self):
    """Blank: full PATCH matrix for an award.

    Covers: ignored "items" patch, 404s for unknown ids, rogue fields,
    editable fields (title, suppliers, value) while pending, the active /
    unsuccessful / cancelled transitions and which edits each one forbids,
    and finally that nothing is editable once the tender is complete.
    """
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {"data": {"suppliers": [test_organization], "qualified": True, "status": u"pending", "value": {"amount": 500}}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # Patching "items" is silently ignored: the endpoint returns JSON null.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"items": self.test_tender_data_local["items"]}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.body, u"null")
    # Unknown award id / unknown tender id -> 404 with the offending URL part.
    response = self.app.patch_json(
        "/tenders/{}/awards/some_id".format(self.tender_id), {"data": {"status": "unsuccessful"}}, status=404
    )
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"award_id"}]
    )
    response = self.app.patch_json("/tenders/some_id/awards/some_id", {"data": {"status": "unsuccessful"}}, status=404)
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )
    # Unknown field in the patch payload is a rogue field.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"awardStatus": "unsuccessful"}},
        status=422,
    )
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"], [{"location": "body", "name": "awardStatus", "description": "Rogue field"}]
    )
    # set/update award title
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"title": "award title"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["title"], "award title")
    self.assertNotIn("items", response.json["data"])
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"title": "award title2"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["title"], "award title2")
    # update supplier info
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"suppliers": [{"name": "another supplier"}]}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["suppliers"][0]["name"], "another supplier")
    # update value
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"value": {"amount": 499}}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["value"]["amount"], 499)
    # change status
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    # try to update award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"title": "award title"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can't update award in current (active) status")
    # patch status for create new award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    # create new award and test other states
    response = self.app.post_json(
        request_path, {"data": {"suppliers": [test_organization], "status": u"pending", "value": {"amount": 500}}}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # An unsuccessful award is terminal: no further status changes allowed.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "pending"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["errors"][0]["description"], "Can't update award in current (unsuccessful) status")
    # Third award: activate it and complete the tender via its contract.
    response = self.app.post_json(
        request_path, {"data": {"suppliers": [test_organization], "status": u"pending", "value": {"amount": 500}}}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active", "qualified": True}},
    )
    self.assertEqual(response.status, "200 OK")
    active_award = award
    response = self.app.get(request_path)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(len(response.json["data"]), 3)
    # sign contract to complete tender
    # Close each award's complaint period directly in the DB so the
    # contract can be signed immediately.
    tender = self.db.get(self.tender_id)
    for i in tender.get("awards", []):
        if i.get("complaintPeriod", {}):  # works for negotiation tender
            i["complaintPeriod"]["endDate"] = i["complaintPeriod"]["startDate"]
    self.db.save(tender)
    response = self.app.get("/tenders/{}/contracts".format(self.tender_id))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(len(response.json["data"]), 2)
    contract = response.json["data"][1]
    self.assertEqual(contract["awardID"], active_award["id"])
    response = self.app.patch_json(
        "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token),
        {"data": {"status": "active", "value": {"valueAddedTaxIncluded": False}}},
    )
    self.assertEqual(response.status, "200 OK")
    response = self.app.get("/tenders/{}/awards/{}".format(self.tender_id, award["id"]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["value"]["amount"], 500)
    # Once the tender is complete, award updates are forbidden.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't update award in current (complete) tender status"
    )
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "unsuccessful"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't update award in current (complete) tender status"
    )
def check_tender_award_complaint_period_dates(self):
    """Blank: an unsuccessful award gets a complaintPeriod with a startDate."""
    # self.app.authorization = ("Basic", ("token", ""))
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {"data": {"suppliers": [test_organization], "qualified": True, "status": u"pending", "value": {"amount": 500}}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # Moving the award to unsuccessful must open its complaint period.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    updated_award = response.json["data"]
    self.assertIn("startDate", updated_award["complaintPeriod"])
def patch_tender_award_unsuccessful(self):
    """Blank: an unsuccessful award is frozen.

    After the pending -> unsuccessful transition, every further edit
    (title, status changes, document upload) must be answered with 403.
    """
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {"data": {"suppliers": [test_organization], "qualified": True, "status": u"pending", "value": {"amount": 500}}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    # Field edits are forbidden.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"title": "award title"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can't update award in current (unsuccessful) status")
    # Status transitions are forbidden, in either direction.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can't update award in current (unsuccessful) status")
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "cancelled"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can't update award in current (unsuccessful) status")
    # Document upload is forbidden too.
    response = self.app.post(
        "/tenders/{}/awards/{}/documents?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        upload_files=[("file", "name.doc", "content")],
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't add document in current (unsuccessful) award status"
    )
def get_tender_award(self):
    """Blank: GET on a single award returns the created data; unknown ids 404."""
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending"}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # Reading the award back yields exactly what creation returned.
    response = self.app.get("/tenders/{}/awards/{}".format(self.tender_id, award["id"]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    award_data = response.json["data"]
    self.assertEqual(award_data, award)
    # Unknown award id -> 404 pointing at the award_id URL segment.
    response = self.app.get("/tenders/{}/awards/some_id".format(self.tender_id), status=404)
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"award_id"}]
    )
    # Unknown tender id -> 404 pointing at the tender_id URL segment.
    response = self.app.get("/tenders/some_id/awards/some_id", status=404)
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )
def activate_contract_with_cancelled_award(self):
    """Blank: a contract whose award was cancelled cannot be activated."""
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending"}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.json["data"]["status"], "pending")
    award = response.json["data"]
    # Activate award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    # Get contract
    response = self.app.get("/tenders/{}/contracts".format(self.tender_id))
    self.assertEqual(response.status, "200 OK")
    contract = response.json["data"][0]
    # Cancel award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    # Try to sign in contract
    response = self.app.patch_json(
        "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token),
        {"data": {"status": "active"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can't update contract in current (cancelled) status")
# TenderAwardComplaintResourceTest
def create_tender_award_complaints(self):
    """Blank for TenderAwardComplaintResourceTest: complaints endpoint absent.

    Creates an award, then shows that POSTing a complaint against it is
    answered with 404 (this procedure type exposes no complaints route).
    """
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path, {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending"}}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # Stash the id on self for use by other blanks run on the same case.
    self.award_id = award["id"]
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
        status=404,
    )
    self.assertEqual(response.status, "404 Not Found")
# TenderNegotiationAwardResourceTest
def patch_tender_award_Administrator_change(self):
    """An administrator may patch an award, including its complaintPeriod dates."""
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "status": "pending"}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # Close the complaint period immediately: both bounds set to "now".
    moment = get_now().isoformat()
    award["complaintPeriod"] = {"startDate": moment, "endDate": moment}
    saved_auth = self.app.authorization
    self.app.authorization = ("Basic", ("administrator", ""))
    response = self.app.patch_json(
        "/tenders/{}/awards/{}".format(self.tender_id, award["id"]),
        {"data": award},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertIn("endDate", response.json["data"]["complaintPeriod"])
    self.assertEqual(response.json["data"], award)
def patch_active_not_qualified(self):
    """An award without qualified=True cannot be switched to active (403)."""
    awards_url = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        awards_url,
        {"data": {"suppliers": [test_organization], "subcontractingDetails": "Details", "status": "pending"}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.assertEqual(award["suppliers"][0]["name"], test_organization["name"])
    self.assertIn("id", award)
    self.assertIn(award["id"], response.headers["Location"])
    self.assertEqual(response.json["data"]["subcontractingDetails"], "Details")
    # The freshly created award is the last entry in the listing.
    response = self.app.get(awards_url)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"][-1], award)
    # Activation must be rejected because "qualified" was never set.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't update award to active status with not qualified"
    )
def create_two_awards_on_one_lot(self):
    """Only one pending award per lot is allowed: a second POST on the same lot gets 403.

    Steps: add a lot, link the tender item to it, create a pending award on the lot,
    then verify that an identical second award on the same lot is rejected.
    """
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    response = self.app.patch_json(
        "/tenders/{}?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"items": [{"relatedLot": lot["id"]}]}},
    )
    self.assertEqual(response.status, "200 OK")
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    # Same payload for both attempts; only the expected outcome differs.
    award_data = {
        "suppliers": [test_organization],
        "subcontractingDetails": "Details",
        "status": "pending",
        "lotID": lot["id"],
    }
    response = self.app.post_json(request_path, {"data": award_data})
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    # Second award on the same lot while the first is still pending -> 403.
    response = self.app.post_json(request_path, {"data": award_data}, status=403)
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't create new award on lot while any (pending) award exists"
    )
# TenderNegotiationLotAwardResourceTest
def create_award_with_lot(self):
    """On a tender with lots an award requires lotID: 422 without it, 201 with it."""
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    self.assertEqual(lot["value"]["currency"], "UAH")
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    award_data = {
        "suppliers": [test_organization],
        "subcontractingDetails": "Details",
        "status": "pending",
        "qualified": True,
    }
    # Without lotID the award is rejected with a validation error.
    response = self.app.post_json(request_path, {"data": award_data}, status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{"location": "body", "name": "lotID", "description": ["This field is required."]}]
    )
    # With lotID the award is created.
    award_data["lotID"] = lot["id"]
    response = self.app.post_json(request_path, {"data": award_data})
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # Check the returned fields.
    self.assertEqual(award["suppliers"][0]["name"], test_organization["name"])
    self.assertIn("id", award)
    self.assertIn(award["id"], response.headers["Location"])
    self.assertEqual(response.json["data"]["subcontractingDetails"], "Details")
    if self.initial_data["procurementMethodType"] == "reporting":
        self.assertNotIn("qualified", award)
    else:
        self.assertEqual(award["qualified"], True)
    # The award we just created is the last one in the listing.
    response = self.app.get(request_path)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"][-1], award)
def create_tender_award_with_lot(self):
    """Full lifecycle of a lot award: create, patch fields, activate, then cancel.

    Also checks that activating the award moves the tender to 'active' status
    and that the award references exactly the created lot.
    """
    # create lot
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {
            "data": {
                "suppliers": [test_organization],
                "subcontractingDetails": "Details",
                "status": "pending",
                "qualified": True,
                "lotID": lot["id"],
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.assertEqual(award["suppliers"][0]["name"], test_organization["name"])
    self.assertIn("id", award)
    self.assertIn(award["id"], response.headers["Location"])
    # BUG FIX: was assertIn(award["lotID"], lot["id"]) — a substring check that only
    # passed because equal strings contain each other; the intent is exact equality.
    self.assertEqual(award["lotID"], lot["id"])
    self.assertEqual(response.json["data"]["subcontractingDetails"], "Details")
    if self.initial_data["procurementMethodType"] == "reporting":
        self.assertNotIn("qualified", award)
    else:
        self.assertEqual(award["qualified"], True)
    response = self.app.get(request_path)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"][-1], award)
    # Patch description and subcontractingDetails while the award is pending.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"description": "description data"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"subcontractingDetails": "subcontractingDetails"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["subcontractingDetails"], "subcontractingDetails")
    # Activating the award also makes the tender active.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"active")
    response = self.app.get("/tenders/{}".format(self.tender_id))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"active")
    # Cancel the award.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"cancelled")
def canceling_created_lot_award_and_create_new_one(self):
    """A lot admits only one pending/active award; cancelling it frees the slot.

    Flow: create lot -> create award -> verify a second award is blocked while
    the first is pending and while it is active -> cancel it -> create a
    replacement award, attach a document to it, and activate it.
    """
    # create lot
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    # create award
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending", "lotID": lot["id"]}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.assertEqual(award["suppliers"][0]["name"], test_organization["name"])
    self.assertIn("id", award)
    self.assertIn(award["id"], response.headers["Location"])
    response = self.app.get(request_path)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"][-1], award)
    # a second award on the same lot is blocked while the first is pending
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "status": "pending", "lotID": lot["id"]}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't create new award on lot while any (pending) award exists"
    )
    # activate the first award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"active")
    # still blocked while the first award is active (different error message)
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "status": "pending", "lotID": lot["id"]}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't create new award on lot while any (active) award exists"
    )
    # cancel the first award to free the lot
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"cancelled")
    # Create new award
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending", "lotID": lot["id"]}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    new_award = response.json["data"]
    self.assertEqual(new_award["suppliers"][0]["name"], test_organization["name"])
    self.assertIn("id", new_award)
    self.assertIn(new_award["id"], response.headers["Location"])
    # Add document to new award
    response = self.app.post(
        "/tenders/{}/awards/{}/documents?acc_token={}".format(self.tender_id, new_award["id"], self.tender_token),
        upload_files=[("file", "name.doc", "content")],
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    doc_id = response.json["data"]["id"]
    self.assertIn(doc_id, response.headers["Location"])
    self.assertEqual("name.doc", response.json["data"]["title"])
    # the document is listed on the award
    response = self.app.get("/tenders/{}/awards/{}/documents".format(self.tender_id, new_award["id"]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(doc_id, response.json["data"][0]["id"])
    # patch new award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, new_award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], u"active")
def patch_tender_lot_award(self):
    """Exercise patching of a lot award across its whole status lifecycle.

    Covers: ignored 'items' patch, 404s for unknown award/tender ids, rejection
    of rogue fields (422), editable fields while pending (title, suppliers,
    value), frozen 'active' and 'unsuccessful' states, and finally that awards
    can no longer be patched once the tender is complete.
    """
    # create lot
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {
            "data": {
                "suppliers": [test_organization],
                "qualified": True,
                "status": u"pending",
                "value": {"amount": 500},
                "lotID": lot["id"],
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # patching 'items' on an award is a no-op: the server answers 200 with a null body
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"items": self.test_tender_data_local["items"]}},
    )
    self.assertEqual(response.status, "200 OK")
    # NOTE(review): response.body is bytes under Python 3 — confirm this comparison holds
    self.assertEqual(response.body, u"null")
    # unknown award id -> 404
    response = self.app.patch_json(
        "/tenders/{}/awards/some_id".format(self.tender_id), {"data": {"status": "unsuccessful"}}, status=404
    )
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"award_id"}]
    )
    # unknown tender id -> 404
    response = self.app.patch_json("/tenders/some_id/awards/some_id", {"data": {"status": "unsuccessful"}}, status=404)
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )
    # a field not on the award model is rejected as "Rogue field"
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"awardStatus": "unsuccessful"}},
        status=422,
    )
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"], [{"location": "body", "name": "awardStatus", "description": "Rogue field"}]
    )
    # set/update award title
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"title": "award title"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["title"], "award title")
    self.assertNotIn("items", response.json["data"])
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"title": "award title2"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["title"], "award title2")
    # update supplier info
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"suppliers": [{"name": "another supplier"}]}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["suppliers"][0]["name"], "another supplier")
    # update value
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"value": {"amount": 499}}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["value"]["amount"], 499)
    # change status
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    # try to update award: active awards are frozen
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"title": "award title"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can't update award in current (active) status")
    # patch status for create new award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    # create new award and test other states
    response = self.app.post_json(
        request_path,
        {
            "data": {
                "suppliers": [test_organization],
                "status": u"pending",
                "value": {"amount": 500},
                "lotID": lot["id"],
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    # unsuccessful is terminal: no transition back to pending
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "pending"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["errors"][0]["description"], "Can't update award in current (unsuccessful) status")
    # third award: qualify and activate in one patch
    response = self.app.post_json(
        request_path,
        {
            "data": {
                "suppliers": [test_organization],
                "status": u"pending",
                "value": {"amount": 500},
                "lotID": lot["id"],
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active", "qualified": True}},
    )
    self.assertEqual(response.status, "200 OK")
    active_award = award
    # all three awards (cancelled, unsuccessful, active) are listed
    response = self.app.get(request_path)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(len(response.json["data"]), 3)
    # sign contract to complete tender: close complaint periods directly in the DB
    tender = self.db.get(self.tender_id)
    for i in tender.get("awards", []):
        if i.get("complaintPeriod", {}):  # works for negotiation tender
            i["complaintPeriod"]["endDate"] = i["complaintPeriod"]["startDate"]
    self.db.save(tender)
    # the cancelled and active awards each produced a contract; the second belongs to the active one
    response = self.app.get("/tenders/{}/contracts".format(self.tender_id))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(len(response.json["data"]), 2)
    contract = response.json["data"][1]
    self.assertEqual(contract["awardID"], active_award["id"])
    response = self.app.patch_json(
        "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token),
        {"data": {"status": "active", "value": {"valueAddedTaxIncluded": False}}},
    )
    self.assertEqual(response.status, "200 OK")
    # the award value is unchanged by contract activation
    response = self.app.get("/tenders/{}/awards/{}".format(self.tender_id, award["id"]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["value"]["amount"], 500)
    # once the tender is complete, awards are immutable
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't update award in current (complete) tender status"
    )
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "unsuccessful"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't update award in current (complete) tender status"
    )
def patch_tender_lot_award_unsuccessful(self):
    """An unsuccessful lot award is frozen: no further edits or documents allowed."""
    # create lot
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    # create award
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {
            "data": {
                "suppliers": [test_organization],
                "qualified": True,
                "status": u"pending",
                "value": {"amount": 500},
                "lotID": lot["id"],
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    award_url = "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token)
    response = self.app.patch_json(award_url, {"data": {"status": "unsuccessful"}})
    self.assertEqual(response.status, "200 OK")
    # Every further modification attempt is rejected with the same error.
    for attempt in ({"title": "award title"}, {"status": "active"}, {"status": "cancelled"}):
        response = self.app.patch_json(award_url, {"data": attempt}, status=403)
        self.assertEqual(response.status, "403 Forbidden")
        self.assertEqual(
            response.json["errors"][0]["description"], "Can't update award in current (unsuccessful) status"
        )
    # Document upload is rejected too.
    response = self.app.post(
        "/tenders/{}/awards/{}/documents?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        upload_files=[("file", "name.doc", "content")],
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't add document in current (unsuccessful) award status"
    )
def get_tender_lot_award(self):
    """A lot award can be fetched by id; unknown award/tender ids yield 404."""
    # create lot
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending", "lotID": lot["id"]}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    created = response.json["data"]
    # Fetching by id returns exactly what was created.
    response = self.app.get("/tenders/{}/awards/{}".format(self.tender_id, created["id"]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"], created)
    # Unknown award id and unknown tender id both produce a 404 error body.
    for url, missing in (
        ("/tenders/{}/awards/some_id".format(self.tender_id), u"award_id"),
        ("/tenders/some_id/awards/some_id", u"tender_id"),
    ):
        response = self.app.get(url, status=404)
        self.assertEqual(response.status, "404 Not Found")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["status"], "error")
        self.assertEqual(
            response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": missing}]
        )
def two_lot_two_awards(self):
    """One pending award per lot: awards on two different lots are fine, duplicates are not.

    Also verifies that no new lot may be added once awards exist, and that the
    lot count stays unchanged after the rejected attempt.
    """
    self.app.patch_json(
        "/tenders/{}?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"items": self.test_tender_negotiation_data_local["items"] * 2}},
    )
    # create two lots
    lots = []
    for _ in range(2):
        response = self.app.post_json(
            "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token),
            {"data": self.test_lots_data[0]},
        )
        self.assertEqual(response.status, "201 Created")
        self.assertEqual(response.content_type, "application/json")
        lots.append(response.json["data"])
    lot1, lot2 = lots
    self.app.patch_json(
        "/tenders/{}?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"items": [{"relatedLot": lot1["id"]}, {"relatedLot": lot2["id"]}]}},
    )
    awards_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    # Same payload for all attempts; only the lotID varies.
    base_award = {
        "suppliers": [test_organization],
        "subcontractingDetails": "Details",
        "status": "pending",
        "qualified": True,
    }
    # one pending award per lot succeeds
    for lot in (lot1, lot2):
        response = self.app.post_json(awards_path, {"data": dict(base_award, lotID=lot["id"])})
        self.assertEqual(response.status, "201 Created")
        self.assertEqual(response.content_type, "application/json")
    # a second pending award on either lot is rejected
    for lot in (lot1, lot2):
        response = self.app.post_json(awards_path, {"data": dict(base_award, lotID=lot["id"])}, status=403)
        self.assertEqual(response.status, "403 Forbidden")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(
            response.json["errors"][0]["description"],
            "Can't create new award on lot while any (pending) award exists",
        )
    # BUG FIX: the original URL read "acc_token{}" (missing '='), producing a malformed query string.
    response = self.app.get("/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token))
    lots_len = len(response.json["data"])
    # adding a lot is forbidden once awards exist
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": self.test_lots_data[0]},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["errors"][0]["description"], "Can't add lot when you have awards")
    # the lot count is unchanged after the rejected attempt
    response = self.app.get("/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(len(response.json["data"]), lots_len)
def cancel_award(self):
    """Cancelling an active lot award also cancels the contract created from it."""
    # create lot
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    # create award
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending", "lotID": lot["id"]}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award_url = "/tenders/{}/awards/{}?acc_token={}".format(
        self.tender_id, response.json["data"]["id"], self.tender_token
    )
    # Activating the award creates a contract.
    response = self.app.patch_json(award_url, {"data": {"status": "active"}})
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    response = self.app.get("/tenders/{}/contracts".format(self.tender_id))
    self.assertEqual(len(response.json["data"]), 1)
    contract = response.json["data"][0]
    # Cancelling the award cascades to its contract.
    response = self.app.patch_json(award_url, {"data": {"status": "cancelled"}})
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    response = self.app.get("/tenders/{}/contracts/{}".format(self.tender_id, contract["id"]))
    self.assertEqual(response.json["data"]["status"], "cancelled")
def create_award_on_cancel_lot(self):
    """Creating an award on a lot with a cancellation must fail with 403.

    The expected error message depends on the 2020-04-19 cancellation rules:
    after that date a cancellation needs an uploaded document and explicit
    activation, and the tender itself ends up cancelled.
    """
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    response = self.app.patch_json(
        "/tenders/{}?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"items": [{"relatedLot": lot["id"]}]}},
    )
    self.assertEqual(response.status, "200 OK")
    # Create cancellation on lot
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "cancellationOf": "lot",
        "relatedLot": lot["id"],
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    cancellation_id = response.json["data"]["id"]
    if RELEASE_2020_04_19 < get_now():
        # post-2020-04-19 rules: upload a document, then activate the cancellation
        response = self.app.post(
            "/tenders/{}/cancellations/{}/documents?acc_token={}".format(
                self.tender_id, cancellation_id, self.tender_token
            ),
            upload_files=[("file", "name.doc", "content")],
        )
        self.assertEqual(response.status, "201 Created")
        response = self.app.patch_json(
            "/tenders/{}/cancellations/{}?acc_token={}".format(self.tender_id, cancellation_id, self.tender_token),
            {"data": {"status": "active"}},
        )
    else:
        # pre-2020-04-19 rules: the cancellation stays pending
        self.assertEqual(response.json["data"]["status"], "pending")
    # an award on the cancelled/cancelling lot is rejected either way
    response = self.app.post_json(
        "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token),
        {
            "data": {
                "suppliers": [test_organization],
                "subcontractingDetails": "Details",
                "status": "pending",
                "lotID": lot["id"],
            }
        },
        status=403,
    )
    if RELEASE_2020_04_19 < get_now():
        self.assertEqual(response.status, "403 Forbidden")
        self.assertEqual(
            response.json["errors"][0]["description"],
            "Can't create award in current (cancelled) tender status",
        )
    else:
        self.assertEqual(response.status, "403 Forbidden")
        self.assertEqual(
            response.json["errors"][0]["description"],
            "Can't add award while cancellation for corresponding lot exists",
        )
def patch_award_on_cancel_lot(self):
    """Editing an award must be forbidden while its lot is being cancelled.

    Flow: create a lot, relate the tender item to it, create a pending award
    on the lot, then file a lot cancellation.  After RELEASE_2020_04_19 the
    cancellation only takes effect once a document is attached and it is
    patched to "active"; before the release it is effective immediately.
    In both cases a subsequent award patch must return 403 with the
    release-appropriate error message.
    """
    response = self.app.post_json(
        "/tenders/{}/lots?acc_token={}".format(self.tender_id, self.tender_token), {"data": self.test_lots_data[0]}
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    lot = response.json["data"]
    # Relate the tender item to the new lot so awards/cancellations can target it.
    response = self.app.patch_json(
        "/tenders/{}?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": {"items": [{"relatedLot": lot["id"]}]}},
    )
    self.assertEqual(response.status, "200 OK")
    # Create award
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {
            "data": {
                "suppliers": [test_organization],
                "subcontractingDetails": "Details",
                "status": "pending",
                "lotID": lot["id"],
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    award = response.json["data"]
    # Create cancellation on lot
    cancellation = dict(**test_cancellation)
    cancellation.update({
        "cancellationOf": "lot",
        "relatedLot": lot["id"]
    })
    response = self.app.post_json(
        "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token),
        {"data": cancellation},
    )
    self.assertEqual(response.status, "201 Created")
    cancellation_id = response.json["data"]["id"]
    if RELEASE_2020_04_19 < get_now():
        # Post-release flow: the cancellation becomes effective only after a
        # document is attached and its status is patched to "active".
        response = self.app.post(
            "/tenders/{}/cancellations/{}/documents?acc_token={}".format(
                self.tender_id, cancellation_id, self.tender_token
            ),
            upload_files=[("file", "name.doc", "content")],
        )
        self.assertEqual(response.status, "201 Created")
        response = self.app.patch_json(
            "/tenders/{}/cancellations/{}?acc_token={}".format(self.tender_id, cancellation_id, self.tender_token),
            {"data": {"status": "active"}},
        )
    self.app.get("/tenders/{}/cancellations".format(self.tender_id))
    # Try to edit award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, award["id"], self.tender_token),
        {"data": {"status": "active"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    if RELEASE_2020_04_19 < get_now():
        # Post-release the whole tender is treated as cancelled.
        self.assertEqual(
            response.json["errors"][0]["description"],
            "Can't update award in current (cancelled) tender status",
        )
    else:
        # Pre-release only the pending lot cancellation blocks the edit.
        self.assertEqual(
            response.json["errors"][0]["description"],
            "Can't update award while cancellation for corresponding lot exists",
        )
# TenderNegotiationAwardComplaintResourceTest
def create_tender_award_complaint_invalid(self):
    """Invalid complaint payloads must be rejected with descriptive errors.

    Covers: unknown tender id (404), non-JSON content type (415), a body
    that is not decodable JSON, JSON without a "data" mapping, empty data
    with required fields reported, a rogue field, and a malformed nested
    author.identifier (all 422).
    """
    # Unknown tender id in the URL.
    response = self.app.post_json(
        "/tenders/some_id/awards/some_id/complaints",
        {"data": test_draft_claim},
        status=404,
    )
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )
    request_path = "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id)
    # Wrong content type.
    response = self.app.post(request_path, "data", status=415)
    self.assertEqual(response.status, "415 Unsupported Media Type")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"],
        [
            {
                u"description": u"Content-Type header should be one of ['application/json']",
                u"location": u"header",
                u"name": u"Content-Type",
            }
        ],
    )
    # JSON content type but an undecodable body.
    response = self.app.post(request_path, "data", content_type="application/json", status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"],
        [{u"description": u"No JSON object could be decoded", u"location": u"body", u"name": u"data"}],
    )
    # Valid JSON that is not an object with a "data" key.
    response = self.app.post_json(request_path, "data", status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Data not available", u"location": u"body", u"name": u"data"}]
    )
    response = self.app.post_json(request_path, {"not_data": {}}, status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Data not available", u"location": u"body", u"name": u"data"}]
    )
    # Empty data: every required field is reported.
    response = self.app.post_json(request_path, {"data": {}}, status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"],
        [
            {u"description": [u"This field is required."], u"location": u"body", u"name": u"author"},
            {u"description": [u"This field is required."], u"location": u"body", u"name": u"title"},
        ],
    )
    # Unknown ("rogue") field.
    response = self.app.post_json(request_path, {"data": {"invalid_field": "invalid_value"}}, status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Rogue field", u"location": u"body", u"name": u"invalid_field"}]
    )
    # Malformed nested model: author.identifier must be a mapping.
    response = self.app.post_json(request_path, {"data": {"author": {"identifier": "invalid_value"}}}, status=422)
    self.assertEqual(response.status, "422 Unprocessable Entity")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"],
        [
            {
                u"description": {
                    u"identifier": [u"Please use a mapping for this field or ComplaintIdentifier instance instead of unicode."]
                },
                u"location": u"body",
                u"name": u"author",
            }
        ],
    )
def create_tender_negotiation_award_complaints(self):
    """Complaints and claims can only be created on an active award.

    An unsuccessful award closes its complaintPeriod (403); a freshly
    activated award accepts both a complaint and a claim; once the tender
    itself is unsuccessful no further complaints are allowed.
    """
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "unsuccessful")
    # The unsuccessful award's complaintPeriod is closed: creation is refused.
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can add complaint only in complaintPeriod")
    # Create a fresh award (updates self.award_id) and activate it.
    self.create_award()
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    self.assertEqual(complaint["author"]["name"], test_organization["name"])
    self.assertIn("id", complaint)
    self.assertIn(complaint["id"], response.headers["Location"])
    self.assertEqual(complaint["type"], "complaint")
    # A claim may be filed on the same active award as well.
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_claim
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    self.assertEqual(complaint["author"]["name"], test_organization["name"])
    self.assertIn("id", complaint)
    self.assertIn(complaint["id"], response.headers["Location"])
    self.assertEqual(complaint["type"], "claim")
    response = self.app.get("/tenders/{}".format(self.tender_id))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], "active")
    # Once the whole tender is unsuccessful, complaints are refused.
    self.set_status("unsuccessful")
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {"data": test_draft_claim},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't add complaint in current (unsuccessful) tender status"
    )
def patch_tender_award_complaint(self):
    """Patching award complaints through the status state machine.

    Exercises, on both sides of RELEASE_2020_04_19: owner cancellation of a
    draft complaint (200 pre-release, 403 post-release), forbidden access
    with the tender token, a title edit, the draft->pending transition
    (owner pre-release, bot post-release), 404s for unknown award /
    complaint / tender ids, the pending->stopping transition (allowed only
    pre-release), and the freeze once the tender is complete.
    """
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], "active")
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {"data": test_draft_complaint},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    owner_token = response.json["access"]["token"]
    if get_now() < RELEASE_2020_04_19:
        # Pre-release the owner may cancel a draft complaint directly.
        response = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
                self.tender_id, self.award_id, complaint["id"], owner_token
            ),
            {"data": {"status": "cancelled", "cancellationReason": "reason"}},
            status=200,
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], "cancelled")
        self.assertEqual(response.json["data"]["cancellationReason"], "reason")
    else:
        # Post-release cancelling a draft complaint is forbidden.
        response = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
                self.tender_id, self.award_id, complaint["id"], owner_token
            ),
            {"data": {"status": "cancelled", "cancellationReason": "reason"}},
            status=403,
        )
        self.assertEqual(response.status, "403 Forbidden")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["errors"][0]["description"],
                         "Can't update draft complaint to cancelled status")
    # File a fresh draft claim for the remaining checks.
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {"data": test_draft_claim},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    owner_token = response.json["access"]["token"]
    # The tender owner's token must not grant access to the complaint.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
            self.tender_id, self.award_id, complaint["id"], self.tender_token
        ),
        {"data": {"status": "cancelled", "cancellationReason": "reason"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["errors"][0]["description"], "Forbidden")
    # The complaint owner may edit plain fields such as the title.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
            self.tender_id, self.award_id, complaint["id"], owner_token
        ),
        {"data": {"title": "claim title"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["title"], "claim title")
    if get_now() < RELEASE_2020_04_19:
        # Pre-release the owner moves the complaint to "pending"...
        response = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
                self.tender_id, self.award_id, complaint["id"], owner_token
            ),
            {"data": {"status": "pending"}},
        )
    else:
        # ...post-release only the bot may perform this transition.
        with change_auth(self.app, ("Basic", ("bot", ""))):
            response = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(
                    self.tender_id, self.award_id, complaint["id"]
                ),
                {"data": {"status": "pending"}},
            )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], "pending")
    # 404 checks: unknown award, complaint and tender ids.
    response = self.app.patch_json(
        "/tenders/{}/awards/some_id/complaints/some_id".format(self.tender_id),
        {"data": {"status": "resolved", "resolution": "resolution text"}},
        status=404,
    )
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"award_id"}]
    )
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/some_id".format(self.tender_id, self.award_id),
        {"data": {"status": "resolved", "resolution": "resolution text"}},
        status=404,
    )
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"complaint_id"}]
    )
    response = self.app.patch_json(
        "/tenders/some_id/awards/some_id/complaints/some_id",
        {"data": {"status": "resolved", "resolution": "resolution text"}},
        status=404,
    )
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )
    if RELEASE_2020_04_19 > get_now():
        # Pre-release the owner may move a pending complaint to "stopping",
        # but not from "stopping" to "cancelled".
        response = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
                self.tender_id, self.award_id, complaint["id"], owner_token
            ),
            {"data": {"status": "stopping", "cancellationReason": "reason"}},
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], "stopping")
        self.assertEqual(response.json["data"]["cancellationReason"], "reason")
        response = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
                self.tender_id, self.award_id, complaint["id"], owner_token
            ),
            {"data": {"status": "cancelled", "cancellationReason": "reason"}},
            status=403,
        )
        self.assertEqual(response.status, "403 Forbidden")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["errors"][0]["description"],
                         "Can't update complaint from stopping to cancelled status")
        response = self.app.get(
            "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"])
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], "stopping")
        self.assertEqual(response.json["data"]["cancellationReason"], "reason")
    else:
        # Post-release the "stopping" transition is forbidden entirely.
        response = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
                self.tender_id, self.award_id, complaint["id"], owner_token),
            {"data": {"status": "stopping", "cancellationReason": "reason"}},
            status=403,
        )
        self.assertEqual(response.status, "403 Forbidden")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(
            response.json["errors"][0]["description"], "Can't update complaint from pending to stopping status"
        )
    # No complaint updates are allowed once the tender is complete.
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {"data": test_draft_claim},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    owner_token = response.json["access"]["token"]
    self.set_status("complete")
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
            self.tender_id, self.award_id, complaint["id"], owner_token
        ),
        {"data": {"status": "claim"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't update complaint in current (complete) tender status"
    )
@patch("openprocurement.tender.core.views.complaint.RELEASE_2020_04_19", get_now() - timedelta(days=1))
def bot_patch_tender_award_complaint(self):
    """After RELEASE_2020_04_19 the bot may move a draft complaint to pending."""
    award_url = "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token)
    response = self.app.patch_json(award_url, {"data": {"status": "active"}})
    self.assertEqual(response.status, "200 OK")

    # File a draft complaint on the activated award.
    draft_data = deepcopy(test_draft_complaint)
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {"data": draft_data},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    owner_token = response.json["access"]["token"]

    # The bot performs the draft -> pending transition.
    complaint_url = "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
        self.tender_id, self.award_id, complaint["id"], owner_token
    )
    with change_auth(self.app, ("Basic", ("bot", ""))):
        response = self.app.patch_json(complaint_url, {"data": {"status": "pending"}})
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], "pending")
@patch("openprocurement.tender.core.views.complaint.RELEASE_2020_04_19", get_now() + timedelta(days=1))
def bot_patch_tender_award_complaint_forbidden(self):
    """Before RELEASE_2020_04_19 the bot must not move a draft complaint to pending."""
    award_url = "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token)
    response = self.app.patch_json(award_url, {"data": {"status": "active"}})

    # File a draft complaint on the activated award.
    draft_data = deepcopy(test_draft_complaint)
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {"data": draft_data},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    owner_token = response.json["access"]["token"]

    # The bot's transition attempt must be rejected pre-release.
    complaint_url = "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
        self.tender_id, self.award_id, complaint["id"], owner_token
    )
    with change_auth(self.app, ("Basic", ("bot", ""))):
        response = self.app.patch_json(
            complaint_url,
            {"data": {"status": "pending"}},
            status=403,
        )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't update complaint from draft to pending status"
    )
def review_tender_award_complaint(self):
    """Reviewer walks award complaints through the full review life cycle.

    For each terminal status a pending complaint gets a decision, is
    accepted where applicable, and is finally moved to that status.  Field
    requirements differ around RELEASE_2020_04_19: afterwards the bot must
    first move the draft to pending, review details are set on acceptance,
    and "invalid"/"stopped" require an explicit reject reason; beforehand
    review details are supplied together with the decision.
    """
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    for status in ["invalid", "stopped", "declined", "satisfied"]:
        self.app.authorization = ("Basic", ("broker", ""))
        response = self.app.post_json(
            "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
            {
                "data": test_complaint
            },
        )
        self.assertEqual(response.status, "201 Created")
        self.assertEqual(response.content_type, "application/json")
        complaint = response.json["data"]
        if RELEASE_2020_04_19 < get_now():
            # Post-release complaints start as drafts; only the bot may
            # move them to "pending".
            self.assertEqual(response.json["data"]["status"], "draft")
            with change_auth(self.app, ("Basic", ("bot", ""))):
                response = self.app.patch_json(
                    "/tenders/{}/awards/{}/complaints/{}".format(
                        self.tender_id, self.award_id, complaint["id"]),
                    {"data": {"status": "pending"}},
                )
            self.assertEqual(response.status, "200 OK")
            self.assertEqual(response.content_type, "application/json")
            self.assertEqual(response.json["data"]["status"], "pending")
        # Reviewer records the decision.
        self.app.authorization = ("Basic", ("reviewer", ""))
        response = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
            {"data": {"decision": "{} complaint".format(status), "rejectReasonDescription": "reject reason"}},
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["decision"], "{} complaint".format(status))
        self.assertEqual(response.json["data"]["rejectReasonDescription"], "reject reason")
        if status in ["declined", "satisfied", "stopped"]:
            # These terminal statuses are reached via "accepted".
            now = get_now()
            data = {"status": "accepted"}
            if RELEASE_2020_04_19 < now:
                # Post-release the review details are set on acceptance.
                data.update({
                    "reviewDate": now.isoformat(),
                    "reviewPlace": "some",
                })
            response = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
                {"data": data},
            )
            self.assertEqual(response.status, "200 OK")
            self.assertEqual(response.content_type, "application/json")
            self.assertEqual(response.json["data"]["status"], "accepted")
            if RELEASE_2020_04_19 < now:
                self.assertEqual(response.json["data"]["reviewPlace"], "some")
                self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
            now = get_now()
            data = {"decision": "accepted:{} complaint".format(status)}
            if RELEASE_2020_04_19 > now:
                # Pre-release the review details accompany the decision.
                data.update({
                    "reviewDate": now.isoformat(),
                    "reviewPlace": "some",
                })
            response = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
                {"data": data},
            )
            self.assertEqual(response.status, "200 OK")
            self.assertEqual(response.content_type, "application/json")
            self.assertEqual(response.json["data"]["decision"], "accepted:{} complaint".format(status))
            if RELEASE_2020_04_19 > now:
                self.assertEqual(response.json["data"]["reviewPlace"], "some")
                self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
        # Final transition into the target terminal status.
        now = get_now()
        data = {"status": status}
        if RELEASE_2020_04_19 < now:
            if status in ["invalid", "stopped"]:
                # Post-release these statuses require an explicit reject reason.
                data.update({
                    "rejectReason": "tenderCancelled",
                    "rejectReasonDescription": "reject reason description"
                })
        response = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
            {"data": data},
        )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], status)
def review_tender_award_stopping_complaint(self):
    """Pre-RELEASE_2020_04_19 the reviewer may resolve a pending award
    complaint directly with a decision and a terminal status.

    For each terminal status a fresh complaint is created by the broker and
    the reviewer patches it with a decision; "invalid" and "stopped"
    additionally require a reject reason.  The post-release behaviour is
    exercised in patch_tender_award_complaint, so nothing runs here after
    the release date (the previous dead ``else: pass`` branch and the
    unused ``owner_token`` local were removed).
    """
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    if RELEASE_2020_04_19 > get_now():
        for status in ["stopped", "declined", "mistaken", "invalid", "satisfied"]:
            self.app.authorization = ("Basic", ("broker", ""))
            response = self.app.post_json(
                "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
                {
                    "data": test_complaint
                },
            )
            self.assertEqual(response.status, "201 Created")
            self.assertEqual(response.content_type, "application/json")
            complaint = response.json["data"]
            url_patch_complaint = "/tenders/{}/awards/{}/complaints/{}".format(
                self.tender_id, self.award_id, complaint["id"]
            )
            self.app.authorization = ("Basic", ("reviewer", ""))
            data = {"decision": "decision", "status": status}
            if status in ["invalid", "stopped"]:
                # These terminal statuses require an explicit reject reason.
                data.update({
                    "rejectReason": "tenderCancelled",
                    "rejectReasonDescription": "reject reason description"
                })
            response = self.app.patch_json(url_patch_complaint, {"data": data})
            self.assertEqual(response.status, "200 OK")
            self.assertEqual(response.content_type, "application/json")
            self.assertEqual(response.json["data"]["decision"], "decision")
            self.assertEqual(response.json["data"]["status"], status)
def get_tender_award_complaint(self):
    """A stored award complaint is retrievable by id; unknown ids give 404."""
    self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    complaints_url = "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id)
    response = self.app.post_json(complaints_url, {"data": test_draft_claim})
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]

    # The stored complaint is returned unchanged.
    response = self.app.get("{}/{}".format(complaints_url, complaint["id"]))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"], complaint)

    # Unknown complaint id.
    response = self.app.get("{}/some_id".format(complaints_url), status=404)
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"complaint_id"}]
    )

    # Unknown tender id.
    response = self.app.get("/tenders/some_id/awards/some_id/complaints/some_id", status=404)
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )
def get_tender_award_complaints(self):
    """Listing award complaints; 404 for an unknown tender; 403 once the
    award complaintPeriod has ended."""
    self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    complaints_url = "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id)
    response = self.app.post_json(complaints_url, {"data": test_draft_claim})
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]

    # The listing contains the complaint we just filed.
    response = self.app.get(complaints_url)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"][0], complaint)

    # Unknown tender id.
    response = self.app.get("/tenders/some_id/awards/some_id/complaints", status=404)
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}]
    )

    # Close every award's complaintPeriod directly in the database; a new
    # complaint must then be refused.
    tender = self.db.get(self.tender_id)
    for award in tender.get("awards", []):
        award["complaintPeriod"]["endDate"] = award["complaintPeriod"]["startDate"]
    self.db.save(tender)
    response = self.app.post_json(complaints_url, {"data": test_draft_claim}, status=403)
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["errors"][0]["description"], "Can add complaint only in complaintPeriod")
def cancelled_award_with_complaint(self):
    """When a complaint on an award is satisfied and the owner then cancels
    that award, every award and contract of the tender must be moved to the
    "cancelled" status.

    Flow: first award -> unsuccessful; second award created and activated;
    a complaint on the second award is brought to "satisfied" by the
    reviewer; the second award is cancelled; finally the first award and
    the contract are verified to be cancelled as well.
    """
    # Move award to unsuccessful
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "unsuccessful")
    self.old_award_id = self.award_id
    # Create another award
    self.create_award()
    # Activate award
    self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    # Create complaint
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    if RELEASE_2020_04_19 < get_now():
        # Post-release complaints start as drafts; only the bot may move
        # them to "pending".
        self.assertEqual(response.json["data"]["status"], "draft")
        with change_auth(self.app, ("Basic", ("bot", ""))):
            response = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(
                    self.tender_id, self.award_id, complaint["id"]),
                {"data": {"status": "pending"}},
            )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], "pending")
    # Move complaint to satisfied
    self.app.authorization = ("Basic", ("reviewer", ""))
    now = get_now()
    data = {"status": "accepted"}
    if RELEASE_2020_04_19 < now:
        # Post-release the reviewer must supply review details on acceptance.
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "accepted")
    if RELEASE_2020_04_19 < now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # Make decision
    now = get_now()
    data = {"decision": "satisfied complaint"}
    if RELEASE_2020_04_19 > now:
        # Pre-release the review details accompany the decision instead.
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["decision"], "satisfied complaint")
    if RELEASE_2020_04_19 > now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # The reviewer is still authorized here, so no acc_token is needed.
    # (A stray extra format() argument for a non-existent fourth placeholder
    # used to be passed and silently ignored; it has been removed.)
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": {"status": "satisfied"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "satisfied")
    # Active award and then cancel it
    self.app.authorization = ("Basic", ("broker", ""))
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    # Let's check another award
    # From unsuccessful move to cancelled
    response = self.app.get(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.old_award_id, self.tender_token)
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    # And check contracts
    response = self.app.get("/tenders/{}/contracts?acc_token={}".format(self.tender_id, self.tender_token))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(len(response.json["data"]), 1)
    self.assertEqual(response.json["data"][0]["status"], "cancelled")
# TenderLotNegotiationAwardComplaintResourceTest
def create_tender_lot_award_complaints(self):
    """Lot-award complaints obey the award's complaintPeriod.

    Complaints are rejected on an unsuccessful award, accepted on a newly
    activated lot award, rejected again once that award is cancelled, and
    rejected once the tender itself is unsuccessful.
    """
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "unsuccessful")
    # complaintPeriod of the unsuccessful award is closed.
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can add complaint only in complaintPeriod")
    # Create and activate a new award on the lot (updates self.award_id).
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending", "lotID": self.lot_id}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.award_id = award["id"]
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    # An active lot award accepts complaints.
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    self.assertEqual(complaint["author"]["name"], test_organization["name"])
    self.assertIn("id", complaint)
    self.assertIn(complaint["id"], response.headers["Location"])
    # Cancelling the award closes its complaintPeriod again.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.json["errors"][0]["description"], "Can add complaint only in complaintPeriod")
    response = self.app.get("/tenders/{}".format(self.tender_id))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["status"], "active")
    # Complaints are refused once the tender itself is unsuccessful.
    self.set_status("unsuccessful")
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {"data": test_draft_claim},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't add complaint in current (unsuccessful) tender status"
    )
def cancelled_lot_award_with_complaint(self):
    """ When complaint on award in satisfied status and owner cancel award,
    then all awards (with same lotID) and contracts must move to status cancelled """
    # Move award to unsuccessful
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    # Remember the first award so it can be re-checked after cancellation
    self.old_award_id = self.award_id
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "unsuccessful")
    # Create another award on the same lot
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {"data": {"suppliers": [test_organization], "qualified": True, "status": "pending", "lotID": self.lot["id"]}},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.award_id = award["id"]
    # Activate award
    self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    # Create complaint
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    if RELEASE_2020_04_19 < get_now():
        # After RELEASE_2020_04_19 complaints are created in draft and a bot moves them to pending
        self.assertEqual(response.json["data"]["status"], "draft")
        with change_auth(self.app, ("Basic", ("bot", ""))):
            response = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(
                    self.tender_id, self.award_id, complaint["id"]),
                {"data": {"status": "pending"}},
            )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], "pending")
    # Move complaint to satisfied
    self.app.authorization = ("Basic", ("reviewer", ""))
    now = get_now()
    data = {"status": "accepted"}
    if RELEASE_2020_04_19 < now:
        # Review details accompany acceptance after the release
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "accepted")
    if RELEASE_2020_04_19 < now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # Make decision
    now = get_now()
    data = {"decision": "satisfied complaint"}
    if RELEASE_2020_04_19 > now:
        # Before the release the review details are supplied with the decision instead
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["decision"], "satisfied complaint")
    if RELEASE_2020_04_19 > now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # FIX: the format string has three placeholders but was given a fourth,
    # spurious self.tender_token argument that str.format silently ignored;
    # the reviewer needs no acc_token, so the dead argument is removed.
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": {"status": "satisfied"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "satisfied")
    # Cancel the (already activated) award as tender owner
    self.app.authorization = ("Basic", ("broker", ""))
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    # Let's check another award: both awards on the lot must be cancelled
    response = self.app.get(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token)
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    self.assertEqual(response.json["data"]["lotID"], self.lot["id"])
    response = self.app.get(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.old_award_id, self.tender_token)
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    self.assertEqual(response.json["data"]["lotID"], self.lot["id"])
    # And check contracts
    response = self.app.get("/tenders/{}/contracts?acc_token={}".format(self.tender_id, self.tender_token))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(len(response.json["data"]), 1)
    self.assertEqual(response.json["data"][0]["status"], "cancelled")
# Tender2LotNegotiationAwardComplaintResourceTest
def cancelled_2lot_award_with_complaint(self):
    """ When complaint on award in satisfied status and owner cancel award,
    then all awards (with same lotID) and contracts must move to status cancelled """
    # Move first award to unsuccessful
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "unsuccessful")
    # Create another award on the first lot
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {
            "data": {
                "suppliers": [test_organization],
                "qualified": True,
                "status": "pending",
                "lotID": self.first_lot["id"],
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.award_id = award["id"]
    # Activate first award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    # Activate second award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.second_award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    # Create complaint on first award
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    if RELEASE_2020_04_19 < get_now():
        # After RELEASE_2020_04_19 complaints are created in draft and a bot moves them to pending
        self.assertEqual(response.json["data"]["status"], "draft")
        with change_auth(self.app, ("Basic", ("bot", ""))):
            response = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(
                    self.tender_id, self.award_id, complaint["id"]),
                {"data": {"status": "pending"}},
            )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], "pending")
    # Move complaint to satisfied
    self.app.authorization = ("Basic", ("reviewer", ""))
    now = get_now()
    data = {"status": "accepted"}
    if RELEASE_2020_04_19 < now:
        # Review details accompany acceptance after the release
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "accepted")
    if RELEASE_2020_04_19 < now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # Make decision
    now = get_now()
    data = {"decision": "satisfied complaint"}
    if RELEASE_2020_04_19 > now:
        # Before the release the review details are supplied with the decision instead
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["decision"], "satisfied complaint")
    if RELEASE_2020_04_19 > now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # FIX: dropped a spurious self.tender_token fourth argument that str.format
    # silently ignored (the format string has only three placeholders).
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": {"status": "satisfied"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "satisfied")
    # Cancel the first-lot award as tender owner
    self.app.authorization = ("Basic", ("broker", ""))
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    # Check the awards: first lot's award is cancelled, second lot's stays active
    response = self.app.get(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.first_award["id"], self.tender_token)
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["lotID"], self.first_award["lotID"])
    self.assertEqual(response.json["data"]["status"], "cancelled")
    response = self.app.get(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.second_award["id"], self.tender_token)
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["lotID"], self.second_award["lotID"])
    self.assertEqual(response.json["data"]["status"], "active")
    # And check contracts
    response = self.app.get("/tenders/{}/contracts?acc_token={}".format(self.tender_id, self.tender_token))
    self.assertEqual(response.status, "200 OK")
    for contract in response.json["data"]:
        if contract["awardID"] == self.second_award["id"]:
            self.assertEqual(contract["status"], "pending")
def cancelled_active_award_with_complaint(self):
    """ When complaint on award in satisfied status and owner cancel award,
    then all awards (with same lotID) and contracts must move to status cancelled """
    # Activate first award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    # Activate second award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.second_award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    # Create complaint on first award
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    if RELEASE_2020_04_19 < get_now():
        # After RELEASE_2020_04_19 complaints are created in draft and a bot moves them to pending
        self.assertEqual(response.json["data"]["status"], "draft")
        with change_auth(self.app, ("Basic", ("bot", ""))):
            response = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(
                    self.tender_id, self.award_id, complaint["id"]),
                {"data": {"status": "pending"}},
            )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], "pending")
    # Move complaint to satisfied
    self.app.authorization = ("Basic", ("reviewer", ""))
    now = get_now()
    data = {"status": "accepted"}
    if RELEASE_2020_04_19 < now:
        # Review details accompany acceptance after the release
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "accepted")
    if RELEASE_2020_04_19 < now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # Make decision
    now = get_now()
    data = {"decision": "satisfied complaint"}
    if RELEASE_2020_04_19 > now:
        # Before the release the review details are supplied with the decision instead
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["decision"], "satisfied complaint")
    if RELEASE_2020_04_19 > now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # FIX: dropped a spurious self.tender_token fourth argument that str.format
    # silently ignored (the format string has only three placeholders).
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": {"status": "satisfied"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "satisfied")
    # Cancel the first award as tender owner
    self.app.authorization = ("Basic", ("broker", ""))
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    # Let's check another award: the second lot's award must stay active
    response = self.app.get(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.second_award["id"], self.tender_token)
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    self.assertEqual(response.json["data"]["lotID"], self.second_award["lotID"])
    # And check contracts
    response = self.app.get("/tenders/{}/contracts?acc_token={}".format(self.tender_id, self.tender_token))
    self.assertEqual(response.status, "200 OK")
    for contract in response.json["data"]:
        if contract["awardID"] == self.first_award["id"]:
            self.assertEqual(contract["status"], "cancelled")
        if contract["awardID"] == self.second_award["id"]:
            self.assertEqual(contract["status"], "pending")
def cancelled_unsuccessful_award_with_complaint(self):
    """ When complaint on award in satisfied status and owner cancel award,
    then all awards (with same lotID) and contracts must move to status cancelled """
    # Move first award to unsuccessful
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "unsuccessful"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "unsuccessful")
    # Create third award on the first lot
    request_path = "/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token)
    response = self.app.post_json(
        request_path,
        {
            "data": {
                "suppliers": [test_organization],
                "qualified": True,
                "status": "pending",
                "lotID": self.first_lot["id"],
            }
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    award = response.json["data"]
    self.award_id = award["id"]
    # Activate second award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.second_award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    # Activate third award
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "active"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    # Create complaint on third award
    response = self.app.post_json(
        "/tenders/{}/awards/{}/complaints".format(self.tender_id, self.award_id),
        {
            "data": test_complaint
        },
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    complaint = response.json["data"]
    if RELEASE_2020_04_19 < get_now():
        # After RELEASE_2020_04_19 complaints are created in draft and a bot moves them to pending
        self.assertEqual(response.json["data"]["status"], "draft")
        with change_auth(self.app, ("Basic", ("bot", ""))):
            response = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(
                    self.tender_id, self.award_id, complaint["id"]),
                {"data": {"status": "pending"}},
            )
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["data"]["status"], "pending")
    # Move complaint to satisfied
    self.app.authorization = ("Basic", ("reviewer", ""))
    now = get_now()
    data = {"status": "accepted"}
    if RELEASE_2020_04_19 < now:
        # Review details accompany acceptance after the release
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "accepted")
    if RELEASE_2020_04_19 < now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # Make decision
    now = get_now()
    data = {"decision": "satisfied complaint"}
    if RELEASE_2020_04_19 > now:
        # Before the release the review details are supplied with the decision instead
        data.update({
            "reviewDate": now.isoformat(),
            "reviewPlace": "some",
        })
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": data},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["data"]["decision"], "satisfied complaint")
    if RELEASE_2020_04_19 > now:
        self.assertEqual(response.json["data"]["reviewPlace"], "some")
        self.assertEqual(response.json["data"]["reviewDate"], now.isoformat())
    # FIX: dropped a spurious self.tender_token fourth argument that str.format
    # silently ignored (the format string has only three placeholders).
    response = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}".format(self.tender_id, self.award_id, complaint["id"]),
        {"data": {"status": "satisfied"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "satisfied")
    # Cancel award
    self.app.authorization = ("Basic", ("broker", ""))
    response = self.app.patch_json(
        "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token),
        {"data": {"status": "cancelled"}},
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "cancelled")
    # Check all awards: every award on the first lot is cancelled, the rest stay active
    response = self.app.get("/tenders/{}/awards?acc_token={}".format(self.tender_id, self.tender_token))
    self.assertEqual(response.status, "200 OK")
    for award in response.json["data"]:
        if award["lotID"] == self.first_award["lotID"]:
            self.assertEqual(award["status"], "cancelled")
        else:
            self.assertEqual(award["status"], "active")
    # And check contracts
    response = self.app.get("/tenders/{}/contracts?acc_token={}".format(self.tender_id, self.tender_token))
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(len(response.json["data"]), 2)
    self.assertEqual(response.json["data"][0]["status"], "pending")
# Tender2LotNegotiationAwardComplaintResourceTest
def two_awards_on_one_lot(self):
    """Moving an award onto a lot already held by another award must be rejected."""
    award_url = "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token)
    lot_taken_error = [
        {"location": "body", "name": "lotID", "description": "Another award is already using this lotID."}
    ]
    resp = self.app.patch_json(award_url, {"data": {"lotID": self.second_lot["id"]}}, status=403)
    self.assertEqual(resp.status, "403 Forbidden")
    self.assertEqual(resp.json["errors"], lot_taken_error)
def change_lotID_from_unsuccessful_award(self):
    """Once the award occupying a lot becomes unsuccessful, another award may take over its lotID."""
    # Put the second award into the unsuccessful state first
    second_award_url = "/tenders/{}/awards/{}?acc_token={}".format(
        self.tender_id, self.second_award_id, self.tender_token
    )
    resp = self.app.patch_json(second_award_url, {"data": {"status": "unsuccessful"}})
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.json["data"]["status"], "unsuccessful")
    # The freed lot can now be assigned to the first award
    first_award_url = "/tenders/{}/awards/{}?acc_token={}".format(
        self.tender_id, self.award_id, self.tender_token
    )
    resp = self.app.patch_json(first_award_url, {"data": {"lotID": self.second_lot["id"]}})
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.json["data"]["lotID"], self.second_lot["id"])
def change_lotID_from_active_award(self):
    """Switching lotID onto a lot held by another award is forbidden while
    that award is pending and remains forbidden once it is active."""
    lot_taken_error = [
        {"location": "body", "name": "lotID", "description": "Another award is already using this lotID."}
    ]
    first_award_url = "/tenders/{}/awards/{}?acc_token={}".format(
        self.tender_id, self.award_id, self.tender_token
    )
    second_award_url = "/tenders/{}/awards/{}?acc_token={}".format(
        self.tender_id, self.second_award_id, self.tender_token
    )
    # While the other award is still pending, the lot counts as occupied
    resp = self.app.patch_json(first_award_url, {"data": {"lotID": self.second_lot["id"]}}, status=403)
    self.assertEqual(resp.status, "403 Forbidden")
    self.assertEqual(resp.json["errors"], lot_taken_error)
    # Activate the other award
    resp = self.app.patch_json(second_award_url, {"data": {"status": "active"}})
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.json["data"]["status"], "active")
    # An active award keeps the lot occupied as well
    resp = self.app.patch_json(first_award_url, {"data": {"lotID": self.second_lot["id"]}}, status=403)
    self.assertEqual(resp.status, "403 Forbidden")
    self.assertEqual(resp.json["errors"], lot_taken_error)
def change_lotID_from_cancelled_award(self):
    """A cancelled award frees its lot, so another award may then take the lotID."""
    lot_taken_error = [
        {"location": "body", "name": "lotID", "description": "Another award is already using this lotID."}
    ]
    first_award_url = "/tenders/{}/awards/{}?acc_token={}".format(
        self.tender_id, self.award_id, self.tender_token
    )
    second_award_url = "/tenders/{}/awards/{}?acc_token={}".format(
        self.tender_id, self.second_award_id, self.tender_token
    )
    # Activate the second award
    self.app.patch_json(second_award_url, {"data": {"status": "active"}})
    # While it is active, the lot is occupied
    resp = self.app.patch_json(first_award_url, {"data": {"lotID": self.second_lot["id"]}}, status=403)
    self.assertEqual(resp.status, "403 Forbidden")
    self.assertEqual(resp.json["errors"], lot_taken_error)
    # Cancel the second award
    resp = self.app.patch_json(second_award_url, {"data": {"status": "cancelled"}})
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.json["data"]["status"], "cancelled")
    # Now the lot is free and the move succeeds
    resp = self.app.patch_json(first_award_url, {"data": {"lotID": self.second_lot["id"]}})
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.json["data"]["lotID"], self.second_lot["id"])
# TenderNegotiationAwardComplaintDocumentResourceTest
def not_found(self):
    """Complaint-document routes must 404 with a named error part when the
    tender, award, complaint or document id is unknown, and when the upload
    uses a wrong multipart field name.

    Refactored: the identical 4-assert 404 check was copy-pasted 15 times;
    it is now a single nested helper, with every request kept in the
    original order and with the original URLs and payloads.
    """

    def assert_not_found(response, name, location=u"url"):
        # Common assertions for a 404 response blaming *name* at *location*.
        self.assertEqual(response.status, "404 Not Found")
        self.assertEqual(response.content_type, "application/json")
        self.assertEqual(response.json["status"], "error")
        self.assertEqual(
            response.json["errors"], [{u"description": u"Not Found", u"location": location, u"name": name}]
        )

    # POST with unknown ids at each level of the path
    response = self.app.post(
        "/tenders/some_id/awards/some_id/complaints/some_id/documents",
        status=404,
        upload_files=[("file", "name.doc", "content")],
    )
    assert_not_found(response, u"tender_id")
    response = self.app.post(
        "/tenders/{}/awards/some_id/complaints/some_id/documents".format(self.tender_id),
        status=404,
        upload_files=[("file", "name.doc", "content")],
    )
    assert_not_found(response, u"award_id")
    response = self.app.post(
        "/tenders/{}/awards/{}/complaints/some_id/documents".format(self.tender_id, self.award_id),
        status=404,
        upload_files=[("file", "name.doc", "content")],
    )
    assert_not_found(response, u"complaint_id")
    # POST with a wrong multipart field name is reported against the body
    response = self.app.post(
        "/tenders/{}/awards/{}/complaints/{}/documents?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, self.tender_token
        ),
        status=404,
        upload_files=[("invalid_value", "name.doc", "content")],
    )
    assert_not_found(response, u"file", location=u"body")
    # GET document collection with unknown ids
    response = self.app.get("/tenders/some_id/awards/some_id/complaints/some_id/documents", status=404)
    assert_not_found(response, u"tender_id")
    response = self.app.get(
        "/tenders/{}/awards/some_id/complaints/some_id/documents".format(self.tender_id), status=404
    )
    assert_not_found(response, u"award_id")
    response = self.app.get(
        "/tenders/{}/awards/{}/complaints/some_id/documents".format(self.tender_id, self.award_id), status=404
    )
    assert_not_found(response, u"complaint_id")
    # GET a single document with unknown ids
    response = self.app.get("/tenders/some_id/awards/some_id/complaints/some_id/documents/some_id", status=404)
    assert_not_found(response, u"tender_id")
    response = self.app.get(
        "/tenders/{}/awards/some_id/complaints/some_id/documents/some_id".format(self.tender_id), status=404
    )
    assert_not_found(response, u"award_id")
    response = self.app.get(
        "/tenders/{}/awards/{}/complaints/some_id/documents/some_id".format(self.tender_id, self.award_id), status=404
    )
    assert_not_found(response, u"complaint_id")
    response = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/some_id".format(
            self.tender_id, self.award_id, self.complaint_id
        ),
        status=404,
    )
    assert_not_found(response, u"document_id")
    # PUT with unknown ids at each level of the path
    response = self.app.put(
        "/tenders/some_id/awards/some_id/complaints/some_id/documents/some_id",
        status=404,
        upload_files=[("file", "name.doc", "content2")],
    )
    assert_not_found(response, u"tender_id")
    response = self.app.put(
        "/tenders/{}/awards/some_id/complaints/some_id/documents/some_id".format(self.tender_id),
        status=404,
        upload_files=[("file", "name.doc", "content2")],
    )
    assert_not_found(response, u"award_id")
    response = self.app.put(
        "/tenders/{}/awards/{}/complaints/some_id/documents/some_id".format(self.tender_id, self.award_id),
        status=404,
        upload_files=[("file", "name.doc", "content2")],
    )
    assert_not_found(response, u"complaint_id")
    response = self.app.put(
        "/tenders/{}/awards/{}/complaints/{}/documents/some_id".format(
            self.tender_id, self.award_id, self.complaint_id
        ),
        status=404,
        upload_files=[("file", "name.doc", "content2")],
    )
    assert_not_found(response, u"document_id")
def create_tender_award_complaint_document(self):
    """Upload a document to a complaint and read it back through the
    collection, download and metadata views; uploads are rejected while the
    complaint is in draft (for the tender token) and once the tender is
    complete."""
    # Upload with the tender owner's token is rejected while the complaint is in draft
    response = self.app.post(
        "/tenders/{}/awards/{}/complaints/{}/documents?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, self.tender_token
        ),
        upload_files=[("file", "name.doc", "content")],
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't add document in current (draft) complaint status"
    )
    # The complaint owner's token succeeds
    response = self.app.post(
        "/tenders/{}/awards/{}/complaints/{}/documents?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, self.complaint_owner_token
        ),
        upload_files=[("file", "name.doc", "content")],
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    doc_id = response.json["data"]["id"]
    self.assertIn(doc_id, response.headers["Location"])
    self.assertEqual("name.doc", response.json["data"]["title"])
    # The download key is the query-string part of the document URL
    key = response.json["data"]["url"].split("?")[-1]
    # The document is listed in the collection view
    response = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents".format(self.tender_id, self.award_id, self.complaint_id)
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(doc_id, response.json["data"][0]["id"])
    self.assertEqual("name.doc", response.json["data"][0]["title"])
    # ...and in the collection view with all=true
    response = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents?all=true".format(
            self.tender_id, self.award_id, self.complaint_id
        )
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(doc_id, response.json["data"][0]["id"])
    self.assertEqual("name.doc", response.json["data"][0]["title"])
    # An unknown download key yields a 404 naming the "download" parameter
    response = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?download=some_id".format(
            self.tender_id, self.award_id, self.complaint_id, doc_id
        ),
        status=404,
    )
    self.assertEqual(response.status, "404 Not Found")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(response.json["status"], "error")
    self.assertEqual(
        response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"download"}]
    )
    # The correct key returns the stored file content
    response = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?{}".format(
            self.tender_id, self.award_id, self.complaint_id, doc_id, key
        )
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/msword")
    self.assertEqual(response.content_length, 7)
    self.assertEqual(response.body, "content")
    # Without a key the endpoint returns the document metadata as JSON
    response = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}".format(
            self.tender_id, self.award_id, self.complaint_id, doc_id
        )
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(doc_id, response.json["data"]["id"])
    self.assertEqual("name.doc", response.json["data"]["title"])
    # Once the tender is complete no further uploads are allowed
    self.set_status("complete")
    response = self.app.post(
        "/tenders/{}/awards/{}/complaints/{}/documents?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, self.tender_token
        ),
        upload_files=[("file", "name.doc", "content")],
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"][0]["description"], "Can't add document in current (complete) tender status"
    )
def put_tender_award_complaint_document(self):
    """Exercise PUT on a complaint document.

    Covers: initial upload, rejection of a wrongly-named multipart field,
    author-only replacement, multipart and raw-body payload replacement,
    and the freeze once the tender reaches the "complete" status.
    """
    resp = self.app.post(
        "/tenders/{}/awards/{}/complaints/{}/documents?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, self.complaint_owner_token
        ),
        upload_files=[("file", "name.doc", "content")],
    )
    self.assertEqual(resp.status, "201 Created")
    self.assertEqual(resp.content_type, "application/json")
    document_id = resp.json["data"]["id"]
    self.assertIn(document_id, resp.headers["Location"])
    # Multipart field must be named "file"; anything else is a body 404.
    resp = self.app.put(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.tender_token
        ),
        status=404,
        upload_files=[("invalid_name", "name.doc", "content")],
    )
    self.assertEqual(resp.status, "404 Not Found")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(resp.json["status"], "error")
    self.assertEqual(resp.json["errors"], [{u"description": u"Not Found", u"location": u"body", u"name": u"file"}])
    # The tender owner is not the document author and must be refused.
    resp = self.app.put(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.tender_token
        ),
        upload_files=[("file", "name.doc", "content2")],
        status=403,
    )
    self.assertEqual(resp.status, "403 Forbidden")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(resp.json["errors"][0]["description"], "Can update document only author")
    # The complaint author replaces the payload via a multipart upload.
    resp = self.app.put(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.complaint_owner_token
        ),
        upload_files=[("file", "name.doc", "content2")],
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(document_id, resp.json["data"]["id"])
    download_key = resp.json["data"]["url"].split("?")[-1]
    resp = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?{}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, download_key
        )
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/msword")
    self.assertEqual(resp.content_length, 8)
    self.assertEqual(resp.body, "content2")
    resp = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id
        )
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(document_id, resp.json["data"]["id"])
    self.assertEqual("name.doc", resp.json["data"]["title"])
    # A raw-body PUT (no multipart wrapper) is accepted as well.
    resp = self.app.put(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.complaint_owner_token
        ),
        "content3",
        content_type="application/msword",
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(document_id, resp.json["data"]["id"])
    download_key = resp.json["data"]["url"].split("?")[-1]
    resp = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?{}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, download_key
        )
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/msword")
    self.assertEqual(resp.content_length, 8)
    self.assertEqual(resp.body, "content3")
    self.set_status("complete")
    # Once the tender is complete the document can no longer be replaced.
    resp = self.app.put(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.complaint_owner_token
        ),
        upload_files=[("file", "name.doc", "content3")],
        status=403,
    )
    self.assertEqual(resp.status, "403 Forbidden")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(
        resp.json["errors"][0]["description"], "Can't update document in current (complete) tender status"
    )
def patch_tender_award_complaint_document(self):
    """Exercise PATCH on a complaint document.

    Covers: author-only metadata update, moving the complaint to
    "pending" (directly by the owner before RELEASE_2020_04_19, under
    bot credentials afterwards), a payload replacement while pending,
    and the freeze once the tender reaches the "complete" status.
    """
    resp = self.app.post(
        "/tenders/{}/awards/{}/complaints/{}/documents?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, self.complaint_owner_token
        ),
        upload_files=[("file", "name.doc", "content")],
    )
    self.assertEqual(resp.status, "201 Created")
    self.assertEqual(resp.content_type, "application/json")
    document_id = resp.json["data"]["id"]
    self.assertIn(document_id, resp.headers["Location"])
    # The tender owner is not the document author and must be refused.
    resp = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.tender_token
        ),
        {"data": {"description": "document description"}},
        status=403,
    )
    self.assertEqual(resp.status, "403 Forbidden")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(resp.json["errors"][0]["description"], "Can update document only author")
    resp = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.complaint_owner_token
        ),
        {"data": {"description": "document description"}},
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(document_id, resp.json["data"]["id"])
    resp = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id
        )
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(document_id, resp.json["data"]["id"])
    self.assertEqual("document description", resp.json["data"]["description"])
    # Move the complaint to "pending": before RELEASE_2020_04_19 the owner
    # patches it directly, afterwards the patch is made under bot credentials.
    if get_now() < RELEASE_2020_04_19:
        resp = self.app.patch_json(
            "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format(
                self.tender_id, self.award_id, self.complaint_id, self.complaint_owner_token
            ),
            {"data": {"status": "pending"}},
        )
    else:
        with change_auth(self.app, ("Basic", ("bot", ""))):
            resp = self.app.patch_json(
                "/tenders/{}/awards/{}/complaints/{}".format(
                    self.tender_id, self.award_id, self.complaint_id
                ),
                {"data": {"status": "pending"}},
            )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.json["data"]["status"], "pending")
    # The author can still replace the payload while the complaint is pending.
    resp = self.app.put(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.complaint_owner_token
        ),
        "contentX",
        content_type="application/msword",
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(document_id, resp.json["data"]["id"])
    download_key = resp.json["data"]["url"].split("?")[-1]
    resp = self.app.get(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?{}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, download_key
        )
    )
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.content_type, "application/msword")
    self.assertEqual(resp.content_length, 8)
    self.assertEqual(resp.body, "contentX")
    self.set_status("complete")
    # A completed tender freezes document metadata as well.
    resp = self.app.patch_json(
        "/tenders/{}/awards/{}/complaints/{}/documents/{}?acc_token={}".format(
            self.tender_id, self.award_id, self.complaint_id, document_id, self.complaint_owner_token
        ),
        {"data": {"description": "document description"}},
        status=403,
    )
    self.assertEqual(resp.status, "403 Forbidden")
    self.assertEqual(resp.content_type, "application/json")
    self.assertEqual(
        resp.json["errors"][0]["description"], "Can't update document in current (complete) tender status"
    )
# TenderAwardDocumentResourceTest
def create_tender_award_document_invalid(self):
    """Uploading an award document must be refused (403) while the award
    is in the "active" or the "cancelled" status; each case carries a
    status-specific error description."""
    award_url = "/tenders/{}/awards/{}?acc_token={}".format(self.tender_id, self.award_id, self.tender_token)
    documents_url = "/tenders/{}/awards/{}/documents?acc_token={}".format(self.tender_id, self.award_id, self.tender_token)
    # (new award status, expected rejection message) — exercised in order.
    forbidden_cases = (
        ("active", "Can't add document in current (active) award status"),
        ("cancelled", "Can't add document in current (cancelled) award status"),
    )
    for new_status, expected_error in forbidden_cases:
        resp = self.app.patch_json(award_url, {"data": {"status": new_status}})
        self.assertEqual(resp.status, "200 OK")
        resp = self.app.post(
            documents_url,
            upload_files=[("file", "name.doc", "content")],
            status=403,
        )
        self.assertEqual(resp.status, "403 Forbidden")
        self.assertEqual(resp.json["errors"][0]["description"], expected_error)
| 41.256577
| 127
| 0.635501
| 17,177
| 152,113
| 5.498748
| 0.016534
| 0.14563
| 0.210149
| 0.101628
| 0.967878
| 0.961907
| 0.959133
| 0.953225
| 0.951033
| 0.947582
| 0
| 0.014994
| 0.191483
| 152,113
| 3,686
| 128
| 41.26777
| 0.752996
| 0.026217
| 0
| 0.778359
| 0
| 0
| 0.255039
| 0.084202
| 0
| 0
| 0
| 0
| 0.31154
| 1
| 0.015038
| false
| 0.000327
| 0.002942
| 0
| 0.01798
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b74afcbcefbbafe3008c06a460c27227a934c48b
| 116
|
py
|
Python
|
teitoku/source/__init__.py
|
yukinotenshi/teitoku
|
adb54fb7f709e0bac0da6d6f6f8aa00702c2f9c5
|
[
"MIT"
] | null | null | null |
teitoku/source/__init__.py
|
yukinotenshi/teitoku
|
adb54fb7f709e0bac0da6d6f6f8aa00702c2f9c5
|
[
"MIT"
] | null | null | null |
teitoku/source/__init__.py
|
yukinotenshi/teitoku
|
adb54fb7f709e0bac0da6d6f6f8aa00702c2f9c5
|
[
"MIT"
] | 1
|
2020-01-25T10:53:44.000Z
|
2020-01-25T10:53:44.000Z
|
from teitoku.source.base_source import Source
from teitoku.source.sources import SourceUser, SourceGroup, SourceRoom
| 58
| 70
| 0.87069
| 15
| 116
| 6.666667
| 0.6
| 0.22
| 0.34
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077586
| 116
| 2
| 70
| 58
| 0.934579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b7e0ae0155853226b0a038de6ab448f563f6b448
| 39,164
|
py
|
Python
|
sos_trades_core/tests/l0_test_39_valueblock_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | 8
|
2022-01-10T14:44:28.000Z
|
2022-03-31T08:57:14.000Z
|
sos_trades_core/tests/l0_test_39_valueblock_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | null | null | null |
sos_trades_core/tests/l0_test_39_valueblock_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | 1
|
2022-02-21T14:51:45.000Z
|
2022-02-21T14:51:45.000Z
|
'''
Copyright 2022 Airbus SAS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
'''
mode: python; py-indent-offset: 4; tab-width: 4; coding: utf-8
'''
import unittest
from sos_trades_core.execution_engine.execution_engine import ExecutionEngine
from sos_trades_core.execution_engine.sos_multi_scatter_builder import SoSMultiScatterBuilderException
class TestMultiScatterBuilder(unittest.TestCase):
"""
SoSMultiScatterBuilder test class
"""
def setUp(self):
    """Create a fresh ExecutionEngine and record the process and
    discipline paths shared by every test of this class."""
    self.name = 'MyCase'
    self.study_name = f'{self.name}'
    # Engine under test, plus a shortcut to its builder factory.
    self.exec_eng = ExecutionEngine(self.name)
    self.factory = self.exec_eng.factory
    # Process repository / sub-process the value blocks are built from.
    self.repo = 'sos_trades_core.sos_processes.test'
    self.sub_proc = 'test_disc1_disc2_coupling'
    # Module paths of the two wrapped test disciplines.
    base_path = 'sos_trades_core.sos_wrapping.test_discs'
    self.mod1_path = f'{base_path}.disc1.Disc1'
    self.mod2_path = f'{base_path}.disc2.Disc2'
def test_01_multibuilder_scatter(self):
"""Build a 'Business' value-block scatter driven by vb_dict/name_list
(autogather enabled), execute it, and check the per-name outputs plus
the gathered y_dict/z_dict of each actor."""
# load process in GUI
mydict = {'input_name': 'name_list',
'input_type': 'string_list',
'input_ns': 'ns_barrierr',
'output_name': 'ac_name',
'scatter_ns': 'ns_ac',
'gather_ns': 'ns_barrierr'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('name_list', mydict)
self.exec_eng.ns_manager.add_ns('ns_barrierr', 'MyCase')
# Create scatter map for all value blocks
vb_dict_map = {'input_name': 'vb_dict',
'input_type': 'dict',
'input_ns': 'ns_barrierr',
'scatter_ns': 'ns_ac'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('vb_dict_map', vb_dict_map)
# instantiate factory # get instantiator from Discipline class
cls_list = self.factory.get_builder_from_process(repo=self.repo,
mod_id=self.sub_proc) # get instantiator from Process
scatter_list = self.exec_eng.factory.create_value_block_builder(
'Business', 'vb_dict_map', 'name_list', cls_list, autogather=True)
self.exec_eng.factory.set_builders_to_coupling_builder(scatter_list)
self.exec_eng.configure()
# User fill in the fields in the GUI
# vb_dict: actor1 owns name_1/name_2, actor2 owns name_3; each name
# activates both Disc1 and Disc2.
dict_values = {self.study_name +
'.vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1', 'Disc2']},
'actor2': {'name_3': ['Disc1', 'Disc2']}}}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
# Disc2 inputs (constant, power) per name under actor1.
constant1 = 10
constant2 = 20
power1 = 2
power2 = 3
private_val = {}
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.constant'] = constant1
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.power'] = power1
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.constant'] = constant2
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.power'] = power2
# Disc1 inputs (x, a, b) per name under actor1.
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
private_val[self.study_name + '.name_1.x'] = x1
private_val[self.study_name + '.name_2.x'] = x2
private_val[self.study_name + '.Business.actor1.Disc1.name_1.a'] = a1
private_val[self.study_name + '.Business.actor1.Disc1.name_2.a'] = a2
private_val[self.study_name + '.Business.actor1.Disc1.name_1.b'] = b1
private_val[self.study_name + '.Business.actor1.Disc1.name_2.b'] = b2
# Same for actor2's name_3 (re-assigning the same numeric values).
private_val[self.study_name +
'.Business.actor2.Disc2.name_3.constant'] = constant2
private_val[self.study_name +
'.Business.actor2.Disc2.name_3.power'] = power2
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
private_val[self.study_name + '.name_3.x'] = x1
private_val[self.study_name + '.Business.actor2.Disc1.name_3.a'] = a1
private_val[self.study_name + '.Business.actor2.Disc1.name_3.b'] = b1
self.exec_eng.load_study_from_input_dict(private_val)
self.exec_eng.execute()
# The assertions below encode y = a*x + b (Disc1) and
# z = constant + y**power (Disc2).
y1 = self.exec_eng.dm.get_value(self.study_name + '.name_1.y')
y2 = self.exec_eng.dm.get_value(self.study_name + '.name_2.y')
y3 = self.exec_eng.dm.get_value(self.study_name + '.name_3.y')
self.assertEqual(y1, a1 * x1 + b1)
self.assertEqual(y3, a1 * x1 + b1)
z1 = self.exec_eng.dm.get_value(self.study_name + '.name_1.z')
z2 = self.exec_eng.dm.get_value(self.study_name + '.name_2.z')
z3 = self.exec_eng.dm.get_value(self.study_name + '.name_3.z')
self.assertEqual(z1, constant1 + y1**power1)
self.assertEqual(z3, constant2 + y3**power2)
z_dict = self.exec_eng.dm.get_value(
self.study_name + '.Business.actor1.z_dict')
# Check gather disciplines
self.assertDictEqual(z_dict, {'name_1': z1, 'name_2': z2})
y_dict = self.exec_eng.dm.get_value(
self.study_name + '.Business.actor1.y_dict')
# Check gather disciplines
self.assertDictEqual(y_dict, {'name_1': y1, 'name_2': y2})
z_dict = self.exec_eng.dm.get_value(
self.study_name + '.Business.actor2.z_dict')
# Check gather disciplines
self.assertDictEqual(z_dict, {'name_3': z3})
y_dict = self.exec_eng.dm.get_value(
self.study_name + '.Business.actor2.y_dict')
# Check gather disciplines
self.assertDictEqual(y_dict, {'name_3': y3})
# name_list must not be user-editable here (presumably it is deduced
# from vb_dict by the value-block builder — confirm in the builder).
self.assertFalse(self.exec_eng.dm.get_data(
self.study_name + '.name_list', 'editable'))
self.assertListEqual(self.exec_eng.dm.get_value(
self.study_name + '.Business.actor1.name_list_actor1'), ['name_1', 'name_2'])
self.assertListEqual(self.exec_eng.dm.get_value(
self.study_name + '.Business.actor2.name_list_actor2'), ['name_3'])
def test_02_multi_scenarios_of_multibuilder_scatter(self):
"""Nest the value-block scatter of test_01 inside a multi-scenario
builder that trades vb_dict, run two scenarios, and check the
gathered scenario_dict on the Post-processing node."""
# load process in GUI
scenario_map = {'input_name': 'scenario_list',
'input_type': 'string_list',
'input_ns': 'ns_scatter_scenario',
'output_name': 'scenario_name',
'scatter_ns': 'ns_scenario',
'gather_ns': 'ns_ac',
'ns_to_update': ['ns_barrierr']}
self.exec_eng.smaps_manager.add_build_map(
'scenario_list', scenario_map)
self.exec_eng.ns_manager.add_ns(
'ns_scatter_scenario', 'MyCase.multi_scenarios')
mydict = {'input_name': 'name_list',
'input_type': 'string_list',
'input_ns': 'ns_barrierr',
'output_name': 'ac_name',
'scatter_ns': 'ns_ac',
'gather_ns': 'ns_barrierr'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('name_list', mydict)
self.exec_eng.ns_manager.add_ns('ns_barrierr', 'MyCase')
self.exec_eng.ns_manager.add_ns('ns_public', 'MyCase')
# Create scatter map for all value blocks
vb_dict_map = {'input_name': 'vb_dict',
'input_type': 'dict',
'input_ns': 'ns_barrierr',
'scatter_ns': 'ns_ac'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('vb_dict_map', vb_dict_map)
cls_list = self.factory.get_builder_from_process(repo=self.repo,
mod_id=self.sub_proc) # get instantiator from Process
scatter_list = self.exec_eng.factory.create_value_block_builder(
'Business', 'vb_dict_map', 'name_list', cls_list, autogather=True, builder_child_path=None)
# Wrap the value-block scatter in a multi-scenario driver with a
# dedicated Post-processing gather node.
multi_scenarios = self.exec_eng.factory.create_multi_scenario_builder(
'multi_scenarios', 'scenario_list', [scatter_list], autogather=True, gather_node='Post-processing', business_post_proc=True)
self.exec_eng.factory.set_builders_to_coupling_builder(multi_scenarios)
self.exec_eng.configure()
# User fill in the fields in the GUI
self.study_name = 'MyCase.multi_scenarios'
# Two traded vb_dict values -> two scenarios are generated.
trade_variables_dict = {'vb_dict': 'dict'}
dict_values = {f'{self.study_name}.trade_variables': trade_variables_dict,
f'{self.study_name}.vb_dict_trade': [{'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1', 'Disc2']}, 'actor2': {'name_3': ['Disc1', 'Disc2']}},
{'actor1': {'name_1': ['Disc1', 'Disc2']}}]}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
# Discipline inputs, identical for both scenarios.
constant1 = 10
constant2 = 20
power1 = 2
power2 = 3
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
private_val = {}
for scenario in ['scenario_1', 'scenario_2']:
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc2.name_1.constant'] = constant1
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc2.name_1.power'] = power1
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc2.name_2.constant'] = constant2
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc2.name_2.power'] = power2
private_val[self.study_name + f'.{scenario}.name_1.x'] = x1
private_val[self.study_name + f'.{scenario}.name_2.x'] = x2
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc1.name_1.a'] = a1
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc1.name_2.a'] = a2
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc1.name_1.b'] = b1
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc1.name_2.b'] = b2
# self.exec_eng.dm.set_values_from_dict(private_val)
private_val[self.study_name +
f'.{scenario}.name_list'] = ['name_1', 'name_3']
private_val[self.study_name +
f'.{scenario}.Business.actor2.Disc2.name_3.constant'] = constant2
private_val[self.study_name +
f'.{scenario}.Business.actor2.Disc2.name_3.power'] = power2
private_val[self.study_name + f'.{scenario}.name_3.x'] = x1
private_val[self.study_name +
f'.{scenario}.Business.actor2.Disc1.name_3.a'] = a1
private_val[self.study_name +
f'.{scenario}.Business.actor2.Disc1.name_3.b'] = b1
self.exec_eng.load_study_from_input_dict(private_val)
self.exec_eng.display_treeview_nodes()
self.exec_eng.execute()
self.exec_eng.display_treeview_nodes()
# The Post-processing gather must echo the traded vb_dict per scenario.
scenario_dict = self.exec_eng.dm.get_value(
'MyCase.Post-processing.Business.scenario_dict')
self.assertDictEqual(scenario_dict, {'scenario_1': {'vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1', 'Disc2'
]}, 'actor2': {'name_3': ['Disc1', 'Disc2']}}}, 'scenario_2': {'vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2']}}}})
def test_03_modify_vb_dict_multibuilder_scatter(self):
"""Reconfigure a built value-block scatter by mutating vb_dict and
check that the discipline count in the data manager grows/shrinks by
the expected delta after each reload."""
# load process in GUI
mydict = {'input_name': 'name_list',
'input_type': 'string_list',
'input_ns': 'ns_barrierr',
'output_name': 'ac_name',
'scatter_ns': 'ns_ac',
'gather_ns': 'ns_barrierr'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('name_list', mydict)
self.exec_eng.ns_manager.add_ns('ns_barrierr', 'MyCase')
# Create scatter map for all value blocks
vb_dict_map = {'input_name': 'vb_dict',
'input_type': 'dict',
'input_ns': 'ns_barrierr',
'scatter_ns': 'ns_ac'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('vb_dict_map', vb_dict_map)
# instantiate factory # get instantiator from Discipline class
cls_list = self.factory.get_builder_from_process(repo=self.repo,
mod_id=self.sub_proc) # get instantiator from Process
scatter_list = self.exec_eng.factory.create_value_block_builder(
'Business', 'vb_dict_map', 'name_list', cls_list, autogather=True)
self.exec_eng.factory.set_builders_to_coupling_builder(scatter_list)
self.exec_eng.configure()
# User fill in the fields in the GUI
# Baseline vb_dict: actor1 -> name_1/name_2, actor2 -> name_3.
dict_values = {self.study_name +
'.vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1', 'Disc2']},
'actor2': {'name_3': ['Disc1', 'Disc2']}}}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
# Discipline inputs for the baseline configuration.
constant1 = 10
constant2 = 20
power1 = 2
power2 = 3
private_val = {}
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.constant'] = constant1
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.power'] = power1
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.constant'] = constant2
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.power'] = power2
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
private_val[self.study_name + '.name_1.x'] = x1
private_val[self.study_name + '.name_2.x'] = x2
private_val[self.study_name + '.Business.actor1.Disc1.name_1.a'] = a1
private_val[self.study_name + '.Business.actor1.Disc1.name_2.a'] = a2
private_val[self.study_name + '.Business.actor1.Disc1.name_1.b'] = b1
private_val[self.study_name + '.Business.actor1.Disc1.name_2.b'] = b2
private_val[self.study_name +
'.name_list'] = ['name_1', 'name_3']
private_val[self.study_name +
'.Business.actor2.Disc2.name_3.constant'] = constant2
private_val[self.study_name +
'.Business.actor2.Disc2.name_3.power'] = power2
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
private_val[self.study_name + '.name_3.x'] = x1
private_val[self.study_name + '.Business.actor2.Disc1.name_3.a'] = a1
private_val[self.study_name + '.Business.actor2.Disc1.name_3.b'] = b1
# Reference discipline count, taken before any vb_dict mutation.
len_old_discipline_dict = len(self.exec_eng.dm.disciplines_dict)
self.exec_eng.load_study_from_input_dict(private_val)
self.exec_eng.display_treeview_nodes()
self.exec_eng.execute()
# Mutation 1: drop name_2 from actor1.
dict_values = {self.study_name +
'.vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2']},
'actor2': {'name_3': ['Disc1', 'Disc2']}}}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
new_discipline_dict = self.exec_eng.dm.disciplines_dict
# 2 disciplines have been erased (Disc1.name2 and Disc2.name2)
self.assertEqual(len_old_discipline_dict - 2, len(new_discipline_dict))
# Mutation 2: restore name_2 and add name_5 under actor1.
dict_values = {self.study_name +
'.vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1', 'Disc2'], 'name_5': ['Disc1', 'Disc2']},
'actor2': {'name_3': ['Disc1', 'Disc2']}}}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
new_discipline_dict = self.exec_eng.dm.disciplines_dict
# 2 disciplines have been added (Disc1.name5 and Disc2.name5)
self.assertEqual(len_old_discipline_dict + 2, len(new_discipline_dict))
# Mutation 3: drop name_5 and add a whole new actor3 with name_6.
dict_values = {self.study_name +
'.vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1', 'Disc2']},
'actor2': {'name_3': ['Disc1', 'Disc2']},
'actor3': {'name_6': ['Disc1', 'Disc2']}}}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
new_discipline_dict = self.exec_eng.dm.disciplines_dict
# 6 disciplines have been added (two scatter +two gather+ two disciplines under
# the two scatters)
self.assertEqual(len_old_discipline_dict + 6, len(new_discipline_dict))
# Mutation 4: keep only actor2/name_3.
dict_values = {self.study_name +
'.vb_dict': {'actor2': {'name_3': ['Disc1', 'Disc2']}}}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
new_discipline_dict = self.exec_eng.dm.disciplines_dict
# 8 disciplines have been erased (2 scatters + 2 gathers and 4 disciplines under
# the two scatters)
self.assertEqual(len_old_discipline_dict - 8, len(new_discipline_dict))
def test_04_clean_non_activated_value_blocks(self):
"""Trade vb_dict across scenarios and check that reconfiguration keeps
scenario_dict, the per-scenario name_list (namespace and value), and
the per-scenario vb_dict consistent, and that data of removed
scenarios is cleaned from the data manager."""
# load process in GUI
scenario_map = {'input_name': 'scenario_list',
'input_type': 'string_list',
'input_ns': 'ns_scatter_scenario',
'output_name': 'scenario_name',
'scatter_ns': 'ns_scenario',
'gather_ns': 'ns_scatter_scenario',
'ns_to_update': ['ns_barrierr']}
self.exec_eng.smaps_manager.add_build_map(
'scenario_list', scenario_map)
self.exec_eng.ns_manager.add_ns(
'ns_scatter_scenario', 'MyCase.multi_scenarios')
mydict = {'input_name': 'name_list',
'input_type': 'string_list',
'input_ns': 'ns_barrierr',
'output_name': 'ac_name',
'scatter_ns': 'ns_ac',
'gather_ns': 'ns_barrierr'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('name_list', mydict)
self.exec_eng.ns_manager.add_ns('ns_barrierr', 'MyCase')
self.exec_eng.ns_manager.add_ns('ns_public', 'MyCase')
# Create scatter map for all value blocks
vb_dict_map = {'input_name': 'vb_dict',
'input_type': 'dict',
'input_ns': 'ns_barrierr',
'scatter_ns': 'ns_ac'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('vb_dict_map', vb_dict_map)
cls_list = self.factory.get_builder_from_process(repo=self.repo,
mod_id=self.sub_proc) # get instantiator from Process
scatter_list = self.exec_eng.factory.create_value_block_builder(
'Business', 'vb_dict_map', 'name_list', cls_list, autogather=True, builder_child_path=None)
multi_scenarios = self.exec_eng.factory.create_multi_scenario_builder(
'multi_scenarios', 'scenario_list', [scatter_list], autogather=True, gather_node='Post-processing', business_post_proc=True)
self.exec_eng.factory.set_builders_to_coupling_builder(multi_scenarios)
self.exec_eng.configure()
# User fill in the fields in the GUI
self.study_name = 'MyCase.multi_scenarios'
# Two traded vb_dict values with partially activated value blocks.
trade_variables_dict = {'vb_dict': 'dict'}
dict_values = {f'{self.study_name}.trade_variables': trade_variables_dict,
f'{self.study_name}.vb_dict_trade': [{'actor1': {'name_1': ['Disc1']}},
{'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1']}, 'actor2': {'name_3': ['Disc2']}}]}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
# Discipline inputs for both scenarios.
constant1 = 10
constant2 = 20
power1 = 2
power2 = 3
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
private_val = {}
for scenario in ['scenario_1', 'scenario_2']:
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc2.name_1.constant'] = constant1
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc2.name_1.power'] = power1
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc2.name_2.constant'] = constant2
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc2.name_2.power'] = power2
private_val[self.study_name + f'.{scenario}.name_1.x'] = x1
private_val[self.study_name + f'.{scenario}.name_2.x'] = x2
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc1.name_1.a'] = a1
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc1.name_2.a'] = a2
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc1.name_1.b'] = b1
private_val[self.study_name +
f'.{scenario}.Business.actor1.Disc1.name_2.b'] = b2
private_val[self.study_name +
f'.{scenario}.name_list'] = ['name_1', 'name_3']
private_val[self.study_name +
f'.{scenario}.Business.actor2.Disc2.name_3.constant'] = constant2
private_val[self.study_name +
f'.{scenario}.Business.actor2.Disc2.name_3.power'] = power2
# scenario_2 activates only Disc2 for name_3, so its y is a direct input.
private_val[self.study_name +
f'.scenario_2.name_3.y'] = 2.0
private_val[self.study_name + f'.{scenario}.name_3.x'] = x1
private_val[self.study_name +
f'.{scenario}.Business.actor2.Disc1.name_3.a'] = a1
private_val[self.study_name +
f'.{scenario}.Business.actor2.Disc1.name_3.b'] = b1
self.exec_eng.load_study_from_input_dict(private_val)
self.exec_eng.display_treeview_nodes()
self.exec_eng.execute()
scenario_dict = self.exec_eng.dm.get_value(
'MyCase.Post-processing.Business.scenario_dict')
# test on scenario_dict value
self.assertDictEqual(scenario_dict, {'scenario_1': {'vb_dict': {'actor1': {'name_1': ['Disc1']}}}, 'scenario_2': {'vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': [
'Disc1']}, 'actor2': {'name_3': ['Disc2']}}}})
# test associated_inputs updated namespaces (name_list)
self.assertListEqual(list(self.exec_eng.dm.get_all_namespaces_from_var_name('name_list')), list(
['MyCase.multi_scenarios.scenario_1.name_list', 'MyCase.multi_scenarios.scenario_2.name_list']))
self.assertEqual(self.exec_eng.dm.get_data(
'MyCase.multi_scenarios.scenario_1.name_list', 'namespace'), 'ns_scenario')
self.assertEqual(self.exec_eng.dm.get_data(
'MyCase.multi_scenarios.scenario_2.name_list', 'namespace'), 'ns_scenario')
# Activated names per scenario follow each scenario's vb_dict.
name_list_scen_1 = self.exec_eng.dm.get_value(
'MyCase.multi_scenarios.scenario_1.name_list')
name_list_scen_1.sort()
self.assertListEqual(name_list_scen_1, list(
['name_1']))
name_list_scen_2 = self.exec_eng.dm.get_value(
'MyCase.multi_scenarios.scenario_2.name_list')
name_list_scen_2.sort()
self.assertListEqual(name_list_scen_2, list(
['name_1', 'name_2', 'name_3']))
# vb_dict_trade modification in DM
dict_values[f'{self.study_name}.vb_dict_trade'] = [{'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': []}, 'actor2': {'name_3': ['Disc1']}},
{'actor1': {'name_1': ['Disc2']}}]
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
scenario_dict = self.exec_eng.dm.get_value(
'MyCase.Post-processing.Business.scenario_dict')
self.assertDictEqual(scenario_dict, {'scenario_1': {'vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': []}, 'actor2': {'name_3': ['Disc1']}}}, 'scenario_2': {'vb_dict': {'actor1': {'name_1': ['Disc2']}}}}
)
vb_dict_scen_1 = self.exec_eng.dm.get_value(
'MyCase.multi_scenarios.scenario_1.vb_dict')
self.assertDictEqual(vb_dict_scen_1, {'actor1': {'name_1': [
'Disc1', 'Disc2'], 'name_2': []}, 'actor2': {'name_3': ['Disc1']}})
vb_dict_scen_2 = self.exec_eng.dm.get_value(
'MyCase.multi_scenarios.scenario_2.vb_dict')
self.assertDictEqual(vb_dict_scen_2, {'actor1': {'name_1': ['Disc2']}})
# vb_dict_trade modification in DM
dict_values[f'{self.study_name}.vb_dict_trade'] = [{'actor1': {'name_1': ['Disc1', 'Disc2']}, 'actor2': {'name_2': ['Disc1']}},
{'actor3': {'name_2': ['Disc1']}}]
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
scenario_dict = self.exec_eng.dm.get_value(
'MyCase.Post-processing.Business.scenario_dict')
# vb_dict_trade modification in DM
# Shrinking the trade to one value leaves a single scenario.
dict_values[f'{self.study_name}.vb_dict_trade'] = [{'actor3': {
'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc2']}}]
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
scenario_dict = self.exec_eng.dm.get_value(
'MyCase.Post-processing.Business.scenario_dict')
self.assertDictEqual(scenario_dict, {'scenario_1': {'vb_dict': {'actor3': {'name_1': [
'Disc1', 'Disc2'], 'name_2': ['Disc2']}}}})
# test on cleaning after re-configure
# scenario_2 disappeared, so its vb_dict entry must be purged from the DM.
self.assertNotIn(
'MyCase.multi_scenarios.scenario_2.vb_dict', self.exec_eng.dm.data_id_map)
def test_05_modify_vb_dict_multibuilder_scatter_with_error(self):
# load process in GUI
mydict = {'input_name': 'name_list',
'input_type': 'string_list',
'input_ns': 'ns_barrierr',
'output_name': 'ac_name',
'scatter_ns': 'ns_ac',
'gather_ns': 'ns_barrierr'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('name_list', mydict)
self.exec_eng.ns_manager.add_ns('ns_barrierr', 'MyCase')
# Create scatter map for all value blocks
vb_dict_map = {'input_name': 'vb_dict',
'input_type': 'dict',
'input_ns': 'ns_barrierr',
'scatter_ns': 'ns_ac'} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('vb_dict_map', vb_dict_map)
# instantiate factory # get instantiator from Discipline class
cls_list = self.factory.get_builder_from_process(repo=self.repo,
mod_id=self.sub_proc) # get instantiator from Process
scatter_list = self.exec_eng.factory.create_value_block_builder(
'Business', 'vb_dict_map', 'name_list', cls_list, autogather=True)
self.exec_eng.factory.set_builders_to_coupling_builder(scatter_list)
self.exec_eng.configure()
# User fill in the fields in the GUI
dict_values = {self.study_name +
'.vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2']}}}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
constant1 = 10
constant2 = 20
power1 = 2
power2 = 3
private_val = {}
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.constant'] = constant1
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.power'] = power1
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.constant'] = constant2
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.power'] = power2
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
private_val[self.study_name + '.name_1.x'] = x1
private_val[self.study_name + '.name_2.x'] = x2
private_val[self.study_name + '.Business.actor1.Disc1.name_1.a'] = a1
private_val[self.study_name + '.Business.actor1.Disc1.name_2.a'] = a2
private_val[self.study_name + '.Business.actor1.Disc1.name_1.b'] = b1
private_val[self.study_name + '.Business.actor1.Disc1.name_2.b'] = b2
private_val[self.study_name +
'.name_list'] = ['name_1', 'name_3']
private_val[self.study_name +
'.Business.actor2.Disc2.name_3.constant'] = constant2
private_val[self.study_name +
'.Business.actor2.Disc2.name_3.power'] = power2
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
private_val[self.study_name + '.name_3.x'] = x1
private_val[self.study_name + '.Business.actor2.Disc1.name_3.a'] = a1
private_val[self.study_name + '.Business.actor2.Disc1.name_3.b'] = b1
self.exec_eng.load_study_from_input_dict(private_val)
self.exec_eng.display_treeview_nodes()
self.exec_eng.execute()
wrong_dict_values = {self.study_name +
'.vb_dict': {'actor1': 'aaa'}}
self.assertRaises(
SoSMultiScatterBuilderException, self.exec_eng.load_study_from_input_dict, wrong_dict_values)
correct_dict_values = {self.study_name +
'.vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1', 'Disc2']}}}
self.exec_eng.load_study_from_input_dict(correct_dict_values)
def test_06_multibuilder_scatter_of_scatter(self):
# load process in GUI
mydict = {'input_name': 'name_list',
'input_type': 'string_list',
'input_ns': 'ns_barrierr',
'output_name': 'ac_name',
'scatter_ns': 'ns_name',
'gather_ns': 'ns_barrierr',
'ns_to_update': ['ns_ac', 'gather_sub_name']} # or object ScatterMapBuild
self.exec_eng.smaps_manager.add_build_map('name_list', mydict)
self.exec_eng.ns_manager.add_ns('ns_barrierr', 'MyCase')
# load process in GUI
mydict = {'input_name': 'sub_name_list',
'input_type': 'string_list',
'input_ns': 'ns_actor',
'output_name': 'sub_name',
'scatter_ns': 'ns_sub_name',
'gather_ns': 'gather_sub_name',
'ns_to_update': ['ns_ac']}
self.exec_eng.smaps_manager.add_build_map('sub_name_list', mydict)
self.exec_eng.ns_manager.add_ns('gather_sub_name', 'MyCase.Business')
self.exec_eng.ns_manager.add_ns('ns_out', 'MyCase')
self.exec_eng.ns_manager.add_ns('ns_actor', 'MyCase.Business')
# get instantiator from local Process
builder_list = self.factory.get_builder_from_process(repo=self.repo,
mod_id=self.sub_proc)
scatter_sub_name_list = self.exec_eng.factory.create_multi_scatter_builder_from_list(
'sub_name_list', builder_list=builder_list, autogather=True)
# Create scatter map for all value blocks
vb_dict_map = {'input_name': 'vb_dict',
'input_type': 'dict',
'input_ns': 'ns_barrierr',
'scatter_ns': 'ns_name',
'ns_to_update': ['ns_ac', 'gather_sub_name'],
'ns_to_update_with_actor': ['ns_actor', 'gather_sub_name']} # or object ScatterMapBuild
# >> introduce ScatterMap
self.exec_eng.smaps_manager.add_build_map('vb_dict_map', vb_dict_map)
# instantiate factory # get instantiator from Discipline class
scatter_list = self.exec_eng.factory.create_value_block_builder(
'Business', 'vb_dict_map', 'name_list', scatter_sub_name_list, autogather=True)
self.exec_eng.factory.set_builders_to_coupling_builder(scatter_list)
self.exec_eng.configure()
# User fill in the fields in the GUI
dict_values = {self.study_name +
'.vb_dict': {'actor1': {'name_1': ['Disc1', 'Disc2'], 'name_2': ['Disc1', 'Disc2']},
'actor2': {'name_3': ['Disc1', 'Disc2']}},
self.study_name +
'.Business.actor1.sub_name_list': ['sub_name_1', 'sub_name_2'],
self.study_name +
'.Business.actor2.sub_name_list': ['sub_name_3']}
self.exec_eng.load_study_from_input_dict(dict_values)
self.exec_eng.display_treeview_nodes()
constant1 = 10
constant2 = 20
constant3 = 30
power1 = 2
power2 = 3
power3 = 3
x1 = 2
a1 = 3
b1 = 4
x2 = 4
a2 = 6
b2 = 2
x3 = 5
a3 = 6
b3 = 6
private_val = {}
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.sub_name_1.constant'] = constant1
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.sub_name_1.power'] = power1
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.sub_name_2.constant'] = constant1
private_val[self.study_name +
'.Business.actor1.Disc2.name_1.sub_name_2.power'] = power1
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.sub_name_1.constant'] = constant2
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.sub_name_1.power'] = power2
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.sub_name_2.constant'] = constant2
private_val[self.study_name +
'.Business.actor1.Disc2.name_2.sub_name_2.power'] = power2
private_val[self.study_name +
'.Business.actor2.Disc2.name_3.sub_name_3.constant'] = constant3
private_val[self.study_name +
'.Business.actor2.Disc2.name_3.sub_name_3.power'] = power3
private_val[self.study_name + '.name_1.sub_name_1.x'] = x1
private_val[self.study_name + '.name_1.sub_name_2.x'] = x1
private_val[self.study_name + '.name_2.sub_name_1.x'] = x2
private_val[self.study_name + '.name_2.sub_name_2.x'] = x2
private_val[self.study_name + '.name_3.sub_name_3.x'] = x3
private_val[self.study_name +
'.Business.actor1.Disc1.name_1.sub_name_1.a'] = a1
private_val[self.study_name +
'.Business.actor1.Disc1.name_2.sub_name_1.a'] = a2
private_val[self.study_name +
'.Business.actor1.Disc1.name_1.sub_name_2.a'] = a1
private_val[self.study_name +
'.Business.actor1.Disc1.name_2.sub_name_2.a'] = a2
private_val[self.study_name +
'.Business.actor1.Disc1.name_1.sub_name_1.b'] = b1
private_val[self.study_name +
'.Business.actor1.Disc1.name_2.sub_name_1.b'] = b2
private_val[self.study_name +
'.Business.actor1.Disc1.name_1.sub_name_2.b'] = b1
private_val[self.study_name +
'.Business.actor1.Disc1.name_2.sub_name_2.b'] = b2
private_val[self.study_name +
'.Business.actor2.Disc1.name_3.sub_name_3.a'] = a3
private_val[self.study_name +
'.Business.actor2.Disc1.name_3.sub_name_3.b'] = b3
self.exec_eng.load_study_from_input_dict(private_val)
actor1_disc1_disc = self.exec_eng.dm.disciplines_id_map['MyCase.Business.actor1.Disc1']
self.assertEqual(len(actor1_disc1_disc), 3)
# scatter of scatter
scatter_sub_names = self.exec_eng.dm.get_discipline(
actor1_disc1_disc[0])
self.assertEqual(scatter_sub_names.__class__.__name__,
'SoSDisciplineScatter')
self.assertEqual(scatter_sub_names.scatter_builders.cls.__name__,
'SoSDisciplineScatter')
# gather of scatter (of scatter)
gather_sub_names = self.exec_eng.dm.get_discipline(
actor1_disc1_disc[1])
self.assertEqual(gather_sub_names.__class__.__name__,
'SoSDisciplineGather')
self.assertEqual(gather_sub_names.builder.cls.__name__,
'SoSDisciplineScatter')
# scatter of gather
scatter_gather_sub_names = self.exec_eng.dm.get_discipline(
actor1_disc1_disc[2])
self.assertEqual(scatter_gather_sub_names.__class__.__name__,
'SoSDisciplineScatter')
self.assertEqual(scatter_gather_sub_names.scatter_builders.cls.__name__,
'SoSDisciplineGather')
if '__main__' == __name__:
cls = TestMultiScatterBuilder()
cls.setUp()
cls.test_02_multi_scenarios_of_multibuilder_scatter()
| 45.171857
| 243
| 0.589623
| 4,789
| 39,164
| 4.480058
| 0.05805
| 0.058728
| 0.084829
| 0.092985
| 0.879748
| 0.862363
| 0.843113
| 0.829038
| 0.807877
| 0.795572
| 0
| 0.033388
| 0.289552
| 39,164
| 866
| 244
| 45.224018
| 0.737708
| 0.075784
| 0
| 0.728414
| 0
| 0
| 0.248021
| 0.126482
| 0
| 0
| 0
| 0
| 0.054945
| 1
| 0.010989
| false
| 0
| 0.00471
| 0
| 0.017268
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d1d607f62b4ca17c0baef471a575f5127ece0ea
| 3,543
|
py
|
Python
|
ros2_batch_job/vendor/osrf_pycommon/tests/unit/test_cli_utils/test_verb_pattern.py
|
taewoong0627/ci
|
12766009d19fe668aa1e59a6069a9b446ed77fba
|
[
"Apache-2.0"
] | 31
|
2016-09-28T16:55:15.000Z
|
2022-03-06T15:24:48.000Z
|
ros2_batch_job/vendor/osrf_pycommon/tests/unit/test_cli_utils/test_verb_pattern.py
|
taewoong0627/ci
|
12766009d19fe668aa1e59a6069a9b446ed77fba
|
[
"Apache-2.0"
] | 473
|
2016-10-17T16:08:17.000Z
|
2022-03-30T17:56:40.000Z
|
ros2_batch_job/vendor/osrf_pycommon/tests/unit/test_cli_utils/test_verb_pattern.py
|
taewoong0627/ci
|
12766009d19fe668aa1e59a6069a9b446ed77fba
|
[
"Apache-2.0"
] | 34
|
2017-07-11T23:25:11.000Z
|
2022-03-06T08:39:23.000Z
|
import unittest
from osrf_pycommon.cli_utils import verb_pattern
called = None
class TestCliUtilsVerbPattern(unittest.TestCase):
def test_call_prepare_arguments(self):
global called
cpa = verb_pattern.call_prepare_arguments
# Try with basic, one parameter
called = False
def fake_prepare_arguments(parser):
global called
called = True
if called:
pass
return parser
r = cpa(fake_prepare_arguments, None)
self.assertTrue(called)
self.assertIsNone(r)
# Try with args
called = False
def fake_prepare_arguments(parser, args):
global called
called = True
if called:
pass
return parser
r = cpa(fake_prepare_arguments, None)
self.assertTrue(called)
self.assertIsNone(r)
# Try with self
called = False
class Foo:
def fake_prepare_arguments(self, parser, args):
global called
called = True
if called:
pass
return parser
f = Foo()
r = cpa(f.fake_prepare_arguments, None)
self.assertTrue(called)
self.assertIsNone(r)
# Try with more than needed
called = False
class Foo:
def fake_prepare_arguments(self, parser, args, extra):
global called
called = True
if called:
pass
return parser
f = Foo()
with self.assertRaisesRegexp(ValueError, 'one or two parameters'):
r = cpa(f.fake_prepare_arguments, None)
# Try with less than needed
called = False
class Foo:
def fake_prepare_arguments(self):
global called
called = True
if called:
pass
return 'Should not get here'
f = Foo()
with self.assertRaisesRegexp(ValueError, 'one or two parameters'):
r = cpa(f.fake_prepare_arguments, None)
# Try with additional optional argument
called = False
class Foo:
def fake_prepare_arguments(self, parser, args, optional=None):
global called
called = True
if called:
pass
return parser
f = Foo()
r = cpa(f.fake_prepare_arguments, None)
self.assertTrue(called)
self.assertIsNone(r)
def test_split_arguments_by_verb(self):
args = ['--cmd-arg1', 'verb', '--verb-arg1', '--verb-arg2']
expected = ('verb', ['--cmd-arg1'], ['--verb-arg1', '--verb-arg2'])
self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected)
args = ['verb', '--verb-arg1', '--verb-arg2']
expected = ('verb', [], ['--verb-arg1', '--verb-arg2'])
self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected)
args = ['--cmd-arg1', 'verb']
expected = ('verb', ['--cmd-arg1'], [])
self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected)
args = ['verb']
expected = ('verb', [], [])
self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected)
args = ['--cmd-arg1']
expected = (None, ['--cmd-arg1'], [])
self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected)
| 28.804878
| 78
| 0.545301
| 365
| 3,543
| 5.136986
| 0.175342
| 0.119467
| 0.128
| 0.0736
| 0.8176
| 0.800533
| 0.800533
| 0.729067
| 0.707733
| 0.707733
| 0
| 0.006162
| 0.358736
| 3,543
| 122
| 79
| 29.040984
| 0.819102
| 0.04149
| 0
| 0.730337
| 0
| 0
| 0.071112
| 0
| 0
| 0
| 0
| 0
| 0.168539
| 1
| 0.089888
| false
| 0.067416
| 0.022472
| 0
| 0.235955
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4d3ea16518fe69eed5b8f4499a73866538078ace
| 8,714
|
py
|
Python
|
proposals/migrations/0001_initial.py
|
jefftriplett/prorality
|
f7f89dab8eb954aa55b29e5d55c671f02fba91dd
|
[
"BSD-3-Clause"
] | 9
|
2017-11-17T20:51:11.000Z
|
2021-06-03T16:28:57.000Z
|
proposals/migrations/0001_initial.py
|
jefftriplett/prorality
|
f7f89dab8eb954aa55b29e5d55c671f02fba91dd
|
[
"BSD-3-Clause"
] | 3
|
2020-02-11T21:42:25.000Z
|
2021-06-01T21:34:34.000Z
|
proposals/migrations/0001_initial.py
|
jefftriplett/prorality
|
f7f89dab8eb954aa55b29e5d55c671f02fba91dd
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-02-28 02:12
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import markupfield.fields
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('organizations', '0003_auto_20171219_0406'),
]
operations = [
migrations.CreateModel(
name='HistoricalProposal',
fields=[
('created', models.DateTimeField(blank=True, editable=False)),
('modified', models.DateTimeField(blank=True, editable=False)),
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False)),
('subject', models.TextField()),
('body', markupfield.fields.MarkupField(blank=True, null=True, rendered_field=True)),
('body_markup_type', models.CharField(choices=[('', '--'), ('html', 'HTML'), ('plain', 'Plain'), ('markdown', 'Markdown')], default='markdown', max_length=30)),
('url', models.TextField(blank=True, null=True)),
('_body_rendered', models.TextField(editable=False, null=True)),
('closing_date', models.DateField(blank=True, null=True)),
('allow_comments', models.BooleanField(default=False)),
('status', models.CharField(choices=[('draft', 'Draft'), ('final', 'Final'), ('withdrawn', 'Withdrawn'), ('accepted', 'Accepted'), ('rejected', 'Rejected')], default='draft', max_length=16)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('created_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
('organization', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='organizations.Organization')),
],
options={
'verbose_name': 'historical proposal',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
),
migrations.CreateModel(
name='HistoricalVote',
fields=[
('created', models.DateTimeField(blank=True, editable=False)),
('modified', models.DateTimeField(blank=True, editable=False)),
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False)),
('vote', models.CharField(blank=True, choices=[('plus_one', '+1: Yes, I agree'), ('plus_zero', "+0: I don't feel strongly about it, but I'm okay with this."), ('minus_zero', "-0: I won't get in the way, but I'd rather we didn't do this."), ('minus_one', '-1: I object on the following grounds')], max_length=16, null=True)),
('reason', models.TextField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('created_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical vote',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
),
migrations.CreateModel(
name='Proposal',
fields=[
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('subject', models.TextField()),
('body', markupfield.fields.MarkupField(blank=True, null=True, rendered_field=True)),
('body_markup_type', models.CharField(choices=[('', '--'), ('html', 'HTML'), ('plain', 'Plain'), ('markdown', 'Markdown')], default='markdown', max_length=30)),
('url', models.TextField(blank=True, null=True)),
('_body_rendered', models.TextField(editable=False, null=True)),
('closing_date', models.DateField(blank=True, null=True)),
('allow_comments', models.BooleanField(default=False)),
('status', models.CharField(choices=[('draft', 'Draft'), ('final', 'Final'), ('withdrawn', 'Withdrawn'), ('accepted', 'Accepted'), ('rejected', 'Rejected')], default='draft', max_length=16)),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='created_proposal_set', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='updated_proposal_set', to=settings.AUTH_USER_MODEL)),
('organization', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='organizations.Organization')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Vote',
fields=[
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('vote', models.CharField(blank=True, choices=[('plus_one', '+1: Yes, I agree'), ('plus_zero', "+0: I don't feel strongly about it, but I'm okay with this."), ('minus_zero', "-0: I won't get in the way, but I'd rather we didn't do this."), ('minus_one', '-1: I object on the following grounds')], max_length=16, null=True)),
('reason', models.TextField(blank=True, null=True)),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='created_vote_set', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='updated_vote_set', to=settings.AUTH_USER_MODEL)),
('proposal', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='proposals.Proposal')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='historicalvote',
name='proposal',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='proposals.Proposal'),
),
migrations.AddField(
model_name='historicalvote',
name='user',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='vote',
unique_together=set([('proposal', 'user')]),
),
]
| 70.274194
| 340
| 0.620955
| 963
| 8,714
| 5.445483
| 0.160955
| 0.042715
| 0.045385
| 0.07132
| 0.867658
| 0.867658
| 0.844584
| 0.834478
| 0.834096
| 0.834096
| 0
| 0.009521
| 0.216548
| 8,714
| 123
| 341
| 70.845528
| 0.758606
| 0.007804
| 0
| 0.66087
| 1
| 0.017391
| 0.194724
| 0.013537
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052174
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d4e6322f1a39cb7d58b5a2795028d0f59fb1d93
| 8,649
|
py
|
Python
|
data-mining.py
|
Khaaao/football-analytics
|
415ca40cf14a64c5f91096f34a794f66e53048ac
|
[
"MIT"
] | null | null | null |
data-mining.py
|
Khaaao/football-analytics
|
415ca40cf14a64c5f91096f34a794f66e53048ac
|
[
"MIT"
] | 5
|
2020-03-24T17:28:27.000Z
|
2021-12-13T20:10:47.000Z
|
data-mining.py
|
Khaaao/football-analytics
|
415ca40cf14a64c5f91096f34a794f66e53048ac
|
[
"MIT"
] | null | null | null |
from dotenv import load_dotenv
import os
import json
import csv
import requests
import argparse
# Load env params
load_dotenv()
# print(os.getenv("X-RAPIDAPI-KEY"))
# print(os.getenv("THROTTLING"))
THROTTLING = 40
curr_throttling = 0
headers = {'Content-Type': 'application/json', 'x-rapidapi-key': os.getenv("X-RAPIDAPI-KEY")}
day=[]
########
# TODO DYNAMAC FIXTURE UPLOAD
########
def is_done(league_id):
pass
if not (os.path.isfile('data/football_data.csv')):
with open('data/football_data.csv', 'w', encoding='utf-8') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=';', quotechar='|', quoting=csv.QUOTE_MINIMAL)
# TODO add league info
spamwriter.writerow(["league_id", "fixture_id", "round","team_id", "team_name", "player_id", "player_name", "player_position", "player_rating", "player_minutes_played", "player_is_substitute", "shots_total", "shots_on", "goals_total", "goals_conceded", "goal_assists", "passes_total", "passes_key", "passes_accuracy", "tackles_total", "tackles_blocks", "tackles_interceptions", "duels_total", "duels_won", "dribbles_attempts", "dribbles_success", "dribbles_past", "fouls_drawn", "fouls_committed", "cards_yellow", "cards_red", "penalty_won", "penalty_commited", "penalty_missed", "penalty_saved", "penalty_success"])
with open('data/followed_leagues.json') as followed_leagues_json:
followed_leagues_data = json.load(followed_leagues_json)
for followed_league in followed_leagues_data:
if followed_league["is_curent"] == 0:
if not (os.path.isfile(f'data/fixtures_{followed_league["league_id"]}.json')):
pass
# https://api-football-v1.p.rapidapi.com/v2/fixtures/league/{followed_league["league_id"]}
else:
with open(f'data/fixtures_{followed_league["league_id"]}.json', 'r') as fixtures_json:
fixtures_data = json.load(fixtures_json)
for num, fixture in enumerate(reversed(fixtures_data["api"]["fixtures"]), start=1):
inverse_num = len(fixtures_data["api"]["fixtures"]) - num
if not 'is_integrated' in fixture:
if curr_throttling < THROTTLING:
print(f'ALIM {fixture["fixture_id"]}')
# TODO : Replace mock by api call
# https://api-football-v1.p.rapidapi.com/v2/players/fixture/{fixture}
response = requests.get(f'https://api-football-v1.p.rapidapi.com/v2/players/fixture/{fixture["fixture_id"]}' , headers=headers)
players_data = json.loads(response.content.decode('utf-8'))
print(players_data)
for player in players_data["api"]["players"]:
with open(f'data/fixtures_{followed_league["league_id"]}.json', 'w') as fixtures_json:
tmp = fixtures_data
fixtures_data["api"]["fixtures"][inverse_num]["is_integrated"]=1
json.dump(fixtures_data, fixtures_json)
spamwriter.writerow([followed_league["league_id"], fixture["fixture_id"], fixture["round"] ,player["team_id"], player["team_name"], player["player_id"], player["player_name"], player["position"], player["rating"], player["minutes_played"], player["substitute"], player["shots"]["total"], player["shots"]["on"], player["goals"]["total"], player["goals"]["conceded"], player["goals"]["assists"], player["passes"]["total"], player["passes"]["key"], player["passes"]["accuracy"], player["tackles"]["total"], player["tackles"]["blocks"], player["tackles"]["interceptions"], player["duels"]["total"], player["duels"]["won"], player["dribbles"]["attempts"], player["dribbles"]["success"], player["dribbles"]["past"], player["fouls"]["drawn"], player["fouls"]["committed"], player["cards"]["yellow"], player["cards"]["red"], player["penalty"]["won"], player["penalty"]["commited"], player["penalty"]["success"], player["penalty"]["missed"], player["penalty"]["saved"]])
else:
break
curr_throttling+=1
else:
print(f'ALIM {fixture["fixture_id"]} Already ALIM')
else:
print(f'{followed_league["name"]}_{followed_league["season"]} is not done, use incremental upload instead')
else:
with open('data/football_data.csv', 'a', encoding='utf-8') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=';', quotechar='|', quoting=csv.QUOTE_MINIMAL)
# TODO add league info
with open('data/followed_leagues.json') as followed_leagues_json:
followed_leagues_data = json.load(followed_leagues_json)
for followed_league in followed_leagues_data:
if followed_league["is_curent"] == 0:
if not (os.path.isfile(f'data/fixtures_{followed_league["league_id"]}.json')):
pass
# https://api-football-v1.p.rapidapi.com/v2/fixtures/league/{followed_league["league_id"]}
else:
with open(f'data/fixtures_{followed_league["league_id"]}.json', 'r') as fixtures_json:
fixtures_data = json.load(fixtures_json)
for num, fixture in enumerate(reversed(fixtures_data["api"]["fixtures"]), start=1):
inverse_num = len(fixtures_data["api"]["fixtures"]) - num
if not 'is_integrated' in fixture:
if curr_throttling < THROTTLING:
print(f'ALIM {fixture["fixture_id"]}')
# TODO : Replace mock by api call
# https://api-football-v1.p.rapidapi.com/v2/players/fixture/{fixture}
response = requests.get(f'https://api-football-v1.p.rapidapi.com/v2/players/fixture/{fixture["fixture_id"]}' , headers=headers)
players_data = json.loads(response.content.decode('utf-8'))
for player in players_data["api"]["players"]:
with open(f'data/fixtures_{followed_league["league_id"]}.json', 'w') as fixtures_json:
tmp = fixtures_data
fixtures_data["api"]["fixtures"][inverse_num]["is_integrated"]=1
json.dump(fixtures_data, fixtures_json)
spamwriter.writerow([followed_league["league_id"], fixture["fixture_id"], fixture["round"] ,player["team_id"], player["team_name"], player["player_id"], player["player_name"], player["position"], player["rating"], player["minutes_played"], player["substitute"], player["shots"]["total"], player["shots"]["on"], player["goals"]["total"], player["goals"]["conceded"], player["goals"]["assists"], player["passes"]["total"], player["passes"]["key"], player["passes"]["accuracy"], player["tackles"]["total"], player["tackles"]["blocks"], player["tackles"]["interceptions"], player["duels"]["total"], player["duels"]["won"], player["dribbles"]["attempts"], player["dribbles"]["success"], player["dribbles"]["past"], player["fouls"]["drawn"], player["fouls"]["committed"], player["cards"]["yellow"], player["cards"]["red"], player["penalty"]["won"], player["penalty"]["commited"], player["penalty"]["success"], player["penalty"]["missed"], player["penalty"]["saved"]])
else:
break
curr_throttling+=1
else:
print(f'ALIM {fixture["fixture_id"]} Already ALIM')
else:
print(f'{followed_league["name"]}_{followed_league["season"]} is not done, use incremental upload instead')
| 86.49
| 1,011
| 0.544456
| 873
| 8,649
| 5.213058
| 0.158076
| 0.055372
| 0.043946
| 0.048341
| 0.853878
| 0.841354
| 0.829488
| 0.829488
| 0.829488
| 0.829488
| 0
| 0.004472
| 0.301885
| 8,649
| 99
| 1,012
| 87.363636
| 0.749255
| 0.061163
| 0
| 0.78481
| 0
| 0.025316
| 0.29955
| 0.081582
| 0
| 0
| 0
| 0.010101
| 0
| 1
| 0.012658
| false
| 0.075949
| 0.075949
| 0
| 0.088608
| 0.088608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
4ddf37abf9287f2159800e9e33b3b942c226622d
| 128
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_0/_pkg0_1_1_0_0/_mod0_1_1_0_0_3.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2018-12-29T09:53:39.000Z
|
2018-12-29T09:53:42.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_0/_pkg0_1_1_0_0/_mod0_1_1_0_0_3.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_0/_pkg0_1_1_0_0/_mod0_1_1_0_0_3.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
name0_1_1_0_0_3_0 = None
name0_1_1_0_0_3_1 = None
name0_1_1_0_0_3_2 = None
name0_1_1_0_0_3_3 = None
name0_1_1_0_0_3_4 = None
| 14.222222
| 24
| 0.820313
| 40
| 128
| 1.875
| 0.175
| 0.4
| 0.466667
| 0.533333
| 0.88
| 0.88
| 0.746667
| 0
| 0
| 0
| 0
| 0.318182
| 0.140625
| 128
| 9
| 25
| 14.222222
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
129edbbc76d04f2a48353c4221d5efe7cd4b9ae9
| 9,660
|
py
|
Python
|
tests/test_observable/test_partition.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2018-11-16T09:07:13.000Z
|
2018-11-16T09:07:13.000Z
|
tests/test_observable/test_partition.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tests/test_observable/test_partition.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-05-08T08:23:08.000Z
|
2020-05-08T08:23:08.000Z
|
import unittest
from rx.testing import TestScheduler, ReactiveTest
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
def is_even(num):
return +num % 2 == 0
class TestPartition(unittest.TestCase):
    """Tests for Observable.partition(is_even) over a hot observable.

    Every test follows the same virtual-time script: partition the source
    at t=created, subscribe both halves at t=subscribed, dispose both
    subscriptions at a given time, then check the recorded messages.  The
    shared scaffolding lives in _run_partition so each test states only
    its source messages and expectations.
    """

    def _run_partition(self, messages, dispose_time=ReactiveTest.disposed):
        """Drive the common partition scenario on a virtual-time scheduler.

        messages: recorded notifications for the hot source observable.
        dispose_time: virtual time at which both subscriptions are disposed.
        Returns (xs, results1, results2): the source plus the observers
        attached to the even and odd partitions respectively.
        """
        scheduler = TestScheduler()
        xs = scheduler.create_hot_observable(*messages)
        observables = []
        # Lists are used as mutable cells so the closures below can write
        # to them.
        subscriptions = [None, None]
        results1 = scheduler.create_observer()
        results2 = scheduler.create_observer()

        def action_create(scheduler, state):
            observables.extend(xs.partition(is_even))
        scheduler.schedule_absolute(ReactiveTest.created, action_create)

        def action_subscribe(scheduler, state):
            subscriptions[0] = observables[0].subscribe(results1)
            subscriptions[1] = observables[1].subscribe(results2)
        scheduler.schedule_absolute(ReactiveTest.subscribed, action_subscribe)

        def action_dispose(scheduler, state):
            subscriptions[0].dispose()
            subscriptions[1].dispose()
        scheduler.schedule_absolute(dispose_time, action_dispose)

        scheduler.start()
        return xs, results1, results2

    def test_partition_empty(self):
        # Value at 180 precedes the subscription at 200, so only the
        # completion is observed on both halves.
        xs, results1, results2 = self._run_partition([
            on_next(180, 5),
            on_completed(210)])
        assert results1.messages == [
            on_completed(210)]
        assert results2.messages == [
            on_completed(210)]
        assert xs.subscriptions == [
            subscribe(200, 210)]

    def test_partition_single(self):
        xs, results1, results2 = self._run_partition([
            on_next(180, 5),
            on_next(210, 4),
            on_completed(220)])
        assert results1.messages == [
            on_next(210, 4),
            on_completed(220)]
        assert results2.messages == [
            on_completed(220)]
        assert xs.subscriptions == [
            subscribe(200, 220)]

    def test_partition_each(self):
        # One even and one odd value: each lands on its own half.
        xs, results1, results2 = self._run_partition([
            on_next(180, 5),
            on_next(210, 4),
            on_next(220, 3),
            on_completed(230)])
        assert results1.messages == [
            on_next(210, 4),
            on_completed(230)]
        assert results2.messages == [
            on_next(220, 3),
            on_completed(230)]
        assert xs.subscriptions == [
            subscribe(200, 230)]

    def test_partition_completed(self):
        xs, results1, results2 = self._run_partition([
            on_next(180, 5),
            on_next(210, 4),
            on_next(240, 3),
            on_next(290, 2),
            on_next(350, 1),
            on_completed(360)])
        assert results1.messages == [
            on_next(210, 4),
            on_next(290, 2),
            on_completed(360)]
        assert results2.messages == [
            on_next(240, 3),
            on_next(350, 1),
            on_completed(360)]
        assert xs.subscriptions == [
            subscribe(200, 360)]

    def test_partition_not_completed(self):
        # Source never completes: subscription runs until the scheduler's
        # default dispose time (1000).
        xs, results1, results2 = self._run_partition([
            on_next(180, 5),
            on_next(210, 4),
            on_next(240, 3),
            on_next(290, 2),
            on_next(350, 1)])
        assert results1.messages == [
            on_next(210, 4),
            on_next(290, 2)]
        assert results2.messages == [
            on_next(240, 3),
            on_next(350, 1)]
        assert xs.subscriptions == [
            subscribe(200, 1000)]

    def test_partition_error(self):
        # An error terminates both partitions at the same virtual time.
        error = Exception()
        xs, results1, results2 = self._run_partition([
            on_next(180, 5),
            on_next(210, 4),
            on_next(240, 3),
            on_error(290, error),
            on_next(350, 1),
            on_completed(360)])
        assert results1.messages == [
            on_next(210, 4),
            on_error(290, error)]
        assert results2.messages == [
            on_next(240, 3),
            on_error(290, error)]
        assert xs.subscriptions == [
            subscribe(200, 290)]

    def test_partition_disposed(self):
        # Early disposal at 280 cuts the stream off before later values.
        xs, results1, results2 = self._run_partition([
            on_next(180, 5),
            on_next(210, 4),
            on_next(240, 3),
            on_next(290, 2),
            on_next(350, 1),
            on_completed(360)], dispose_time=280)
        assert results1.messages == [
            on_next(210, 4)]
        assert results2.messages == [
            on_next(240, 3)]
        assert xs.subscriptions == [
            subscribe(200, 280)]
| 29.18429
| 69
| 0.598965
| 914
| 9,660
| 6.182713
| 0.078775
| 0.044594
| 0.092904
| 0.13095
| 0.898071
| 0.836312
| 0.836312
| 0.818085
| 0.807468
| 0.807468
| 0
| 0.063194
| 0.300518
| 9,660
| 330
| 70
| 29.272727
| 0.773124
| 0
| 0
| 0.820408
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 1
| 0.118367
| false
| 0
| 0.008163
| 0.004082
| 0.134694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4280933515c69e4be810414bc665b3c512c85944
| 4,734
|
py
|
Python
|
hw12/code/newton.py
|
nanako-seven/CS2601
|
8301ea1524a8ae9c6a219687d56c2542beb93736
|
[
"BSD-3-Clause"
] | null | null | null |
hw12/code/newton.py
|
nanako-seven/CS2601
|
8301ea1524a8ae9c6a219687d56c2542beb93736
|
[
"BSD-3-Clause"
] | null | null | null |
hw12/code/newton.py
|
nanako-seven/CS2601
|
8301ea1524a8ae9c6a219687d56c2542beb93736
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
# def newton(fp, fpp, x0, tol=1e-5, maxiter=100000):
# """
# fp: function that takes an input x and returns the gradient of f at x
# fpp: function that takes an input x and returns the Hessian of f at x
# x0: initial point
# tol: tolerance parameter in the stopping criterion. Newton's method stops
# when the 2-norm of the gradient is smaller than tol
# maxiter: maximum number of iterations
# This function should return a list of the sequence of approximate solutions
# x_k produced by each iteration
# """
# x_traces = [np.array(x0)]
# x = np.array(x0)
# # START OF YOUR CODE
# pass
# # END OF YOUR CODE
# return x_traces
# def damped_newton(f, fp, fpp, x0, alpha=0.5, beta=0.5, tol=1e-5, maxiter=100000):
# """
# f: function that takes an input x and returns the value of f at x
# fp: function that takes an input x and returns the gradient of f at x
# fpp: function that takes an input x and returns the Hessian of f at x
# x0: initial point in gradient descent
# alpha: parameter in Armijo's rule
# f(x + t * d) > f(x) + t * alpha * <f'(x), d>
# beta: constant factor used in stepsize reduction
# tol: tolerance parameter in the stopping criterion. Here we stop
# when the 2-norm of the gradient is smaller than tol
# maxiter: maximum number of iterations in gradient descent.
# This function should return a list of the sequence of approximate solutions
# x_k produced by each iteration and the total number of iterations in the inner loop
# """
# x_traces = [np.array(x0)]
# stepsize_traces = []
# tot_num_iter = 0
# x = np.array(x0)
# for it in range(maxiter):
# # START OF YOUR CODE
# pass
# # END OF YOUR CODE
# x_traces.append(np.array(x))
# stepsize_traces.append(stepsize)
# return x_traces, stepsize_traces, tot_num_iter
def newton_eq(f, fp, fpp, x0, A, b, initial_stepsize=1.0, alpha=0.5, beta=0.5, tol=1e-8, maxiter=100000):
    """Damped Newton's method for equality-constrained minimization.

    Minimizes f subject to A x = b, starting from the feasible point x0.
    Each Newton direction d is obtained by solving the KKT system

        [ fpp(x)  A.T ] [ d  ]   [ -fp(x) ]
        [ A       0   ] [ nu ] = [  0     ]

    and the stepsize is chosen by backtracking line search (Armijo's rule:
    accept t once f(x + t*d) <= f(x) + alpha * t * fp(x) @ d).

    f: function that takes an input x and returns the value of f at x
    fp: function that takes an input x and returns the gradient of f at x
    fpp: function that takes an input x and returns the Hessian of f at x
    A, b: constraint A x = b
    x0: initial feasible point
    initial_stepsize: initial stepsize used in backtracking line search
    alpha: parameter in Armijo's rule
    beta: constant factor used in stepsize reduction
    tol: tolerance parameter in the stopping criterion; iteration stops
        when the 2-norm of the Newton direction is smaller than tol
    maxiter: maximum number of outer-loop iterations

    Returns the list of iterates x_k (including a copy of x0).
    """
    # Work in float so the update below cannot fail (or silently truncate)
    # on an integer-dtype x0.
    x = np.array(x0, dtype=float)
    x_traces = [x.copy()]
    m = len(b)
    for _ in range(maxiter):
        # Bordered KKT matrix: Hessian in the top-left, constraints around it.
        kkt = np.block([[fpp(x), A.T], [A, np.zeros((m, m))]])
        rhs = np.concatenate([-fp(x), np.zeros(m)])
        # Drop the trailing m multiplier entries; assumes m >= 1
        # ([:-m] would be empty for m == 0).
        d = np.linalg.solve(kkt, rhs)[:-m]
        # Backtracking line search.  f(x) and the Armijo slope are
        # invariant across backtracking steps, so compute them once.
        t = initial_stepsize
        fx = f(x)
        slope = alpha * np.dot(fp(x), d)
        while f(x + t * d) > fx + t * slope:
            t *= beta
        x = x + t * d
        x_traces.append(x.copy())
        if np.linalg.norm(d) < tol:
            break
    return x_traces
def newton_eq_2(f, fp, fpp, x0, A, b, initial_stepsize=1.0, alpha=0.5, beta=0.5, tol=1e-8, maxiter=100000):
    """Equality-constrained damped Newton's method with a positivity guard.

    Identical to newton_eq except that the backtracking line search also
    shrinks the step until the trial point stays strictly positive
    componentwise, so iterates never leave the domain (e.g. of a log
    barrier).

    f: function that takes an input x and returns the value of f at x
    fp: function that takes an input x and returns the gradient of f at x
    fpp: function that takes an input x and returns the Hessian of f at x
    A, b: constraint A x = b
    x0: initial feasible point (should be strictly positive)
    initial_stepsize: initial stepsize used in backtracking line search
    alpha: parameter in Armijo's rule
    beta: constant factor used in stepsize reduction
    tol: tolerance parameter in the stopping criterion; iteration stops
        when the 2-norm of the Newton direction is smaller than tol
    maxiter: maximum number of outer-loop iterations

    Returns the list of iterates x_k (including a copy of x0).
    """
    # Work in float so the update below cannot fail (or silently truncate)
    # on an integer-dtype x0.
    x = np.array(x0, dtype=float)
    x_traces = [x.copy()]
    m = len(b)
    for _ in range(maxiter):
        # Bordered KKT matrix: Hessian in the top-left, constraints around it.
        kkt = np.block([[fpp(x), A.T], [A, np.zeros((m, m))]])
        rhs = np.concatenate([-fp(x), np.zeros(m)])
        # Drop the trailing m multiplier entries; assumes m >= 1
        # ([:-m] would be empty for m == 0).
        d = np.linalg.solve(kkt, rhs)[:-m]
        # Backtracking line search; f(x) and the Armijo slope are
        # invariant across backtracking steps, so compute them once.
        t = initial_stepsize
        fx = f(x)
        slope = alpha * np.dot(fp(x), d)
        # Keep the trial point strictly positive (prevents stepping out of
        # the domain) in addition to the Armijo sufficient-decrease test.
        while not (x + t * d > 0).all() or f(x + t * d) > fx + t * slope:
            t *= beta
        x = x + t * d
        x_traces.append(x.copy())
        if np.linalg.norm(d) < tol:
            break
    return x_traces
| 36.415385
| 107
| 0.634559
| 797
| 4,734
| 3.728984
| 0.156838
| 0.008748
| 0.062921
| 0.070323
| 0.903432
| 0.869785
| 0.869785
| 0.853297
| 0.847241
| 0.815949
| 0
| 0.022656
| 0.263414
| 4,734
| 129
| 108
| 36.697674
| 0.829653
| 0.680608
| 0
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.030303
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
428bdb8fbdb9acac08ba4add956b6eb9e258cac5
| 20,923
|
py
|
Python
|
jedisim/jedigridconfiggenerator.py
|
rbliu/Useful_Scripts
|
677a3cd079aab1df3fef87d2b6c6b1763b5d6d9e
|
[
"BSD-3-Clause"
] | null | null | null |
jedisim/jedigridconfiggenerator.py
|
rbliu/Useful_Scripts
|
677a3cd079aab1df3fef87d2b6c6b1763b5d6d9e
|
[
"BSD-3-Clause"
] | null | null | null |
jedisim/jedigridconfiggenerator.py
|
rbliu/Useful_Scripts
|
677a3cd079aab1df3fef87d2b6c6b1763b5d6d9e
|
[
"BSD-3-Clause"
] | null | null | null |
#! /usr/bin/env python
#last modified 8 July 2013
import os, sys, subprocess, math, re
#parse command line inputs
# NOTE(review): this exits unless the script is run with ZERO extra
# arguments (len(sys.argv) == 1), yet the usage string advertises a
# `config` argument -- presumably the check was meant to be `!= 2`;
# confirm intent before changing.
if(len(sys.argv) != 1):
    print("Usage: jedimaster config")
    sys.exit(1)
# Output directory for generated lens catalogs and config files.
# NOTE(review): the name shadows the builtin `set`.
set = "production_1_grids/"
#masses = [2,5,10,20]
#concentrations = [4.0]
# Lens mass and concentration written into every lens catalog below.
mass = 10
con = 4
#actual shears = [0.4, 0.35, 0.3, 0.25, 0.2, 0.15, 0.1]
#pixel distance for desired shear
shears = [317.7010, 378.9446, 470.8099, 631.5744, 960.7586, 1645.9211, 3291.8422]
# Grid angles in radians (multiples of pi/6, i.e. 0..150 degrees).
directions=[0., 0.523599, 1.0472, 1.5708, 2.0944, 2.61799]
# Number of trial configs generated per (shear, direction) pair.
trials = 1
convolved_path = set
# Create the output directory on first run.
if not os.path.exists(set):
    os.makedirs(set)
# Generate one lens catalog per (shear, direction) pair and one jedisim
# config file per (shear, direction, trial).  Uses print(...)/range(...)
# and `with` blocks, which behave identically on Python 2 and also run
# on Python 3.
for s, shear in enumerate(shears):
    for d, direct in enumerate(directions):
        # Lens catalog for this (shear index s, direction index d); the
        # config template references it under physics_settings/.
        lens_file = "%slenses_%i_%i.txt" % (set, s, d)
        with open(lens_file, 'w') as f:
            f.write("6144 6144 2 %f %f" % (mass, con))
        for n in range(0, trials):
            # Flat run index: one config file per (shear, direction, trial).
            r = s * len(directions) * trials + d * trials + n
            print("s: %f\td: %f\t n: %i\t r: %i" % (shear, direct, n, r))
            # Template placeholders, in order: lenses_file, output_folder
            # trial index, prefix trial index, grid_radius, grid_angle.
            config = """#---------------------physics settings-----------------------
pix_scale=0.03 #pixel scale to use (arseconds per pixel)
final_pix_scale=0.2 #LSST pixscale (arcsecords per pixel)
exp_time=6000 #exposure time in seconds
noise_mean=10 #mean for poisson noise
nx=12288 #number of pixels in the x direction
ny=12288 #number of pixels in the y direction
x_border=301 #must be large enough so that no image can overflow_
y_border=301 #must be large enough so that no image can overflow
x_trim=0 #larger than x_border to ensure no edge effects
y_trim=0 #larger than y_border to ensure no edge effects
num_galaxies=1000 #number of galaxies to simulate 138,000 default
min_mag=22 #minimum magnitude galaxy to simulate (inclusive)
max_mag=23 #maximum magnitude galaxy to simulate (inclusive)
single_redshift=1 #use a single source galaxy redshift? 0 = no, 1=yes
fixed_redshift=1.5 #the single source galaxy redshift to use
power=0.33 #power for the power law galaxy distribution
lens_z=0.3 #the redshift of the lenses
lenses_file="physics_settings/%s" #catalog of lenses to use
psf_file="physics_settings/psf_scalednew.fits" #the PSF to use
90_psf_file="physics_settings/psf_scalednew.fits"
#--------------------output settings--------------------------
output_folder="trial_%i/"
prefix="trial%i_"
HST_image="HST.fits"
HST_convolved_image="HST_convolved.fits"
LSST_convolved_image="LSST_convolved.fits"
LSST_convolved_noise_image="LSST_convolved_noise.fits"
grid_radius=%f
grid_angle=%f
#-----------database folders----------------------------------
#must contain files "n.txt" for n= min_mag to max_mag
radius_db_folder="simdatabase/radius_db/"
red_db_folder="simdatabase/red_db/"
#-----------catalog file locations-----------------------------
catalog_file="catalog.txt"
dislist_file="dislist.txt"
convlist_file="toconvolvelist.txt"
dislist_grid_file="dislist_grid.txt"
distortedlist_file="distortedlist.txt"
convolvedlist_file="convolvedlist.txt"
#-----------source images-------------------------------------
num_source_images=327
#all postage stamp images should be on their own line, prefaced with image
#postage stamp images should be fits" file including the following header entries:
#MAG: magnitude of the postage stamp image
#MAG0: magnitude zeropoint of the postage stamp image
#PIXSCALE: pixel scale of the postage stamp image
#RADIUS: R50 radius of the image, in pixels
image="simdatabase/doneall2/scaled_finalnew_galaxy_10.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_11.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_12.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_13.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_14.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_15.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_16.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_17.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_18.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_19.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_1.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_20.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_21.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_22.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_23.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_24.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_25.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_26.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_27.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_28.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_29.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_2.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_30.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_31.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_32.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_33.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_34.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_35.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_36.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_37.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_38.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_39.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_3.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_40.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_41.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_42.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_43.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_44.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_45.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_46.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_47.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_48.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_49.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_4.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_50.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_51.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_52.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_53.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_54.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_55.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_56.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_57.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_58.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_59.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_5.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_60.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_61.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_62.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_63.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_64.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_65.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_66.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_67.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_68.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_69.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_6.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_70.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_71.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_72.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_73.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_74.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_75.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_76.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_77.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_78.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_79.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_7.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_80.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_81.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_82.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_83.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_84.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_85.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_86.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_87.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_88.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_89.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_8.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_90.fits"
image="simdatabase/doneall2/scaled_finalnew_galaxy_9.fits"
image="simdatabase/ivydoneall3/0.fits"
image="simdatabase/ivydoneall3/10.fits"
image="simdatabase/ivydoneall3/11.fits"
image="simdatabase/ivydoneall3/12.fits"
image="simdatabase/ivydoneall3/13.fits"
image="simdatabase/ivydoneall3/14.fits"
image="simdatabase/ivydoneall3/15.fits"
image="simdatabase/ivydoneall3/16.fits"
image="simdatabase/ivydoneall3/17.fits"
image="simdatabase/ivydoneall3/18.fits"
image="simdatabase/ivydoneall3/19.fits"
image="simdatabase/ivydoneall3/1.fits"
image="simdatabase/ivydoneall3/20.fits"
image="simdatabase/ivydoneall3/22.fits"
image="simdatabase/ivydoneall3/23.fits"
image="simdatabase/ivydoneall3/25.fits"
image="simdatabase/ivydoneall3/27.fits"
image="simdatabase/ivydoneall3/28.fits"
image="simdatabase/ivydoneall3/29.fits"
image="simdatabase/ivydoneall3/2.fits"
image="simdatabase/ivydoneall3/31.fits"
image="simdatabase/ivydoneall3/32.fits"
image="simdatabase/ivydoneall3/33.fits"
image="simdatabase/ivydoneall3/34.fits"
image="simdatabase/ivydoneall3/39.fits"
image="simdatabase/ivydoneall3/3.fits"
image="simdatabase/ivydoneall3/40.fits"
image="simdatabase/ivydoneall3/41.fits"
image="simdatabase/ivydoneall3/43.fits"
image="simdatabase/ivydoneall3/44.fits"
image="simdatabase/ivydoneall3/45.fits"
image="simdatabase/ivydoneall3/4.fits"
image="simdatabase/ivydoneall3/5.fits"
image="simdatabase/ivydoneall3/6.fits"
image="simdatabase/ivydoneall3/8.fits"
image="simdatabase/ivydoneall3/9.fits"
image="simdatabase/CANDELS/sect23_f606w_gal0.fits"
image="simdatabase/CANDELS/sect23_f606w_gal1.fits"
image="simdatabase/CANDELS/sect23_f606w_gal2.fits"
image="simdatabase/CANDELS/sect23_f606w_gal3.fits"
image="simdatabase/CANDELS/sect23_f606w_gal4.fits"
image="simdatabase/CANDELS/sect23_f606w_gal5.fits"
image="simdatabase/CANDELS/sect23_f606w_gal6.fits"
image="simdatabase/CANDELS/sect23_f606w_gal7.fits"
image="simdatabase/CANDELS/sect23_f606w_gal8.fits"
image="simdatabase/CANDELS/sect23_f606w_gal9.fits"
image="simdatabase/CANDELS/sect23_f606w_gal10.fits"
image="simdatabase/CANDELS/sect23_f606w_gal11.fits"
image="simdatabase/CANDELS/sect23_f606w_gal12.fits"
image="simdatabase/CANDELS/sect23_f606w_gal13.fits"
image="simdatabase/CANDELS/sect23_f606w_gal14.fits"
image="simdatabase/CANDELS/sect23_f606w_gal15.fits"
image="simdatabase/CANDELS/sect23_f606w_gal16.fits"
image="simdatabase/CANDELS/sect23_f606w_gal17.fits"
image="simdatabase/CANDELS/sect23_f606w_gal18.fits"
image="simdatabase/CANDELS/sect23_f606w_gal19.fits"
image="simdatabase/CANDELS/sect23_f606w_gal20.fits"
image="simdatabase/CANDELS/sect23_f606w_gal21.fits"
image="simdatabase/CANDELS/sect23_f606w_gal22.fits"
image="simdatabase/CANDELS/sect23_f606w_gal23.fits"
image="simdatabase/CANDELS/sect23_f606w_gal24.fits"
image="simdatabase/CANDELS/sect23_f606w_gal25.fits"
image="simdatabase/CANDELS/sect23_f606w_gal26.fits"
image="simdatabase/CANDELS/sect23_f606w_gal27.fits"
image="simdatabase/CANDELS/sect23_f606w_gal28.fits"
image="simdatabase/CANDELS/sect23_f606w_gal29.fits"
image="simdatabase/CANDELS/sect23_f606w_gal30.fits"
image="simdatabase/CANDELS/sect23_f606w_gal31.fits"
image="simdatabase/CANDELS/sect23_f606w_gal32.fits"
image="simdatabase/CANDELS/sect23_f606w_gal33.fits"
image="simdatabase/CANDELS/sect23_f606w_gal34.fits"
image="simdatabase/CANDELS/sect23_f606w_gal35.fits"
image="simdatabase/CANDELS/sect23_f606w_gal36.fits"
image="simdatabase/CANDELS/sect23_f606w_gal37.fits"
image="simdatabase/CANDELS/sect23_f606w_gal38.fits"
image="simdatabase/CANDELS/sect23_f606w_gal39.fits"
image="simdatabase/CANDELS/sect23_f606w_gal40.fits"
image="simdatabase/CANDELS/sect23_f606w_gal41.fits"
image="simdatabase/CANDELS/sect23_f606w_gal42.fits"
image="simdatabase/CANDELS/sect23_f606w_gal43.fits"
image="simdatabase/CANDELS/sect23_f606w_gal44.fits"
image="simdatabase/CANDELS/sect23_f606w_gal45.fits"
image="simdatabase/CANDELS/sect23_f606w_gal46.fits"
image="simdatabase/CANDELS/sect23_f606w_gal47.fits"
image="simdatabase/CANDELS/sect23_f606w_gal48.fits"
image="simdatabase/CANDELS/sect23_f606w_gal49.fits"
image="simdatabase/CANDELS/sect23_f606w_gal50.fits"
image="simdatabase/CANDELS/sect23_f606w_gal51.fits"
image="simdatabase/CANDELS/sect23_f606w_gal52.fits"
image="simdatabase/CANDELS/sect23_f606w_gal53.fits"
image="simdatabase/CANDELS/sect23_f606w_gal54.fits"
image="simdatabase/CANDELS/sect23_f606w_gal55.fits"
image="simdatabase/CANDELS/sect23_f606w_gal56.fits"
image="simdatabase/CANDELS/sect23_f606w_gal57.fits"
image="simdatabase/CANDELS/sect23_f606w_gal58.fits"
image="simdatabase/CANDELS/sect23_f606w_gal59.fits"
image="simdatabase/CANDELS/sect23_f606w_gal60.fits"
image="simdatabase/CANDELS/sect23_f606w_gal61.fits"
image="simdatabase/CANDELS/sect23_f606w_gal62.fits"
image="simdatabase/CANDELS/sect23_f606w_gal63.fits"
image="simdatabase/CANDELS/sect23_f606w_gal64.fits"
image="simdatabase/CANDELS/sect23_f606w_gal65.fits"
image="simdatabase/CANDELS/sect23_f606w_gal66.fits"
image="simdatabase/CANDELS/sect23_f606w_gal67.fits"
image="simdatabase/CANDELS/sect23_f606w_gal68.fits"
image="simdatabase/CANDELS/sect23_f606w_gal69.fits"
image="simdatabase/CANDELS/sect23_f606w_gal70.fits"
image="simdatabase/CANDELS/sect23_f606w_gal71.fits"
image="simdatabase/CANDELS/sect23_f606w_gal72.fits"
image="simdatabase/CANDELS/sect23_f606w_gal73.fits"
image="simdatabase/CANDELS/sect23_f606w_gal74.fits"
image="simdatabase/CANDELS/sect23_f606w_gal75.fits"
image="simdatabase/CANDELS/sect23_f606w_gal76.fits"
image="simdatabase/CANDELS/sect23_f606w_gal77.fits"
image="simdatabase/CANDELS/sect23_f606w_gal78.fits"
image="simdatabase/CANDELS/sect23_f606w_gal79.fits"
image="simdatabase/CANDELS/sect23_f606w_gal80.fits"
image="simdatabase/CANDELS/sect23_f606w_gal81.fits"
image="simdatabase/CANDELS/sect23_f606w_gal82.fits"
image="simdatabase/CANDELS/sect23_f606w_gal83.fits"
image="simdatabase/CANDELS/sect23_f606w_gal84.fits"
image="simdatabase/CANDELS/sect23_f606w_gal85.fits"
image="simdatabase/CANDELS/sect23_f606w_gal86.fits"
image="simdatabase/CANDELS/sect23_f606w_gal87.fits"
image="simdatabase/CANDELS/sect23_f606w_gal88.fits"
image="simdatabase/CANDELS/sect23_f606w_gal89.fits"
image="simdatabase/CANDELS/sect23_f606w_gal90.fits"
image="simdatabase/CANDELS/sect23_f606w_gal91.fits"
image="simdatabase/CANDELS/sect23_f606w_gal92.fits"
image="simdatabase/CANDELS/sect23_f606w_gal93.fits"
image="simdatabase/CANDELS/sect23_f606w_gal94.fits"
image="simdatabase/CANDELS/sect23_f606w_gal95.fits"
image="simdatabase/CANDELS/sect23_f606w_gal96.fits"
image="simdatabase/CANDELS/sect23_f606w_gal97.fits"
image="simdatabase/CANDELS/sect23_f606w_gal98.fits"
image="simdatabase/CANDELS/sect23_f606w_gal99.fits"
image="simdatabase/CANDELS/sect23_f606w_gal100.fits"
image="simdatabase/CANDELS/sect23_f606w_gal101.fits"
image="simdatabase/CANDELS/sect23_f606w_gal102.fits"
image="simdatabase/CANDELS/sect23_f606w_gal103.fits"
image="simdatabase/CANDELS/sect23_f606w_gal104.fits"
image="simdatabase/CANDELS/sect23_f606w_gal105.fits"
image="simdatabase/CANDELS/sect23_f606w_gal106.fits"
image="simdatabase/CANDELS/sect23_f606w_gal107.fits"
image="simdatabase/CANDELS/sect23_f606w_gal108.fits"
image="simdatabase/CANDELS/sect23_f606w_gal109.fits"
image="simdatabase/CANDELS/sect23_f606w_gal110.fits"
image="simdatabase/CANDELS/sect23_f606w_gal111.fits"
image="simdatabase/CANDELS/sect23_f606w_gal112.fits"
image="simdatabase/CANDELS/sect23_f606w_gal113.fits"
image="simdatabase/CANDELS/sect23_f606w_gal114.fits"
image="simdatabase/CANDELS/sect23_f606w_gal115.fits"
image="simdatabase/CANDELS/sect23_f606w_gal116.fits"
image="simdatabase/CANDELS/sect23_f606w_gal117.fits"
image="simdatabase/CANDELS/sect23_f606w_gal118.fits"
image="simdatabase/CANDELS/sect23_f606w_gal119.fits"
image="simdatabase/CANDELS/sect23_f606w_gal120.fits"
image="simdatabase/CANDELS/sect23_f606w_gal121.fits"
image="simdatabase/CANDELS/sect23_f606w_gal122.fits"
image="simdatabase/CANDELS/sect23_f606w_gal123.fits"
image="simdatabase/CANDELS/sect23_f606w_gal124.fits"
image="simdatabase/CANDELS/sect23_f606w_gal125.fits"
image="simdatabase/CANDELS/sect23_f606w_gal126.fits"
image="simdatabase/CANDELS/sect23_f606w_gal127.fits"
image="simdatabase/CANDELS/sect23_f606w_gal128.fits"
image="simdatabase/CANDELS/sect23_f606w_gal129.fits"
image="simdatabase/CANDELS/sect23_f606w_gal130.fits"
image="simdatabase/CANDELS/sect23_f606w_gal131.fits"
image="simdatabase/CANDELS/sect23_f606w_gal132.fits"
image="simdatabase/CANDELS/sect23_f606w_gal133.fits"
image="simdatabase/CANDELS/sect23_f606w_gal134.fits"
image="simdatabase/CANDELS/sect23_f606w_gal135.fits"
image="simdatabase/CANDELS/sect23_f606w_gal136.fits"
image="simdatabase/CANDELS/sect23_f606w_gal137.fits"
image="simdatabase/CANDELS/sect23_f606w_gal138.fits"
image="simdatabase/CANDELS/sect23_f606w_gal139.fits"
image="simdatabase/CANDELS/sect23_f606w_gal140.fits"
image="simdatabase/CANDELS/sect23_f606w_gal141.fits"
image="simdatabase/CANDELS/sect23_f606w_gal142.fits"
image="simdatabase/CANDELS/sect23_f606w_gal143.fits"
image="simdatabase/CANDELS/sect23_f606w_gal144.fits"
image="simdatabase/CANDELS/sect23_f606w_gal145.fits"
image="simdatabase/CANDELS/sect23_f606w_gal146.fits"
image="simdatabase/CANDELS/sect23_f606w_gal147.fits"
image="simdatabase/CANDELS/sect23_f606w_gal148.fits"
image="simdatabase/CANDELS/sect23_f606w_gal149.fits"
image="simdatabase/CANDELS/sect23_f606w_gal150.fits"
image="simdatabase/CANDELS/sect23_f606w_gal151.fits"
image="simdatabase/CANDELS/sect23_f606w_gal152.fits"
image="simdatabase/CANDELS/sect23_f606w_gal153.fits"
image="simdatabase/CANDELS/sect23_f606w_gal154.fits"
image="simdatabase/CANDELS/sect23_f606w_gal155.fits"
image="simdatabase/CANDELS/sect23_f606w_gal156.fits"
image="simdatabase/CANDELS/sect23_f606w_gal157.fits"
image="simdatabase/CANDELS/sect23_f606w_gal158.fits"
image="simdatabase/CANDELS/sect23_f606w_gal159.fits"
image="simdatabase/CANDELS/sect23_f606w_gal160.fits"
image="simdatabase/CANDELS/sect23_f606w_gal161.fits"
image="simdatabase/CANDELS/sect23_f606w_gal162.fits"
image="simdatabase/CANDELS/sect23_f606w_gal163.fits"
image="simdatabase/CANDELS/sect23_f606w_gal164.fits"
image="simdatabase/CANDELS/sect23_f606w_gal165.fits"
image="simdatabase/CANDELS/sect23_f606w_gal166.fits"
image="simdatabase/CANDELS/sect23_f606w_gal167.fits"
image="simdatabase/CANDELS/sect23_f606w_gal168.fits"
image="simdatabase/CANDELS/sect23_f606w_gal169.fits"
image="simdatabase/CANDELS/sect23_f606w_gal170.fits"
image="simdatabase/CANDELS/sect23_f606w_gal171.fits"
image="simdatabase/CANDELS/sect23_f606w_gal172.fits"
image="simdatabase/CANDELS/sect23_f606w_gal173.fits"
image="simdatabase/CANDELS/sect23_f606w_gal174.fits"
image="simdatabase/CANDELS/sect23_f606w_gal175.fits"
image="simdatabase/CANDELS/sect23_f606w_gal176.fits"
image="simdatabase/CANDELS/sect23_f606w_gal177.fits"
image="simdatabase/CANDELS/sect23_f606w_gal178.fits"
image="simdatabase/CANDELS/sect23_f606w_gal179.fits"
image="simdatabase/CANDELS/sect23_f606w_gal180.fits"
image="simdatabase/CANDELS/sect23_f606w_gal181.fits"
image="simdatabase/CANDELS/sect23_f606w_gal182.fits"
image="simdatabase/CANDELS/sect23_f606w_gal183.fits"
image="simdatabase/CANDELS/sect23_f606w_gal184.fits"
image="simdatabase/CANDELS/sect23_f606w_gal185.fits"
image="simdatabase/CANDELS/sect23_f606w_gal186.fits"
image="simdatabase/CANDELS/sect23_f606w_gal187.fits"
image="simdatabase/CANDELS/sect23_f606w_gal188.fits"
image="simdatabase/CANDELS/sect23_f606w_gal189.fits"
image="simdatabase/CANDELS/sect23_f606w_gal190.fits"
image="simdatabase/CANDELS/sect23_f606w_gal191.fits"
image="simdatabase/CANDELS/sect23_f606w_gal192.fits"
image="simdatabase/CANDELS/sect23_f606w_gal193.fits"
image="simdatabase/CANDELS/sect23_f606w_gal194.fits"
image="simdatabase/CANDELS/sect23_f606w_gal195.fits"
image="simdatabase/CANDELS/sect23_f606w_gal196.fits"
image="simdatabase/CANDELS/sect23_f606w_gal197.fits"
image="simdatabase/CANDELS/sect23_f606w_gal198.fits"
image="simdatabase/CANDELS/sect23_f606w_gal199.fits"
image="simdatabase/CANDELS/sect23_f606w_gal200.fits"
""" % (lens_file, r, r, shear, direct)
            with open("%sconfig_%i.txt" % (set, r), 'w') as f:
                f.write(config)
| 50.055024
| 82
| 0.837069
| 2,869
| 20,923
| 5.845242
| 0.17602
| 0.311986
| 0.38879
| 0.323614
| 0.742516
| 0.731425
| 0.267263
| 0.005963
| 0.005963
| 0.005963
| 0
| 0.101913
| 0.040482
| 20,923
| 417
| 83
| 50.17506
| 0.733413
| 0.009511
| 0
| 0.004975
| 0
| 0.002488
| 0.962831
| 0.862377
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.002488
| null | null | 0.004975
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c44541c05cbf1b8632712583d8741d064ac2dcec
| 2,334
|
py
|
Python
|
tests/itemmining_tests.py
|
wangli320/pymining
|
41cb6650d6866aee5d373989921ea33730fb8562
|
[
"BSD-3-Clause"
] | 394
|
2015-01-16T11:45:22.000Z
|
2022-03-17T06:55:28.000Z
|
tests/itemmining_tests.py
|
SunilDSK/pymining
|
41cb6650d6866aee5d373989921ea33730fb8562
|
[
"BSD-3-Clause"
] | 7
|
2015-03-31T07:02:53.000Z
|
2017-03-07T05:05:36.000Z
|
tests/itemmining_tests.py
|
SunilDSK/pymining
|
41cb6650d6866aee5d373989921ea33730fb8562
|
[
"BSD-3-Clause"
] | 114
|
2015-02-07T10:55:10.000Z
|
2022-02-26T16:40:03.000Z
|
import unittest
from pymining import itemmining, perftesting
class TestItemSetAlgo(unittest.TestCase):
    """Exercise the frequent-itemset algorithms (relim, sam, fpgrowth).

    All algorithms are run against the same two canned transaction sets and
    must produce identical reports, so the shared checks live in one helper
    instead of being copy-pasted into every test.
    """

    def _check_algorithm(self, build_input, run):
        """Run ``run(build_input(ts), 2)`` on both default transaction sets
        and assert the known report sizes and item supports."""
        ts1 = perftesting.get_default_transactions()
        report = run(build_input(ts1), 2)
        self.assertEqual(17, len(report))
        self.assertEqual(6, report[frozenset(['b', 'd'])])

        ts2 = perftesting.get_default_transactions_alt()
        report = run(build_input(ts2), 2)
        self.assertEqual(19, len(report))
        self.assertEqual(5, report[frozenset(['a', 'b'])])

    def test_relim(self):
        self._check_algorithm(itemmining.get_relim_input, itemmining.relim)

    def test_sam(self):
        self._check_algorithm(itemmining.get_sam_input, itemmining.sam)

    def test_fpgrowth_pruning_on(self):
        self._check_algorithm(
            itemmining.get_fptree,
            lambda tree, support: itemmining.fpgrowth(tree, support, pruning=True),
        )

    def test_fpgrowth_pruning_off(self):
        self._check_algorithm(
            itemmining.get_fptree,
            lambda tree, support: itemmining.fpgrowth(tree, support, pruning=False),
        )
| 40.241379
| 64
| 0.664096
| 279
| 2,334
| 5.354839
| 0.146953
| 0.160643
| 0.11245
| 0.176707
| 0.919009
| 0.919009
| 0.782463
| 0.782463
| 0.718206
| 0.718206
| 0
| 0.025988
| 0.208655
| 2,334
| 57
| 65
| 40.947368
| 0.782891
| 0
| 0
| 0.765957
| 0
| 0
| 0.006855
| 0
| 0
| 0
| 0
| 0
| 0.340426
| 1
| 0.085106
| false
| 0
| 0.042553
| 0
| 0.148936
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c455acd17425c47f3ca19f68959f574c91432415
| 15,430
|
py
|
Python
|
src/share_rest_api/tests/test_share.py
|
flecoqui/sharing-data-rest-api
|
a468861c941099f3a1546f764e76194f6a9ff066
|
[
"MIT"
] | null | null | null |
src/share_rest_api/tests/test_share.py
|
flecoqui/sharing-data-rest-api
|
a468861c941099f3a1546f764e76194f6a9ff066
|
[
"MIT"
] | null | null | null |
src/share_rest_api/tests/test_share.py
|
flecoqui/sharing-data-rest-api
|
a468861c941099f3a1546f764e76194f6a9ff066
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from unittest.mock import patch
import pytest
from fastapi.testclient import TestClient
from shared_code.models import (
ConsumeResponse,
Dataset,
Error,
Node,
ShareNode,
ShareRequest,
ShareResponse,
StatusDetails,
)
from .conftest import MinimalResponse
def test_version(client: TestClient):
    """The /version endpoint must be reachable and reply with HTTP 200."""
    json_headers = {"accept": "application/json", "Content-Type": "application/json"}
    version_response = client.get(url="/version", headers=json_headers)
    assert version_response.status_code == 200
def test_time(client: TestClient):
    """The /time endpoint must be reachable and reply with HTTP 200."""
    json_headers = {"accept": "application/json", "Content-Type": "application/json"}
    time_response = client.get(url="/time", headers=json_headers)
    assert time_response.status_code == 200
@pytest.mark.parametrize(
    "initialize_return,initialize_azure_clients_return, status_code",
    [(True, True, 200), (True, False, 500)],
)
def test_create_share(
    client: TestClient, initialize_return, initialize_azure_clients_return, status_code
):
    """POST /share: 200 when the datashare service's Azure clients
    initialize, 500 when that initialization fails."""
    # Patch the outbound node-registry lookup and the three DatashareService
    # entry points the endpoint exercises.
    with patch("requests.get") as mock_requests_get, patch(
        "shared_code.datashare_service.DatashareService.initialize_azure_clients"
    ) as mock_initialize_azure_clients, patch(
        "shared_code.datashare_service.DatashareService.initialize"
    ) as mock_initialize, patch(
        "shared_code.datashare_service.DatashareService.share"
    ) as mock_share:
        # Minimal node the registry lookup is stubbed to return.
        node = Node(
            node_id="testa",
            tenant_id="00000000-0000-0000-000000000000",
            identity="00000000-0000-0000-000000000000",
        )
        mock_requests_get.return_value = MinimalResponse(
            status_code=200, text=node.json()
        )
        mock_initialize.return_value = initialize_return
        mock_initialize_azure_clients.return_value = initialize_azure_clients_return
        dataset = Dataset(
            resource_group_name="testrg",
            storage_account_name="testsa",
            container_name="testc",
            folder_path="testfolder",
            file_name="testfile",
        )
        status = StatusDetails(
            status="Pending", start=datetime.utcnow(), end=datetime.utcnow(), duration=0
        )
        error = Error(
            code=0, message="No error", source="share_rest_api", date=datetime.utcnow()
        )
        node_id = "testa"
        # The service layer returns a plain dict built from the response model.
        mock_share.return_value = dict(
            ShareResponse(
                invitation_id="00000000-0000-0000-000000000000",
                invitation_name="invitationName",
                provider_node_id=node_id,
                consumer_node_id=node_id,
                dataset=dataset,
                status=status,
                error=error,
            )
        )
        share = ShareRequest(
            provider_node_id=node_id, consumer_node_id=node_id, dataset=dataset
        )
        share_response = client.post(
            url="/share",
            json=share.dict(),
            headers={"accept": "application/json", "Content-Type": "application/json"},
        )
        assert share_response.status_code == status_code
@pytest.mark.parametrize(
    "initialize_return,initialize_azure_clients_return, status_code",
    [(True, True, 200), (True, False, 500)],
)
def test_get_share(
    client: TestClient, initialize_return, initialize_azure_clients_return, status_code
):
    """GET /share: 200 when the datashare service's Azure clients
    initialize, 500 when that initialization fails."""
    with patch("requests.get") as mock_requests_get, patch(
        "shared_code.datashare_service.DatashareService.initialize_azure_clients"
    ) as mock_initialize_azure_clients, patch(
        "shared_code.datashare_service.DatashareService.initialize"
    ) as mock_initialize, patch(
        "shared_code.datashare_service.DatashareService.share_status"
    ) as mock_share_status:
        # Minimal node the registry lookup is stubbed to return.
        node = Node(
            node_id="testa",
            tenant_id="00000000-0000-0000-000000000000",
            identity="00000000-0000-0000-000000000000",
        )
        mock_requests_get.return_value = MinimalResponse(
            status_code=200, text=node.json()
        )
        mock_initialize.return_value = initialize_return
        mock_initialize_azure_clients.return_value = initialize_azure_clients_return
        dataset = Dataset(
            resource_group_name="testrg",
            storage_account_name="testsa",
            container_name="testc",
            folder_path="testfolder",
            file_name="testfile",
        )
        status = StatusDetails(
            status="Pending", start=datetime.utcnow(), end=datetime.utcnow(), duration=0
        )
        error = Error(
            code=0, message="No error", source="share_rest_api", date=datetime.utcnow()
        )
        node_id = "testa"
        mock_share_status.return_value = dict(
            ShareResponse(
                invitation_id="00000000-0000-0000-000000000000",
                invitation_name="invitationName",
                provider_node_id=node_id,
                consumer_node_id=node_id,
                dataset=dataset,
                status=status,
                error=error,
            )
        )
        # Query parameters identifying the share whose status is requested.
        params = dict()
        params["provider_node_id"] = node_id
        params["consumer_node_id"] = node_id
        params["datashare_storage_resource_group_name"] = "testrg"
        params["datashare_storage_account_name"] = "testsa"
        params["datashare_storage_container_name"] = "testc"
        params["datashare_storage_folder_path"] = "testfolder"
        params["datashare_storage_file_name"] = "testfile"
        share_response = client.get(
            url="/share",
            params=params,
            headers={"accept": "application/json", "Content-Type": "application/json"},
        )
        assert share_response.status_code == status_code
@pytest.mark.parametrize(
    "initialize_return,initialize_azure_clients_return, status_code",
    [(True, True, 200), (True, False, 500)],
)
def test_get_consume(
    client: TestClient, initialize_return, initialize_azure_clients_return, status_code
):
    """GET /consume: 200 when the datashare service's Azure clients
    initialize, 500 when that initialization fails."""
    with patch("requests.get") as mock_requests_get, patch(
        "shared_code.datashare_service.DatashareService.initialize_azure_clients"
    ) as mock_initialize_azure_clients, patch(
        "shared_code.datashare_service.DatashareService.initialize"
    ) as mock_initialize, patch(
        "shared_code.datashare_service.DatashareService.consume"
    ) as mock_consume:
        # Minimal node the registry lookup is stubbed to return.
        node = Node(
            node_id="testa",
            tenant_id="00000000-0000-0000-000000000000",
            identity="00000000-0000-0000-000000000000",
        )
        mock_requests_get.return_value = MinimalResponse(
            status_code=200, text=node.json()
        )
        mock_initialize.return_value = initialize_return
        mock_initialize_azure_clients.return_value = initialize_azure_clients_return
        dataset = Dataset(
            resource_group_name="testrg",
            storage_account_name="testsa",
            container_name="testc",
            folder_path="testfolder",
            file_name="testfile",
        )
        status = StatusDetails(
            status="Pending", start=datetime.utcnow(), end=datetime.utcnow(), duration=0
        )
        error = Error(
            code=0, message="No error", source="share_rest_api", date=datetime.utcnow()
        )
        node_id = "testa"
        # BUG FIX: an accidental trailing comma previously made this a
        # 1-tuple, so the query parameter was serialized as a sequence
        # instead of a plain string.
        invitation_id = "00000000-0000-0000-000000000000"
        mock_consume.return_value = ConsumeResponse(
            invitation_id="00000000-0000-0000-000000000000",
            provider_node_id=node_id,
            consumer_node_id=node_id,
            dataset=dataset,
            status=status,
            error=error,
        )
        params = dict()
        params["provider_node_id"] = node_id
        params["consumer_node_id"] = node_id
        params["invitation_id"] = invitation_id
        share_response = client.get(
            url="/consume",
            params=params,
            headers={"accept": "application/json", "Content-Type": "application/json"},
        )
        assert share_response.status_code == status_code
@pytest.mark.parametrize(
    "initialize_return,initialize_azure_clients_return, status_code",
    [(True, True, 200), (True, False, 500)],
)
def test_shareconsume(
    client: TestClient, initialize_return, initialize_azure_clients_return, status_code
):
    """POST /shareconsume: 200 when the datashare service's Azure clients
    initialize, 500 when that initialization fails."""
    with patch("requests.get") as mock_requests_get, patch(
        "shared_code.datashare_service.DatashareService.initialize_azure_clients"
    ) as mock_initialize_azure_clients, patch(
        "shared_code.datashare_service.DatashareService.initialize"
    ) as mock_initialize, patch(
        "shared_code.datashare_service.DatashareService.share"
    ) as mock_share:
        # Minimal node the registry lookup is stubbed to return.
        node = Node(
            node_id="testa",
            tenant_id="00000000-0000-0000-000000000000",
            identity="00000000-0000-0000-000000000000",
        )
        mock_requests_get.return_value = MinimalResponse(
            status_code=200, text=node.json()
        )
        mock_initialize.return_value = initialize_return
        mock_initialize_azure_clients.return_value = initialize_azure_clients_return
        dataset = Dataset(
            resource_group_name="testrg",
            storage_account_name="testsa",
            container_name="testc",
            folder_path="testfolder",
            file_name="testfile",
        )
        status = StatusDetails(
            status="Pending", start=datetime.utcnow(), end=datetime.utcnow(), duration=0
        )
        error = Error(
            code=0, message="No error", source="share_rest_api", date=datetime.utcnow()
        )
        node_id = "testa"
        mock_share.return_value = dict(
            ShareResponse(
                invitation_id="00000000-0000-0000-000000000000",
                invitation_name="invitationName",
                provider_node_id=node_id,
                consumer_node_id=node_id,
                dataset=dataset,
                status=status,
                error=error,
            )
        )
        share = ShareRequest(
            provider_node_id=node_id, consumer_node_id=node_id, dataset=dataset
        )
        share_response = client.post(
            url="/shareconsume",
            json=share.dict(),
            headers={"accept": "application/json", "Content-Type": "application/json"},
        )
        assert share_response.status_code == status_code
@pytest.mark.parametrize(
    "create_error, status_code",
    [(False, 200), (True, 500)],
)
def test_get_shareconsume(client: TestClient, create_error, status_code):
    """GET /shareconsume proxies the consumer node: a 200 upstream reply
    is forwarded as 200, an upstream failure surfaces as 500."""
    with patch("requests.get") as mock_requests_get:
        dataset = Dataset(
            resource_group_name="testrg",
            storage_account_name="testsa",
            container_name="testc",
            folder_path="testfolder",
            file_name="testfile",
        )
        status = StatusDetails(
            status="Pending", start=datetime.utcnow(), end=datetime.utcnow(), duration=0
        )
        error = Error(
            code=0, message="No error", source="share_rest_api", date=datetime.utcnow()
        )
        node_id = "testa"
        # BUG FIX: an accidental trailing comma previously made this a
        # 1-tuple, so the query parameter was serialized as a sequence
        # instead of a plain string.
        invitation_id = "00000000-0000-0000-000000000000"
        cr = ConsumeResponse(
            invitation_id="00000000-0000-0000-000000000000",
            provider_node_id=node_id,
            consumer_node_id=node_id,
            dataset=dataset,
            status=status,
            error=error,
        )
        # Stub the upstream call: either a failing reply or the payload above.
        if create_error is True:
            mock_requests_get.return_value = MinimalResponse(
                status_code=500, text="Exception occurred"
            )
        else:
            mock_requests_get.return_value = MinimalResponse(
                status_code=200, text=cr.json()
            )
        params = dict()
        params["provider_node_id"] = node_id
        params["consumer_node_id"] = node_id
        params["invitation_id"] = invitation_id
        share_response = client.get(
            url="/shareconsume",
            params=params,
            headers={"accept": "application/json", "Content-Type": "application/json"},
        )
        assert share_response.status_code == status_code
@pytest.mark.parametrize(
    "initialize_return,initialize_azure_clients_return, status_code",
    [(True, True, 200), (True, False, 500)],
)
def test_consumeshare(
    client: TestClient, initialize_return, initialize_azure_clients_return, status_code
):
    """GET /consumeshare: 200 when the datashare service's Azure clients
    initialize, 500 when that initialization fails."""
    with patch("requests.get") as mock_requests_get, patch(
        "shared_code.datashare_service.DatashareService.initialize_azure_clients"
    ) as mock_initialize_azure_clients, patch(
        "shared_code.datashare_service.DatashareService.initialize"
    ) as mock_initialize, patch(
        "shared_code.datashare_service.DatashareService.consume"
    ) as mock_consume:
        # Minimal node the registry lookup is stubbed to return.
        node = Node(
            node_id="testa",
            tenant_id="00000000-0000-0000-000000000000",
            identity="00000000-0000-0000-000000000000",
        )
        mock_requests_get.return_value = MinimalResponse(
            status_code=200, text=node.json()
        )
        mock_initialize.return_value = initialize_return
        mock_initialize_azure_clients.return_value = initialize_azure_clients_return
        dataset = Dataset(
            resource_group_name="testrg",
            storage_account_name="testsa",
            container_name="testc",
            folder_path="testfolder",
            file_name="testfile",
        )
        status = StatusDetails(
            status="Pending", start=datetime.utcnow(), end=datetime.utcnow(), duration=0
        )
        error = Error(
            code=0, message="No error", source="share_rest_api", date=datetime.utcnow()
        )
        node_id = "testa"
        # BUG FIX: an accidental trailing comma previously made this a
        # 1-tuple, so the query parameter was serialized as a sequence
        # instead of a plain string.
        invitation_id = "00000000-0000-0000-000000000000"
        mock_consume.return_value = dict(
            ConsumeResponse(
                invitation_id="00000000-0000-0000-000000000000",
                provider_node_id=node_id,
                consumer_node_id=node_id,
                dataset=dataset,
                status=status,
                error=error,
            )
        )
        params = dict()
        params["provider_node_id"] = node_id
        params["consumer_node_id"] = node_id
        params["invitation_id"] = invitation_id
        share_response = client.get(
            url="/consumeshare",
            params=params,
            headers={"accept": "application/json", "Content-Type": "application/json"},
        )
        assert share_response.status_code == status_code
def test_register_node(share_service):
    """register_share_node() returns True when the registry accepts the POST."""
    with patch("requests.post") as mock_requests_post, patch(
        "shared_code.datashare_service.DatashareService.initialize_azure_clients"
    ) as mock_initialize_azure_clients, patch(
        "shared_code.datashare_service.DatashareService.initialize"
    ) as mock_initialize:
        # Node payload the registry is stubbed to echo back on registration.
        node = ShareNode(
            node_id="testa",
            tenant_id="00000000-0000-0000-000000000000",
            identity="00000000-0000-0000-000000000000",
            url="http://127.0.0.1/",
            name="testa",
        )
        mock_requests_post.return_value = MinimalResponse(
            status_code=200, text=node.json()
        )
        mock_initialize.return_value = True
        mock_initialize_azure_clients.return_value = True
        result = share_service.register_share_node()
        assert result is True
| 36.477541
| 88
| 0.629812
| 1,572
| 15,430
| 5.881679
| 0.071247
| 0.038936
| 0.07852
| 0.031149
| 0.900714
| 0.89801
| 0.894008
| 0.88514
| 0.879624
| 0.879624
| 0
| 0.059707
| 0.270577
| 15,430
| 422
| 89
| 36.563981
| 0.761795
| 0
| 0
| 0.70437
| 0
| 0
| 0.224563
| 0.135774
| 0
| 0
| 0
| 0
| 0.023136
| 1
| 0.023136
| false
| 0
| 0.015424
| 0
| 0.03856
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c474b5c468def885731a05bf7708bae481d82efa
| 31
|
py
|
Python
|
treex/regularizers/__init__.py
|
BioGeek/treex
|
fcbee17fcbc069ff5d33554013ce00e49405f872
|
[
"MIT"
] | null | null | null |
treex/regularizers/__init__.py
|
BioGeek/treex
|
fcbee17fcbc069ff5d33554013ce00e49405f872
|
[
"MIT"
] | null | null | null |
treex/regularizers/__init__.py
|
BioGeek/treex
|
fcbee17fcbc069ff5d33554013ce00e49405f872
|
[
"MIT"
] | null | null | null |
from .l1l2 import L1, L1L2, L2
| 15.5
| 30
| 0.709677
| 6
| 31
| 3.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.24
| 0.193548
| 31
| 1
| 31
| 31
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
67199dbda39f83db141a209b0d41df14ff6f15d7
| 195
|
py
|
Python
|
scrapy_poi/middlewares.py
|
ygo-prometheus/bilibili_danmaku_sensor
|
7b732a3f581023c17e942f40f1181075736dbf68
|
[
"MIT"
] | null | null | null |
scrapy_poi/middlewares.py
|
ygo-prometheus/bilibili_danmaku_sensor
|
7b732a3f581023c17e942f40f1181075736dbf68
|
[
"MIT"
] | null | null | null |
scrapy_poi/middlewares.py
|
ygo-prometheus/bilibili_danmaku_sensor
|
7b732a3f581023c17e942f40f1181075736dbf68
|
[
"MIT"
] | null | null | null |
from user_agent import generate_user_agent
class RandomUserAgentMiddleware(object):
    """Scrapy downloader middleware that stamps every outgoing request
    with a randomly generated User-Agent header."""

    def process_request(self, request, spider):
        """Overwrite the request's User-Agent with a fresh random value."""
        random_agent = generate_user_agent()
        request.headers["User-Agent"] = random_agent
| 32.5
| 61
| 0.779487
| 23
| 195
| 6.347826
| 0.608696
| 0.246575
| 0.232877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 195
| 6
| 61
| 32.5
| 0.863905
| 0
| 0
| 0
| 1
| 0
| 0.05102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
67288b8610ba729caaf44fa94b3ef05f8dc767a3
| 38,329
|
py
|
Python
|
tvdb_api/api/users_api.py
|
h3llrais3r/tvdbapi-v2-client
|
1210df9dd5869ccc5b63149b1b80630310a14f40
|
[
"MIT"
] | 2
|
2021-01-24T07:45:22.000Z
|
2021-11-15T11:29:25.000Z
|
tvdb_api/api/users_api.py
|
h3llrais3r/tvdb_api_v2
|
1210df9dd5869ccc5b63149b1b80630310a14f40
|
[
"MIT"
] | null | null | null |
tvdb_api/api/users_api.py
|
h3llrais3r/tvdb_api_v2
|
1210df9dd5869ccc5b63149b1b80630310a14f40
|
[
"MIT"
] | 1
|
2020-05-07T10:16:15.000Z
|
2020-05-07T10:16:15.000Z
|
# coding: utf-8
"""
TheTVDB API v2
API v3 targets v2 functionality with a few minor additions. The API is accessible via https://api.thetvdb.com and provides the following REST endpoints in JSON format. How to use this API documentation ---------------- You may browse the API routes without authentication, but if you wish to send requests to the API and see response data, then you must authenticate. 1. Obtain a JWT token by `POST`ing to the `/login` route in the `Authentication` section with your API key and credentials. 1. Paste the JWT token from the response into the \"JWT Token\" field at the top of the page and click the 'Add Token' button. You will now be able to use the remaining routes to send requests to the API and get a response. Language Selection ---------------- Language selection is done via the `Accept-Language` header. At the moment, you may only pass one language abbreviation in the header at a time. Valid language abbreviations can be found at the `/languages` route.. Authentication ---------------- Authentication to use the API is similar to the How-to section above. Users must `POST` to the `/login` route with their API key and credentials in the following format in order to obtain a JWT token. `{\"apikey\":\"APIKEY\",\"username\":\"USERNAME\",\"userkey\":\"USERKEY\"}` Note that the username and key are ONLY required for the `/user` routes. The user's key is labled `Account Identifier` in the account section of the main site. The token is then used in all subsequent requests by providing it in the `Authorization` header. The header will look like: `Authorization: Bearer <yourJWTtoken>`. Currently, the token expires after 24 hours. You can `GET` the `/refresh_token` route to extend that expiration date. Versioning ---------------- You may request a different version of the API by including an `Accept` header in your request with the following format: `Accept:application/vnd.thetvdb.v$VERSION`. 
This documentation automatically uses the version seen at the top and bottom of the page. # noqa: E501
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tvdb_api.api_client import ApiClient
class UsersApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def user_favorites_get(self, **kwargs): # noqa: E501
"""user_favorites_get # noqa: E501
Returns an array of favorite series for a given user, will be a blank array if no favorites exist. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_favorites_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: UserFavoritesData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_favorites_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.user_favorites_get_with_http_info(**kwargs) # noqa: E501
return data
    def user_favorites_get_with_http_info(self, **kwargs):  # noqa: E501
        """user_favorites_get  # noqa: E501

        Returns an array of favorite series for a given user, will be a blank array if no favorites exist.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_favorites_get_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: UserFavoritesData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Framework-level kwargs accepted in addition to endpoint parameters.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_favorites_get" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwtToken']  # noqa: E501

        # Delegate the actual HTTP call (and deserialization) to the client.
        return self.api_client.call_api(
            '/user/favorites', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='UserFavoritesData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_favorites_id_delete(self, id, **kwargs): # noqa: E501
"""user_favorites_id_delete # noqa: E501
Deletes the given series ID from the user’s favorite’s list and returns the updated list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_favorites_id_delete(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: ID of the series (required)
:return: UserFavoritesData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_favorites_id_delete_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.user_favorites_id_delete_with_http_info(id, **kwargs) # noqa: E501
return data
    def user_favorites_id_delete_with_http_info(self, id, **kwargs):  # noqa: E501
        """user_favorites_id_delete  # noqa: E501

        Deletes the given series ID from the user’s favorite’s list and returns the updated list.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_favorites_id_delete_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: ID of the series (required)
        :return: UserFavoritesData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint parameters plus framework-level kwargs.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_favorites_id_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `user_favorites_id_delete`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        # 'id' is substituted into the '/user/favorites/{id}' path template.
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwtToken']  # noqa: E501

        # Delegate the actual HTTP call (and deserialization) to the client.
        return self.api_client.call_api(
            '/user/favorites/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='UserFavoritesData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_favorites_id_put(self, id, **kwargs): # noqa: E501
"""user_favorites_id_put # noqa: E501
Adds the supplied series ID to the user’s favorite’s list and returns the updated list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_favorites_id_put(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: ID of the series (required)
:return: UserFavoritesData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_favorites_id_put_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.user_favorites_id_put_with_http_info(id, **kwargs) # noqa: E501
return data
    def user_favorites_id_put_with_http_info(self, id, **kwargs):  # noqa: E501
        """user_favorites_id_put  # noqa: E501

        Adds the supplied series ID to the user’s favorite’s list and returns the updated list.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_favorites_id_put_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: ID of the series (required)
        :return: UserFavoritesData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Endpoint parameters plus framework-level kwargs.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_favorites_id_put" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `user_favorites_id_put`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        # 'id' is substituted into the '/user/favorites/{id}' path template.
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwtToken']  # noqa: E501

        # Delegate the actual HTTP call (and deserialization) to the client.
        return self.api_client.call_api(
            '/user/favorites/{id}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='UserFavoritesData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_get(self, **kwargs): # noqa: E501
"""user_get # noqa: E501
Returns basic information about the currently authenticated user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: UserData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.user_get_with_http_info(**kwargs) # noqa: E501
return data
    def user_get_with_http_info(self, **kwargs):  # noqa: E501
        """user_get  # noqa: E501

        Returns basic information about the currently authenticated user.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.user_get_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: UserData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Framework-level kwargs accepted in addition to endpoint parameters.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method user_get" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['jwtToken']  # noqa: E501

        # Delegate the actual HTTP call (and deserialization) to the client.
        return self.api_client.call_api(
            '/user', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='UserData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def user_ratings_get(self, **kwargs):  # noqa: E501
    """user_ratings_get  # noqa: E501

    Returns an array of ratings for the given user.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: UserRatingsData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the data-only form of the response; the low-level
    # *_with_http_info call already returns either the response data
    # (sync) or the request thread (async), so its result is forwarded
    # unchanged in both cases.
    kwargs['_return_http_data_only'] = True
    return self.user_ratings_get_with_http_info(**kwargs)  # noqa: E501
def user_ratings_get_with_http_info(self, **kwargs):  # noqa: E501
    """user_ratings_get  # noqa: E501

    Returns an array of ratings for the given user.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: UserRatingsData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only these option-style keyword arguments are accepted.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the current local variables; validated
    # options from kwargs are copied in as plain keys for params.get().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_ratings_get" % key
            )
        params[key] = val
    del params['kwargs']

    # No path, query, form, file, or body parameters for this endpoint.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    # Dispatch the request; returns the response data, or the request
    # thread when async_req is set.
    return self.api_client.call_api(
        '/user/ratings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserRatingsData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_ratings_item_type_item_id_delete(self, item_type, item_id, **kwargs):  # noqa: E501
    """user_ratings_item_type_item_id_delete  # noqa: E501

    This route deletes a given rating of a given type.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_item_type_item_id_delete(item_type, item_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str item_type: Item to update. Can be either 'series', 'episode', or 'image' (required)
    :param int item_id: ID of the ratings record that you wish to modify (required)
    :return: UserRatingsDataNoLinksEmptyArray
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the data-only form of the response; the low-level
    # *_with_http_info call already returns either the response data
    # (sync) or the request thread (async), so its result is forwarded
    # unchanged in both cases.
    kwargs['_return_http_data_only'] = True
    return self.user_ratings_item_type_item_id_delete_with_http_info(
        item_type, item_id, **kwargs)  # noqa: E501
def user_ratings_item_type_item_id_delete_with_http_info(self, item_type, item_id, **kwargs):  # noqa: E501
    """user_ratings_item_type_item_id_delete  # noqa: E501

    This route deletes a given rating of a given type.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_item_type_item_id_delete_with_http_info(item_type, item_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str item_type: Item to update. Can be either 'series', 'episode', or 'image' (required)
    :param int item_id: ID of the ratings record that you wish to modify (required)
    :return: UserRatingsDataNoLinksEmptyArray
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the accepted option-style kwargs.
    all_params = ['item_type', 'item_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the current local variables (including
    # item_type/item_id); validated kwargs are copied in as plain keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_ratings_item_type_item_id_delete" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'item_type' is set
    if ('item_type' not in params or
            params['item_type'] is None):
        raise ValueError("Missing the required parameter `item_type` when calling `user_ratings_item_type_item_id_delete`")  # noqa: E501
    # verify the required parameter 'item_id' is set
    if ('item_id' not in params or
            params['item_id'] is None):
        raise ValueError("Missing the required parameter `item_id` when calling `user_ratings_item_type_item_id_delete`")  # noqa: E501

    collection_formats = {}

    # Both values are substituted into the URL template below.
    path_params = {}
    if 'item_type' in params:
        path_params['itemType'] = params['item_type']  # noqa: E501
    if 'item_id' in params:
        path_params['itemId'] = params['item_id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    # Dispatch the request; returns the response data, or the request
    # thread when async_req is set.
    return self.api_client.call_api(
        '/user/ratings/{itemType}/{itemId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserRatingsDataNoLinksEmptyArray',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_ratings_item_type_item_id_item_rating_put(self, item_type, item_id, item_rating, **kwargs):  # noqa: E501
    """user_ratings_item_type_item_id_item_rating_put  # noqa: E501

    This route updates a given rating of a given type.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_item_type_item_id_item_rating_put(item_type, item_id, item_rating, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str item_type: Item to update. Can be either 'series', 'episode', or 'image' (required)
    :param int item_id: ID of the ratings record that you wish to modify (required)
    :param int item_rating: The updated rating number (required)
    :return: UserRatingsDataNoLinks
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the data-only form of the response; the low-level
    # *_with_http_info call already returns either the response data
    # (sync) or the request thread (async), so its result is forwarded
    # unchanged in both cases.
    kwargs['_return_http_data_only'] = True
    return self.user_ratings_item_type_item_id_item_rating_put_with_http_info(
        item_type, item_id, item_rating, **kwargs)  # noqa: E501
def user_ratings_item_type_item_id_item_rating_put_with_http_info(self, item_type, item_id, item_rating, **kwargs):  # noqa: E501
    """user_ratings_item_type_item_id_item_rating_put  # noqa: E501

    This route updates a given rating of a given type.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_item_type_item_id_item_rating_put_with_http_info(item_type, item_id, item_rating, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str item_type: Item to update. Can be either 'series', 'episode', or 'image' (required)
    :param int item_id: ID of the ratings record that you wish to modify (required)
    :param int item_rating: The updated rating number (required)
    :return: UserRatingsDataNoLinks
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the accepted option-style kwargs.
    all_params = ['item_type', 'item_id', 'item_rating']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the current local variables (including the
    # three positional arguments); validated kwargs are copied in as keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_ratings_item_type_item_id_item_rating_put" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'item_type' is set
    if ('item_type' not in params or
            params['item_type'] is None):
        raise ValueError("Missing the required parameter `item_type` when calling `user_ratings_item_type_item_id_item_rating_put`")  # noqa: E501
    # verify the required parameter 'item_id' is set
    if ('item_id' not in params or
            params['item_id'] is None):
        raise ValueError("Missing the required parameter `item_id` when calling `user_ratings_item_type_item_id_item_rating_put`")  # noqa: E501
    # verify the required parameter 'item_rating' is set
    if ('item_rating' not in params or
            params['item_rating'] is None):
        raise ValueError("Missing the required parameter `item_rating` when calling `user_ratings_item_type_item_id_item_rating_put`")  # noqa: E501

    collection_formats = {}

    # All three values are substituted into the URL template below.
    path_params = {}
    if 'item_type' in params:
        path_params['itemType'] = params['item_type']  # noqa: E501
    if 'item_id' in params:
        path_params['itemId'] = params['item_id']  # noqa: E501
    if 'item_rating' in params:
        path_params['itemRating'] = params['item_rating']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    # Dispatch the request; returns the response data, or the request
    # thread when async_req is set.
    return self.api_client.call_api(
        '/user/ratings/{itemType}/{itemId}/{itemRating}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserRatingsDataNoLinks',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_ratings_query_get(self, **kwargs):  # noqa: E501
    """user_ratings_query_get  # noqa: E501

    Returns an array of ratings for a given user that match the query.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_query_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str item_type: Item to query. Can be either 'series', 'episode', or 'banner'
    :return: UserRatingsData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the data-only form of the response; the low-level
    # *_with_http_info call already returns either the response data
    # (sync) or the request thread (async), so its result is forwarded
    # unchanged in both cases.
    kwargs['_return_http_data_only'] = True
    return self.user_ratings_query_get_with_http_info(**kwargs)  # noqa: E501
def user_ratings_query_get_with_http_info(self, **kwargs):  # noqa: E501
    """user_ratings_query_get  # noqa: E501

    Returns an array of ratings for a given user that match the query.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_query_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str item_type: Item to query. Can be either 'series', 'episode', or 'banner'
    :return: UserRatingsData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Optional query parameter plus the accepted option-style kwargs.
    all_params = ['item_type']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the current local variables; validated
    # kwargs (including the optional item_type) are copied in as keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_ratings_query_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}

    # item_type is optional; only send the query string key when given.
    query_params = []
    if 'item_type' in params:
        query_params.append(('itemType', params['item_type']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    # Dispatch the request; returns the response data, or the request
    # thread when async_req is set.
    return self.api_client.call_api(
        '/user/ratings/query', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserRatingsData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def user_ratings_query_params_get(self, **kwargs):  # noqa: E501
    """user_ratings_query_params_get  # noqa: E501

    Returns a list of query params for use in the `/user/ratings/query` route.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_query_params_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: UserRatingsQueryParams
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the data-only form of the response; the low-level
    # *_with_http_info call already returns either the response data
    # (sync) or the request thread (async), so its result is forwarded
    # unchanged in both cases.
    kwargs['_return_http_data_only'] = True
    return self.user_ratings_query_params_get_with_http_info(**kwargs)  # noqa: E501
def user_ratings_query_params_get_with_http_info(self, **kwargs):  # noqa: E501
    """user_ratings_query_params_get  # noqa: E501

    Returns a list of query params for use in the `/user/ratings/query` route.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_ratings_query_params_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: UserRatingsQueryParams
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only these option-style keyword arguments are accepted.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the current local variables; validated
    # options from kwargs are copied in as plain keys for params.get().
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method user_ratings_query_params_get" % key
            )
        params[key] = val
    del params['kwargs']

    # No path, query, form, file, or body parameters for this endpoint.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    # Dispatch the request; returns the response data, or the request
    # thread when async_req is set.
    return self.api_client.call_api(
        '/user/ratings/query/params', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserRatingsQueryParams',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 41.935449
| 2,040
| 0.623366
| 4,643
| 38,329
| 4.879819
| 0.063106
| 0.050139
| 0.022774
| 0.022863
| 0.909697
| 0.908505
| 0.904136
| 0.898045
| 0.896721
| 0.892395
| 0
| 0.016199
| 0.288111
| 38,329
| 913
| 2,041
| 41.98138
| 0.814154
| 0.374234
| 0
| 0.813142
| 0
| 0
| 0.184006
| 0.05541
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039014
| false
| 0
| 0.008214
| 0
| 0.104723
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
673822f3cead8313ffad99b48783479565c4d0fa
| 114
|
py
|
Python
|
framework/utils.py
|
Dadle/Saltie
|
a78dbc95e63153b47731252e3c825cb3afa34a1f
|
[
"MIT"
] | 138
|
2018-03-02T16:36:51.000Z
|
2022-03-10T07:00:03.000Z
|
framework/utils.py
|
Dadle/Saltie
|
a78dbc95e63153b47731252e3c825cb3afa34a1f
|
[
"MIT"
] | 46
|
2017-11-27T21:35:15.000Z
|
2018-02-24T01:22:58.000Z
|
framework/utils.py
|
Dadle/Saltie
|
a78dbc95e63153b47731252e3c825cb3afa34a1f
|
[
"MIT"
] | 29
|
2018-03-16T19:02:35.000Z
|
2022-02-02T21:58:59.000Z
|
import os
def get_repo_directory():
    """Return the absolute path of the repository root.

    The root is taken to be the parent of the directory that contains
    this module (two levels above this file).
    """
    framework_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.dirname(framework_dir)
| 19
| 71
| 0.719298
| 17
| 114
| 4.470588
| 0.647059
| 0.236842
| 0.342105
| 0.394737
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149123
| 114
| 5
| 72
| 22.8
| 0.783505
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 9
|
6749426abfb96baae45a788ea6d3d21dc0cabc4e
| 3,637
|
py
|
Python
|
tests/tests_rearrange_digits.py
|
quervernetzt/form-two-numbers-from-array-max
|
8c3dc79ce54420e3dad29e033598dfaffb7633b6
|
[
"MIT"
] | null | null | null |
tests/tests_rearrange_digits.py
|
quervernetzt/form-two-numbers-from-array-max
|
8c3dc79ce54420e3dad29e033598dfaffb7633b6
|
[
"MIT"
] | null | null | null |
tests/tests_rearrange_digits.py
|
quervernetzt/form-two-numbers-from-array-max
|
8c3dc79ce54420e3dad29e033598dfaffb7633b6
|
[
"MIT"
] | null | null | null |
import unittest
from solution.rearrange_digits import RearrangeDigits
class TestCasesRotatedArraySearch(unittest.TestCase):
    """Unit tests for ``RearrangeDigits.main``.

    BUG FIX: the original methods were not prefixed with ``test_``, so
    unittest discovery silently ran zero tests.  They are renamed here so
    they actually execute.
    """

    def test_input_list_is_none_return_empty_list(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list = None
        # Act
        result: list = rearrange_digits.main(input_list)
        # Assert
        self.assertEqual(result, [])

    def test_input_list_is_empty_list_return_empty_list(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = []
        # Act
        result: list = rearrange_digits.main(input_list)
        # Assert
        self.assertEqual(result, [])

    def test_input_list_has_one_element_return_list_with_element(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = [1]
        # Act
        result: list = rearrange_digits.main(input_list)
        # Assert
        self.assertEqual(result, [1])

    # ---------------------------------------------------------------------
    # Input validation: out-of-range and wrongly-typed elements must raise.
    # ---------------------------------------------------------------------
    def test_input_list_has_element_greater_nine_throw_value_exception(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = [4, 6, 2, 15, 9, 8]
        # Act & Assert
        self.assertRaises(ValueError, rearrange_digits.main, input_list)

    def test_input_list_has_element_smaller_zero_throw_value_exception(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = [4, -6, 2, 5, 9, 8]
        # Act & Assert
        self.assertRaises(ValueError, rearrange_digits.main, input_list)

    def test_input_list_has_element_is_str_throw_type_exception(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = [4, 6, 2, 5, "test", 8]
        # Act & Assert
        self.assertRaises(TypeError, rearrange_digits.main, input_list)

    def test_input_list_has_element_is_float_throw_type_exception(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = [4, 6, 2, 5, 1.1, 8]
        # Act & Assert
        self.assertRaises(TypeError, rearrange_digits.main, input_list)

    # ---------------------------------------------------------------------
    # Happy paths: valid digit lists produce the two maximal numbers.
    # ---------------------------------------------------------------------
    def test_input_list_sorted_return_max_sum(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = [1, 2, 3, 4, 5]
        # Act
        result: list = rearrange_digits.main(input_list)
        # Assert
        self.assertEqual(result, [531, 42])

    def test_input_list_unsorted_return_max_sum(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = [4, 6, 2, 5, 9, 8]
        # Act
        result: list = rearrange_digits.main(input_list)
        # Assert
        self.assertEqual(result, [964, 852])

    def test_input_list_unsorted_with_duplicates_return_max_sum(self) -> None:
        # Arrange
        rearrange_digits: RearrangeDigits = RearrangeDigits()
        input_list: list = [4, 6, 2, 2, 5, 9, 9, 8]
        # Act
        result: list = rearrange_digits.main(input_list)
        # Assert
        self.assertEqual(result, [9852, 9642])
| 33.990654
| 88
| 0.594721
| 377
| 3,637
| 5.442971
| 0.167109
| 0.131579
| 0.05848
| 0.102339
| 0.852827
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0
| 0.023645
| 0.244157
| 3,637
| 107
| 89
| 33.990654
| 0.722808
| 0.131152
| 0
| 0.44898
| 0
| 0
| 0.001278
| 0
| 0
| 0
| 0
| 0
| 0.204082
| 1
| 0.204082
| false
| 0
| 0.040816
| 0
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
674b904a5d4b38f68be0a775b5c4ac8a68838dfc
| 5,366
|
py
|
Python
|
rtamt/spec/ltl/discrete_time/reset.py
|
sguysc/rtamt
|
a16db77b61028f774d81457ff22e666229a5432c
|
[
"BSD-3-Clause"
] | 24
|
2019-12-04T00:20:16.000Z
|
2022-03-24T17:48:14.000Z
|
rtamt/spec/ltl/discrete_time/reset.py
|
sguysc/rtamt
|
a16db77b61028f774d81457ff22e666229a5432c
|
[
"BSD-3-Clause"
] | 142
|
2020-01-16T15:36:21.000Z
|
2022-03-28T20:40:45.000Z
|
rtamt/spec/ltl/discrete_time/reset.py
|
sguysc/rtamt
|
a16db77b61028f774d81457ff22e666229a5432c
|
[
"BSD-3-Clause"
] | 17
|
2020-07-07T20:32:08.000Z
|
2022-03-07T07:20:22.000Z
|
from rtamt.spec.ltl.discrete_time.visitor import LTLVisitor
class LTLReset(LTLVisitor):
    """Visitor that resets the monitor attached to each node of an LTL AST.

    The tree is walked depth-first; for every operator node the children
    are visited first (left to right) and then the monitor registered for
    that node in ``node_monitor_dict`` is reset.  Variables and the
    default case are no-ops.
    """

    def __init__(self, node_monitor_dict=None):
        self.node_monitor_dict = node_monitor_dict

    def reset(self, element):
        return self.visit(element, [])

    # ------------------------------------------------------------------
    # Internal helpers shared by all operator visits.
    # ------------------------------------------------------------------
    def _reset_monitor(self, element):
        # Look up this node's monitor by name and reset it.
        self.node_monitor_dict[element.name].reset()

    def _visit_unary(self, element, args):
        # Recurse into the single operand, then reset this node's monitor.
        self.visit(element.children[0], args)
        self._reset_monitor(element)

    def _visit_binary(self, element, args):
        # Recurse into both operands (left first), then reset this node.
        self.visit(element.children[0], args)
        self.visit(element.children[1], args)
        self._reset_monitor(element)

    # ------------------------------------------------------------------
    # Leaves.
    # ------------------------------------------------------------------
    def visitConstant(self, element, args):
        self._reset_monitor(element)

    def visitVariable(self, element, args):
        # Variables have no monitor to reset.
        pass

    # ------------------------------------------------------------------
    # Arithmetic operators.
    # ------------------------------------------------------------------
    def visitPredicate(self, element, args):
        self._visit_binary(element, args)

    def visitAddition(self, element, args):
        self._visit_binary(element, args)

    def visitMultiplication(self, element, args):
        self._visit_binary(element, args)

    def visitSubtraction(self, element, args):
        self._visit_binary(element, args)

    def visitDivision(self, element, args):
        self._visit_binary(element, args)

    def visitAbs(self, element, args):
        self._visit_unary(element, args)

    def visitSqrt(self, element, args):
        self._visit_unary(element, args)

    def visitExp(self, element, args):
        self._visit_unary(element, args)

    def visitPow(self, element, args):
        self._visit_binary(element, args)

    # ------------------------------------------------------------------
    # Boolean operators and edge detectors.
    # ------------------------------------------------------------------
    def visitRise(self, element, args):
        self._visit_unary(element, args)

    def visitFall(self, element, args):
        self._visit_unary(element, args)

    def visitNot(self, element, args):
        self._visit_unary(element, args)

    def visitAnd(self, element, args):
        self._visit_binary(element, args)

    def visitOr(self, element, args):
        self._visit_binary(element, args)

    def visitImplies(self, element, args):
        self._visit_binary(element, args)

    def visitIff(self, element, args):
        self._visit_binary(element, args)

    def visitXor(self, element, args):
        self._visit_binary(element, args)

    # ------------------------------------------------------------------
    # Temporal operators.
    # ------------------------------------------------------------------
    def visitEventually(self, element, args):
        self._visit_unary(element, args)

    def visitAlways(self, element, args):
        self._visit_unary(element, args)

    def visitUntil(self, element, args):
        self._visit_binary(element, args)

    def visitOnce(self, element, args):
        self._visit_unary(element, args)

    def visitPrevious(self, element, args):
        self._visit_unary(element, args)

    def visitNext(self, element, args):
        self._visit_unary(element, args)

    def visitHistorically(self, element, args):
        self._visit_unary(element, args)

    def visitSince(self, element, args):
        self._visit_binary(element, args)

    def visitDefault(self, element):
        pass
| 33.748428
| 59
| 0.649646
| 646
| 5,366
| 5.295666
| 0.085139
| 0.102602
| 0.182403
| 0.222157
| 0.850044
| 0.850044
| 0.850044
| 0.850044
| 0.850044
| 0.850044
| 0
| 0.009194
| 0.22978
| 5,366
| 159
| 60
| 33.748428
| 0.818534
| 0.011368
| 0
| 0.730159
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.238095
| false
| 0.015873
| 0.007937
| 0.007937
| 0.261905
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
675282830a1be535de9a3ca6e8541b1e33b6401e
| 236
|
py
|
Python
|
my_pkg/wrap.py
|
Mr-Milk/python-hmr
|
1a71f3413ea2374afc27919031db02e09f0f6b75
|
[
"MIT"
] | 8
|
2021-01-20T13:28:23.000Z
|
2021-08-20T21:35:46.000Z
|
my_pkg/wrap.py
|
Mr-Milk/python-hmr
|
1a71f3413ea2374afc27919031db02e09f0f6b75
|
[
"MIT"
] | 5
|
2022-02-07T14:54:50.000Z
|
2022-03-01T20:19:19.000Z
|
my_pkg/wrap.py
|
Mr-Milk/python-hmr
|
1a71f3413ea2374afc27919031db02e09f0f6b75
|
[
"MIT"
] | null | null | null |
import functools
def wrap(f):
    """Return a plain pass-through wrapper around *f*.

    No metadata is copied onto the wrapper, so introspection sees the
    inner function (``__name__`` is ``'args'``), unlike ``work_wrap``.
    """
    def args(*call_args, **call_kwargs):
        return f(*call_args, **call_kwargs)

    return args
def work_wrap(f):
    """Return a pass-through wrapper that preserves *f*'s metadata.

    ``functools.wraps`` copies ``__name__``, ``__doc__`` and friends from
    *f* onto the wrapper, so introspection still sees the original.
    """
    @functools.wraps(f)
    def args(*call_args, **call_kwargs):
        return f(*call_args, **call_kwargs)

    return args
| 13.882353
| 32
| 0.572034
| 32
| 236
| 4.1875
| 0.34375
| 0.268657
| 0.447761
| 0.164179
| 0.641791
| 0.641791
| 0.641791
| 0.641791
| 0.641791
| 0.641791
| 0
| 0
| 0.275424
| 236
| 16
| 33
| 14.75
| 0.783626
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.1
| 0.2
| 0.9
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
677e2f4ec20dc6991fee6600ceb35fadc2b45869
| 12,643
|
py
|
Python
|
api/views/postView.py
|
Head8che/CMPUT404-project-socialdistribution
|
ada98f37216c14a7af0134286cb056c1253ef035
|
[
"W3C-20150513"
] | null | null | null |
api/views/postView.py
|
Head8che/CMPUT404-project-socialdistribution
|
ada98f37216c14a7af0134286cb056c1253ef035
|
[
"W3C-20150513"
] | 45
|
2021-10-04T01:37:32.000Z
|
2021-12-09T02:15:56.000Z
|
api/views/postView.py
|
Head8che/CMPUT404-project-socialdistribution
|
ada98f37216c14a7af0134286cb056c1253ef035
|
[
"W3C-20150513"
] | 3
|
2021-11-25T03:27:03.000Z
|
2021-12-14T05:20:10.000Z
|
import json
from re import A
import requests
from rest_framework.decorators import api_view
from rest_framework.response import Response
from api.models.inboxModel import Inbox
from api.models.nodeModel import Node
from ..models.authorModel import Author
from ..models.postModel import Post
from rest_framework import status
from ..serializers import PostSerializer
from django.db.models import Q
from ..utils import getPageNumber, getPageSize, getPaginatedObject, getUUIDFromId, handlePostImage, loggedInUserIsAuthor, postToAuthorInbox
@api_view(['GET'])
def StreamList(request):
    """Return the paginated stream of posts visible to the requester.

    Anonymous requests see only PUBLIC, listed posts.  Authenticated
    requests additionally see FRIENDS posts from mutual followers and
    the requester's own FRIENDS posts.
    """
    # List all the posts
    if request.method == 'GET':
        try:  # try to get the posts
            if request.user.id is not None:
                author = Author.objects.get(uuid=getUUIDFromId(request.user.id))
                followers = author.followers.all()
                friendIds = []
                # A "friend" is a follower who also follows this author
                # back, detected by scanning each follower's follower list.
                for follower in followers:
                    followerFollowers = Author.objects.get(uuid=follower.uuid).followers.all()
                    for followerFollower in followerFollowers:
                        if str(followerFollower.id) == str(author.id):
                            friendIds.append(follower.id)
                publicPosts = Post.objects.filter(Q(visibility="PUBLIC"), unlisted=False).order_by('-published')
                otherAuthorsFriendPosts = Post.objects.filter(Q(visibility="FRIENDS"), author__id__in=friendIds, unlisted=False).order_by('-published')
                ownFriendPosts = Post.objects.filter(Q(visibility="FRIENDS"), author__id=request.user.id, unlisted=False).order_by('-published')
                # `|` unions the three querysets into one stream.
                posts = publicPosts | otherAuthorsFriendPosts | ownFriendPosts
            else:
                posts = Post.objects.filter(Q(visibility="PUBLIC"), unlisted=False).order_by('-published')
        except:  # return an error if something goes wrong
            # NOTE(review): bare except maps every failure (including DB
            # errors) to 404 — consider narrowing to expected exceptions.
            return Response(status=status.HTTP_404_NOT_FOUND)
        # get the page number and size
        page_number = getPageNumber(request)
        page_size = getPageSize(request)
        # get the paginated posts
        paginated_posts = getPaginatedObject(posts, page_number, page_size)
        # get the Post serializer
        serializer = PostSerializer(paginated_posts, many=True)
        # create the `type` field for the Posts data
        new_data = {'type': "posts"}
        # add the `type` field to the Posts data
        new_data.update({
            'items': serializer.data,
        })
        # return the updated Posts data
        return Response(new_data, status=status.HTTP_200_OK)
    # Handle unaccepted methods
    else:
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
@api_view(['POST', 'GET'])
def ShareFriends(request, author_uuid):
    """POST: create a post for `author_uuid` and push it to each friend's inbox.

    Only the logged-in author may create the post (401 otherwise).  When no
    title is supplied, a default "Post N by <displayName>" is generated.
    Inbox delivery is best-effort: failures are swallowed so creation still
    succeeds.  GET is routed here but answered with 405.
    """
    try:  # resolve the target author or 404
        authorObject = Author.objects.get(uuid=author_uuid)
    except Exception:
        return Response(status=status.HTTP_404_NOT_FOUND)
    # Create a new post
    if request.method == 'POST':
        if not loggedInUserIsAuthor(request, author_uuid):
            return Response(status=status.HTTP_401_UNAUTHORIZED)
        try:  # normalize image payload and default the title
            post_count = Post.objects.filter(author=author_uuid).count()
            request_data = handlePostImage(request.data)
            if request_data.get('title') is None:  # `is None`, not `== None`
                request_data["title"] = "Post " + str(post_count + 1) + " by " + authorObject.displayName
        except Exception:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        serializer = PostSerializer(data=request_data)
        if serializer.is_valid():
            serializer.save(author=authorObject)
            try:
                # Best effort: compute the logged-in author's friends (mutual
                # followers) and deliver the new post to each friend's inbox,
                # preferring a matching remote Node when one exists.
                author = Author.objects.get(uuid=getUUIDFromId(request.user.id))
                friendIds = []
                for follower in author.followers.all():
                    followerFollowers = Author.objects.get(uuid=follower.uuid).followers.all()
                    if any(str(ff.id) == str(author.id) for ff in followerFollowers):
                        friendIds.append(follower.id)
                for friend in Author.objects.filter(id__in=friendIds):
                    try:
                        # First Node whose text matches the friend's host prefix.
                        remote_node = Node.objects.filter(text__startswith=friend.host[:20])[0]
                        postToAuthorInbox(request, serializer.data, friend, remote_node)
                    except Exception:
                        # No matching node (or lookup failed): deliver locally.
                        postToAuthorInbox(request, serializer.data, friend)
            except Exception:
                # Delivery is best-effort; never fail the creation because of it.
                pass
            return Response({"message": "Post created", "data": serializer.data},
                            status=status.HTTP_201_CREATED)
        # validation failure
        return Response({"message": serializer.errors},
                        status=status.HTTP_400_BAD_REQUEST)
    # Handle unaccepted methods
    else:
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
@api_view(['POST', 'GET'])
def PostList(request, author_uuid):
    """Create (POST) or list (GET) the posts of the author `author_uuid`.

    POST: only the logged-in author may create (401 otherwise); a default
    title is generated when missing, and all followers are notified via
    their inboxes on a best-effort basis.
    GET: friends of the author and the author themselves see FRIENDS and
    PUBLIC posts; everyone else sees only PUBLIC, listed posts. Results
    are paginated via the standard page/size query parameters.
    """
    try:  # resolve the target author or 404
        authorObject = Author.objects.get(uuid=author_uuid)
    except Exception:
        return Response(status=status.HTTP_404_NOT_FOUND)
    # Create a new post
    if request.method == 'POST':
        if not loggedInUserIsAuthor(request, author_uuid):
            return Response(status=status.HTTP_401_UNAUTHORIZED)
        try:  # normalize image payload and default the title
            post_count = Post.objects.filter(author=author_uuid).count()
            request_data = handlePostImage(request.data)
            if request_data.get('title') is None:  # `is None`, not `== None`
                request_data["title"] = "Post " + str(post_count + 1) + " by " + authorObject.displayName
        except Exception:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        serializer = PostSerializer(data=request_data)
        if serializer.is_valid():
            serializer.save(author=authorObject)
            try:
                # Best-effort fan-out of the new post to every follower's
                # inbox, preferring a matching remote Node when one exists.
                for follower in Author.objects.get(uuid=author_uuid).followers.all():
                    try:
                        remote_node = Node.objects.filter(text__startswith=follower.host[:20])[0]
                        postToAuthorInbox(request, serializer.data, follower, remote_node)
                    except Exception:
                        postToAuthorInbox(request, serializer.data, follower)
            except Exception:
                # Never fail the creation because notification failed.
                pass
            return Response({"message": "Post created", "data": serializer.data},
                            status=status.HTTP_201_CREATED)
        # validation failure
        return Response({"message": serializer.errors},
                        status=status.HTTP_400_BAD_REQUEST)
    # List all the posts
    elif request.method == 'GET':
        try:
            if request.user.id is not None:
                author = Author.objects.get(uuid=getUUIDFromId(authorObject.id))
                # Friends are mutual followers of the *target* author.
                friendIds = []
                for follower in author.followers.all():
                    followerFollowers = Author.objects.get(uuid=follower.uuid).followers.all()
                    if any(str(ff.id) == str(author.id) for ff in followerFollowers):
                        friendIds.append(follower.id)
                loggedInUserIsFriend = request.user.id in friendIds
                if loggedInUserIsFriend or loggedInUserIsAuthor(request, author_uuid):
                    posts = Post.objects.filter(Q(visibility="FRIENDS") | Q(visibility="PUBLIC"),
                                                author=author_uuid, unlisted=False).order_by('-published')
                else:
                    posts = Post.objects.filter(author=author_uuid, visibility="PUBLIC", unlisted=False).order_by('-published')
            else:
                posts = Post.objects.filter(author=author_uuid, visibility="PUBLIC", unlisted=False).order_by('-published')
        except Exception:  # narrowed from bare except:
            return Response(status=status.HTTP_404_NOT_FOUND)
        # Paginate and serialize the visible posts.
        page_number = getPageNumber(request)
        page_size = getPageSize(request)
        paginated_posts = getPaginatedObject(posts, page_number, page_size)
        serializer = PostSerializer(paginated_posts, many=True)
        new_data = {'type': "posts", 'items': serializer.data}
        return Response(new_data, status=status.HTTP_200_OK)
    # Handle unaccepted methods
    else:
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
@api_view(['GET', 'POST', 'DELETE', 'PUT'])
def PostDetail(request, author_uuid, post_uuid):
    """Retrieve (GET), update (POST), create-with-id (PUT) or delete (DELETE)
    a single post owned by `author_uuid`.

    All mutating methods require the logged-in user to be the author (401
    otherwise).  POST is "update an existing post" in this API's convention;
    PUT creates a post with the caller-chosen uuid and fans it out to the
    author's followers on a best-effort basis.
    """
    try:  # resolve the target author or 404
        authorObject = Author.objects.get(uuid=author_uuid)
    except Exception:  # narrowed from bare except:
        return Response(status=status.HTTP_404_NOT_FOUND)
    # List a specific post
    if request.method == 'GET':
        try:
            post = Post.objects.get(author=author_uuid, uuid=post_uuid)
        except Exception:
            return Response(status=status.HTTP_404_NOT_FOUND)
        serializer = PostSerializer(post, many=False)
        return Response(serializer.data, status=status.HTTP_200_OK)
    # Update a specific post
    elif request.method == 'POST':
        if not loggedInUserIsAuthor(request, author_uuid):
            return Response(status=status.HTTP_401_UNAUTHORIZED)
        try:  # fetch the post and normalize any image payload
            post = Post.objects.get(author=author_uuid, uuid=post_uuid)
            request_data = handlePostImage(request.data)
        except Exception:
            return Response(status=status.HTTP_404_NOT_FOUND)
        serializer = PostSerializer(instance=post, data=request_data)
        if serializer.is_valid():
            serializer.save()
            return Response({"message": "Post updated", "data": serializer.data},
                            status=status.HTTP_200_OK)
        return Response({"message": serializer.errors},
                        status=status.HTTP_400_BAD_REQUEST)
    # Create a specific post
    elif request.method == 'PUT':
        if not loggedInUserIsAuthor(request, author_uuid):
            return Response(status=status.HTTP_401_UNAUTHORIZED)
        try:
            request_data = handlePostImage(request.data)
        except Exception:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        serializer = PostSerializer(data=request_data)
        if serializer.is_valid():
            # Save with the caller-supplied uuid so PUT targets a known id.
            serializer.save(uuid=post_uuid, author=authorObject)
            try:
                # Best-effort fan-out to every follower's inbox, preferring a
                # matching remote Node when one exists.
                for follower in Author.objects.get(uuid=author_uuid).followers.all():
                    try:
                        remote_node = Node.objects.filter(text__startswith=follower.host[:20])[0]
                        postToAuthorInbox(request, serializer.data, follower, remote_node)
                    except Exception:
                        postToAuthorInbox(request, serializer.data, follower)
            except Exception:
                # Never fail the creation because notification failed.
                pass
            return Response({"message": "Post created", "data": serializer.data},
                            status=status.HTTP_201_CREATED)
        return Response({"message": serializer.errors},
                        status=status.HTTP_400_BAD_REQUEST)
    # Delete a specific post
    elif request.method == 'DELETE':
        if not loggedInUserIsAuthor(request, author_uuid):
            return Response(status=status.HTTP_401_UNAUTHORIZED)
        try:
            post = Post.objects.get(author=author_uuid, uuid=post_uuid)
        except Exception:
            return Response(status=status.HTTP_404_NOT_FOUND)
        post.delete()
        # NOTE(review): a 204 response conventionally carries no body; the
        # message is kept for backward compatibility with existing clients.
        return Response({"message": "Post deleted"},
                        status=status.HTTP_204_NO_CONTENT)
    # Handle unaccepted methods
    else:
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
| 38.428571
| 143
| 0.69406
| 1,567
| 12,643
| 5.479898
| 0.097001
| 0.052172
| 0.059625
| 0.060557
| 0.865494
| 0.852568
| 0.837894
| 0.818214
| 0.803074
| 0.800745
| 0
| 0.010803
| 0.216563
| 12,643
| 328
| 144
| 38.545732
| 0.856133
| 0.191727
| 0
| 0.778802
| 0
| 0
| 0.038344
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018433
| false
| 0.013825
| 0.059908
| 0
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c03dd568c3fd2f8d514684bd60c4f64a2c44b849
| 3,056
|
py
|
Python
|
vendor/github.com/elastic/beats/metricbeat/tests/system/test_ceph.py
|
PPACI/krakenbeat
|
e75ec8f006164acb8a57d0c9609bebe534955813
|
[
"Apache-2.0"
] | 3
|
2018-01-04T19:15:26.000Z
|
2020-02-20T03:35:27.000Z
|
vendor/github.com/elastic/beats/metricbeat/tests/system/test_ceph.py
|
PPACI/krakenbeat
|
e75ec8f006164acb8a57d0c9609bebe534955813
|
[
"Apache-2.0"
] | null | null | null |
vendor/github.com/elastic/beats/metricbeat/tests/system/test_ceph.py
|
PPACI/krakenbeat
|
e75ec8f006164acb8a57d0c9609bebe534955813
|
[
"Apache-2.0"
] | 1
|
2020-10-11T14:57:48.000Z
|
2020-10-11T14:57:48.000Z
|
import os
import metricbeat
import unittest
class Test(metricbeat.BaseTest):
    """System tests for the ceph metricbeat module's metricsets."""

    def _check_metricset(self, metricset):
        """Run the beat against a single ceph *metricset* and verify that
        at least one event is emitted and all its fields are documented."""
        self.render_config_template(modules=[{
            "name": "ceph",
            "metricsets": [metricset],
            "hosts": self.get_hosts(),
            "period": "1s"
        }])
        proc = self.start_beat()
        # Wait until the beat has produced at least one output line.
        self.wait_until(lambda: self.output_lines() > 0, max_timeout=20)
        proc.check_kill_and_wait()
        output = self.read_output_json()
        self.assertTrue(len(output) >= 1)
        evt = output[0]
        # Parenthesized print: identical output in Python 2, valid in Python 3
        # (the original `print evt` statement was Python-2-only syntax).
        print(evt)
        self.assert_fields_are_documented(evt)

    @unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
    def test_cluster_disk(self):
        """
        ceph cluster_disk metricset test
        """
        self._check_metricset("cluster_disk")

    @unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
    def test_cluster_health(self):
        """
        ceph cluster_health metricset test
        """
        self._check_metricset("cluster_health")

    @unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
    def test_monitor_health(self):
        """
        ceph monitor_health metricset test
        """
        self._check_metricset("monitor_health")

    @unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
    def test_pool_disk(self):
        """
        ceph pool_disk metricset test
        """
        self._check_metricset("pool_disk")

    def get_hosts(self):
        """Return the ceph REST endpoint, overridable via CEPH_HOST/CEPH_PORT."""
        return [os.getenv('CEPH_HOST', 'localhost') + ':' +
                os.getenv('CEPH_PORT', '5000')]
| 31.183673
| 75
| 0.563154
| 321
| 3,056
| 5.102804
| 0.190031
| 0.027473
| 0.068376
| 0.095238
| 0.859585
| 0.859585
| 0.859585
| 0.859585
| 0.859585
| 0.859585
| 0
| 0.013391
| 0.315772
| 3,056
| 97
| 76
| 31.505155
| 0.769967
| 0
| 0
| 0.788732
| 0
| 0
| 0.098391
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 0
| null | null | 0
| 0.042254
| null | null | 0.056338
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
223c9c15c24fe715667b67c4f4d5088386691436
| 239
|
py
|
Python
|
tests/test_cipher_dk3154.py
|
QMSS-G5072-2020/cipher_kow_daryl
|
53e720c9b14ff1703b444b55d318f3f7880b3720
|
[
"MIT"
] | null | null | null |
tests/test_cipher_dk3154.py
|
QMSS-G5072-2020/cipher_kow_daryl
|
53e720c9b14ff1703b444b55d318f3f7880b3720
|
[
"MIT"
] | null | null | null |
tests/test_cipher_dk3154.py
|
QMSS-G5072-2020/cipher_kow_daryl
|
53e720c9b14ff1703b444b55d318f3f7880b3720
|
[
"MIT"
] | null | null | null |
from cipher_dk3154 import __version__
from cipher_dk3154 import cipher_dk3154
def test_version():
    """The installed package version must match the pinned release."""
    pinned = '0.1.0'
    assert __version__ == pinned
def test_cipher_symbols():
    """Non-alphabetic input should pass through the cipher unchanged."""
    result = cipher_dk3154.cipher('72', 1, encrypt=True)
    assert result == '72', "Should be 72"
| 29.875
| 78
| 0.74477
| 35
| 239
| 4.657143
| 0.457143
| 0.294479
| 0.196319
| 0.269939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126829
| 0.142259
| 239
| 8
| 78
| 29.875
| 0.668293
| 0
| 0
| 0
| 0
| 0
| 0.0875
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2248700691112e84059d8c583090110a637c3e36
| 77
|
py
|
Python
|
Chapter 01/Chap01_Example1.51.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
Chapter 01/Chap01_Example1.51.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
Chapter 01/Chap01_Example1.51.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
# Print the "kissing face with closed eyes" emoji (U+1F61A) via a named
# Unicode escape rather than a literal code point.
face = "\N{kissing face with closed eyes}"
print(face)
| 25.666667
| 43
| 0.727273
| 12
| 77
| 4.666667
| 0.583333
| 0.392857
| 0.535714
| 0.75
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168831
| 77
| 2
| 44
| 38.5
| 0.875
| 0.376623
| 0
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
22613c1e9eaa04eb9d377e5bab235db80e75f93c
| 4,410
|
py
|
Python
|
tests/functional/generator/test_sqlalchemy.py
|
xnuinside/omymodels
|
a523933a8138c385198c784b65087240e6eb31a3
|
[
"MIT"
] | 48
|
2021-03-20T14:09:33.000Z
|
2022-03-02T18:54:51.000Z
|
tests/functional/generator/test_sqlalchemy.py
|
geob3d/omymodels
|
2324256b6cefd31593afd2c24fce680f75137954
|
[
"MIT"
] | 10
|
2021-03-21T15:37:01.000Z
|
2022-01-14T14:07:09.000Z
|
tests/functional/generator/test_sqlalchemy.py
|
geob3d/omymodels
|
2324256b6cefd31593afd2c24fce680f75137954
|
[
"MIT"
] | 6
|
2021-04-08T14:26:28.000Z
|
2022-03-02T18:54:52.000Z
|
from omymodels import create_models
def test_with_enums():
    """DDL declaring a postgres enum type should generate a Python Enum
    class and a model that references it via sa.Enum."""
    # Exact generated module text expected from create_models (compared
    # verbatim against result["code"] below).
    expected = """import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from enum import Enum
from sqlalchemy.sql import func
from sqlalchemy.dialects.postgresql import JSON
Base = declarative_base()
class MaterialType(str, Enum):
article = 'article'
video = 'video'
class Material(Base):
__tablename__ = 'material'
id = sa.Column(sa.Integer(), autoincrement=True, primary_key=True)
title = sa.Column(sa.String(), nullable=False)
description = sa.Column(sa.Text())
link = sa.Column(sa.String(), nullable=False)
type = sa.Column(sa.Enum(MaterialType))
additional_properties = sa.Column(JSON(), server_default='{"key": "value"}')
created_at = sa.Column(sa.TIMESTAMP(), server_default=func.now())
updated_at = sa.Column(sa.TIMESTAMP())
"""
    # Input DDL: an enum type plus a table that uses it.
    ddl = """
CREATE TYPE "material_type" AS ENUM (
'video',
'article'
);
CREATE TABLE "material" (
"id" SERIAL PRIMARY KEY,
"title" varchar NOT NULL,
"description" text,
"link" varchar NOT NULL,
"type" material_type,
"additional_properties" json DEFAULT '{"key": "value"}',
"created_at" timestamp DEFAULT (now()),
"updated_at" timestamp
);
"""
    result = create_models(ddl, models_type="sqlalchemy")
    # Only the generated code is asserted, not the result metadata.
    assert expected == result["code"]
def test_foreign_keys():
    """ALTER TABLE ... ADD FOREIGN KEY statements should turn the linking
    table's columns into sa.ForeignKey columns in the generated models."""
    # Exact generated module text expected from create_models.
    expected = """import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Materials(Base):
__tablename__ = 'materials'
id = sa.Column(sa.Integer(), primary_key=True)
title = sa.Column(sa.String(), nullable=False)
description = sa.Column(sa.String())
link = sa.Column(sa.String())
created_at = sa.Column(sa.TIMESTAMP())
updated_at = sa.Column(sa.TIMESTAMP())
class MaterialAttachments(Base):
__tablename__ = 'material_attachments'
material_id = sa.Column(sa.Integer(), sa.ForeignKey('materials.id'))
attachment_id = sa.Column(sa.Integer(), sa.ForeignKey('attachments.id'))
class Attachments(Base):
__tablename__ = 'attachments'
id = sa.Column(sa.Integer(), primary_key=True)
title = sa.Column(sa.String())
description = sa.Column(sa.String())
created_at = sa.Column(sa.TIMESTAMP())
updated_at = sa.Column(sa.TIMESTAMP())
"""
    # Input DDL: two entity tables, a link table, and the FK constraints.
    ddl = """
CREATE TABLE "materials" (
"id" int PRIMARY KEY,
"title" varchar NOT NULL,
"description" varchar,
"link" varchar,
"created_at" timestamp,
"updated_at" timestamp
);
CREATE TABLE "material_attachments" (
"material_id" int,
"attachment_id" int
);
CREATE TABLE "attachments" (
"id" int PRIMARY KEY,
"title" varchar,
"description" varchar,
"created_at" timestamp,
"updated_at" timestamp
);
ALTER TABLE "material_attachments" ADD FOREIGN KEY ("material_id") REFERENCES "materials" ("id");
ALTER TABLE "material_attachments" ADD FOREIGN KEY ("attachment_id") REFERENCES "attachments" ("id");
"""
    result = create_models(ddl, models_type="sqlalchemy")["code"]
    assert result == expected
def test_upper_name_produces_the_same_result():
    """Identical to the enum test except the DDL uses NOW() in upper case:
    function-name casing in the DDL must not change the generated output."""
    # Exact generated module text expected from create_models.
    expected = """import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from enum import Enum
from sqlalchemy.sql import func
from sqlalchemy.dialects.postgresql import JSON
Base = declarative_base()
class MaterialType(str, Enum):
article = 'article'
video = 'video'
class Material(Base):
__tablename__ = 'material'
id = sa.Column(sa.Integer(), autoincrement=True, primary_key=True)
title = sa.Column(sa.String(), nullable=False)
description = sa.Column(sa.Text())
link = sa.Column(sa.String(), nullable=False)
type = sa.Column(sa.Enum(MaterialType))
additional_properties = sa.Column(JSON(), server_default='{"key": "value"}')
created_at = sa.Column(sa.TIMESTAMP(), server_default=func.now())
updated_at = sa.Column(sa.TIMESTAMP())
"""
    # Input DDL: same as test_with_enums but with DEFAULT (NOW()) upper-cased.
    ddl = """
CREATE TYPE "material_type" AS ENUM (
'video',
'article'
);
CREATE TABLE "material" (
"id" SERIAL PRIMARY KEY,
"title" varchar NOT NULL,
"description" text,
"link" varchar NOT NULL,
"type" material_type,
"additional_properties" json DEFAULT '{"key": "value"}',
"created_at" timestamp DEFAULT (NOW()),
"updated_at" timestamp
);
"""
    result = create_models(ddl, models_type="sqlalchemy")
    assert expected == result["code"]
| 25.056818
| 103
| 0.692063
| 537
| 4,410
| 5.52514
| 0.147114
| 0.078193
| 0.091001
| 0.048534
| 0.833839
| 0.826761
| 0.813279
| 0.707786
| 0.704752
| 0.704752
| 0
| 0
| 0.169841
| 4,410
| 175
| 104
| 25.2
| 0.810434
| 0
| 0
| 0.789063
| 0
| 0
| 0.886395
| 0.249433
| 0
| 0
| 0
| 0
| 0.023438
| 1
| 0.023438
| false
| 0
| 0.101563
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
226d83488bc4997ecc5fe72f1db5c271dc85c871
| 3,814
|
py
|
Python
|
AlliedPointsFarm.py
|
avianflu18/swgoh
|
c4a82095f85690b2c25eb0016a0ad74cd72fbea0
|
[
"MIT"
] | null | null | null |
AlliedPointsFarm.py
|
avianflu18/swgoh
|
c4a82095f85690b2c25eb0016a0ad74cd72fbea0
|
[
"MIT"
] | null | null | null |
AlliedPointsFarm.py
|
avianflu18/swgoh
|
c4a82095f85690b2c25eb0016a0ad74cd72fbea0
|
[
"MIT"
] | null | null | null |
# -*- encoding=utf8 -*-
__author__ = "DarthYu"
import sys
from airtest.core.api import *
def _wait_and_touch(tpl):
    """Block until *tpl* appears on screen (re-entering main() between polls,
    as the original did via intervalfunc), then tap it once."""
    wait(tpl, intervalfunc=main)
    touch(tpl)


def main():
    """Endlessly farm allied points by tapping whichever known button is
    currently on screen; exits the process when the stop template appears.

    Each branch of the original constructed the identical Template three
    times (exists/wait/touch) and repeated the wait+touch pair verbatim;
    the templates are now built once and the pair extracted into
    _wait_and_touch.  File names and record positions are unchanged.
    """
    res = (1280, 720)
    # Stop marker and the tappable buttons, in the original priority order.
    stop_tpl = Template(r"tpl1605992707698.png", record_pos=(0.0, -0.003), resolution=res)
    btn_a = Template(r"tpl1605937955537.png", record_pos=(0.17, 0.243), resolution=res)
    btn_b = Template(r"tpl1605938079807.png", record_pos=(0.382, 0.242), resolution=res)
    btn_c = Template(r"tpl1605938171486.png", record_pos=(0.384, 0.243), resolution=res)
    btn_d = Template(r"tpl1605943853149.png", record_pos=(0.362, 0.232), resolution=res)
    btn_e = Template(r"tpl1605943770685.png", record_pos=(0.081, -0.134), resolution=res)
    btn_f = Template(r"tpl1605989298314.png", record_pos=(0.071, -0.136), resolution=res)
    btn_g = Template(r"tpl1605989397146.png", record_pos=(-0.362, 0.083), resolution=res)
    btn_h = Template(r"tpl1605989514414.png", record_pos=(0.362, 0.23), resolution=res)
    btn_i = Template(r"tpl1605944094403.png", record_pos=(-0.466, -0.247), resolution=res)
    while True:
        if exists(stop_tpl):
            sys.exit()
        elif exists(btn_a):
            _wait_and_touch(btn_a)
            _wait_and_touch(btn_b)
        elif exists(btn_b):
            _wait_and_touch(btn_b)
        elif exists(btn_c):
            _wait_and_touch(btn_c)
        elif exists(btn_d):
            _wait_and_touch(btn_d)
        else:
            # Secondary screen: try the fallback buttons in order.
            if exists(btn_e):
                _wait_and_touch(btn_e)
            elif exists(btn_f):
                _wait_and_touch(btn_f)
            elif exists(btn_g):
                _wait_and_touch(btn_g)
            elif exists(btn_h):
                _wait_and_touch(btn_h)
            elif exists(btn_i):
                _wait_and_touch(btn_i)
            elif exists(stop_tpl):
                sys.exit(0)
            else:
                # Nothing recognized: recurse as the original did.
                main()
# Run the farming loop; main() only returns via sys.exit().
main()
# NOTE(review): airtest's auto_setup() normally runs *before* any device
# interaction, but here it is only reached after main() exits — confirm
# this ordering is intentional.
auto_setup(__file__)
| 73.346154
| 127
| 0.635291
| 492
| 3,814
| 4.843496
| 0.121951
| 0.117079
| 0.156106
| 0.169115
| 0.949224
| 0.949224
| 0.939152
| 0.916492
| 0.861939
| 0.861939
| 0
| 0.278461
| 0.189303
| 3,814
| 51
| 128
| 74.784314
| 0.492238
| 0.005506
| 0
| 0.186047
| 0
| 0
| 0.167737
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0.046512
| 0
| 0.069767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
226e2a32d3fab7db445995619ce097df0f891ba3
| 22,610
|
py
|
Python
|
src/comodash_api_client_lowlevel/comodash_api/queries_api.py
|
ComotionLabs/dash-sdk
|
8ab532dd58cbcb85969bb84503678cd54b3b2bfe
|
[
"Apache-2.0"
] | 1
|
2021-06-19T18:44:31.000Z
|
2021-06-19T18:44:31.000Z
|
src/comodash_api_client_lowlevel/comodash_api/queries_api.py
|
ComotionLabs/dash-sdk
|
8ab532dd58cbcb85969bb84503678cd54b3b2bfe
|
[
"Apache-2.0"
] | null | null | null |
src/comodash_api_client_lowlevel/comodash_api/queries_api.py
|
ComotionLabs/dash-sdk
|
8ab532dd58cbcb85969bb84503678cd54b3b2bfe
|
[
"Apache-2.0"
] | 3
|
2021-06-25T14:50:50.000Z
|
2021-09-16T13:00:29.000Z
|
"""
Comotion Dash API
Comotion Dash API # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from comodash_api_client_lowlevel.api_client import ApiClient, Endpoint as _Endpoint
from comodash_api_client_lowlevel.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from comodash_api_client_lowlevel.model.error import Error
from comodash_api_client_lowlevel.model.query import Query
from comodash_api_client_lowlevel.model.query_id import QueryId
from comodash_api_client_lowlevel.model.query_result import QueryResult
from comodash_api_client_lowlevel.model.query_text import QueryText
class QueriesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __download_csv(
self,
query_id,
**kwargs
):
"""Download the csv result file of a query # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_csv(query_id, async_req=True)
>>> result = thread.get()
Args:
query_id (str): Unique Identifier for the query
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
file_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['query_id'] = \
query_id
return self.call_with_http_info(**kwargs)
self.download_csv = _Endpoint(
settings={
'response_type': (file_type,),
'auth': [
'OAuth2Authorizer'
],
'endpoint_path': '/query/{query_id}/csv',
'operation_id': 'download_csv',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'query_id',
],
'required': [
'query_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'query_id':
(str,),
},
'attribute_map': {
'query_id': 'query_id',
},
'location_map': {
'query_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'binary/octet-stream',
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__download_csv
)
def __get_query(
self,
query_id,
**kwargs
):
"""Get information about a query # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_query(query_id, async_req=True)
>>> result = thread.get()
Args:
query_id (str): Unique Identifier for the query
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Query
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['query_id'] = \
query_id
return self.call_with_http_info(**kwargs)
self.get_query = _Endpoint(
settings={
'response_type': (Query,),
'auth': [
'OAuth2Authorizer'
],
'endpoint_path': '/query/{query_id}',
'operation_id': 'get_query',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'query_id',
],
'required': [
'query_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'query_id':
(str,),
},
'attribute_map': {
'query_id': 'query_id',
},
'location_map': {
'query_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_query
)
def __get_query_results(
    self,
    query_id,
    **kwargs
):
    """Get paginated results of a query.  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.get_query_results(query_id, async_req=True)
    >>> result = thread.get()

    Args:
        query_id (str): Unique Identifier for the query

    Keyword Args:
        next_token (str): token to get the next page of query results;
            supplied in the previous response when the result set is
            truncated. [optional]
        async_req (bool): execute request asynchronously. Default False.
        _return_http_data_only (bool): return response data without the
            status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.

    Returns:
        QueryResult, or the request thread when async_req is True.
    """
    # Fill in the framework options the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['query_id'] = query_id
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for "GET /query/{query_id}/result": binds
# __get_query_results so self.get_query_results(...) performs the call.
self.get_query_results = _Endpoint(
settings={
'response_type': (QueryResult,),
'auth': [
'OAuth2Authorizer'
],
'endpoint_path': '/query/{query_id}/result',
'operation_id': 'get_query_results',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'query_id',
'next_token',
],
'required': [
'query_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'query_id':
(str,),
'next_token':
(str,),
},
'attribute_map': {
'query_id': 'query_id',
'next_token': 'next_token',
},
# query_id fills the URL path; next_token travels in the query string.
'location_map': {
'query_id': 'path',
'next_token': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_query_results
)
def __run_query(
    self,
    query_text,
    **kwargs
):
    """Run a query.  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.run_query(query_text, async_req=True)
    >>> result = thread.get()

    Args:
        query_text (QueryText): query payload, sent as the request body

    Keyword Args:
        async_req (bool): execute request asynchronously. Default False.
        _return_http_data_only (bool): return response data without the
            status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.

    Returns:
        QueryId, or the request thread when async_req is True.
    """
    # Fill in the framework options the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['query_text'] = query_text
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for "POST /query": binds __run_query so that
# self.run_query(query_text, ...) submits a new query.
self.run_query = _Endpoint(
settings={
'response_type': (QueryId,),
'auth': [
'OAuth2Authorizer'
],
'endpoint_path': '/query',
'operation_id': 'run_query',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'query_text',
],
'required': [
'query_text',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'query_text':
(QueryText,),
},
'attribute_map': {
},
# The QueryText payload is serialized as the JSON request body.
'location_map': {
'query_text': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__run_query
)
def __stop_query(
    self,
    query_id,
    **kwargs
):
    """Stop a running query.  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case the request thread is returned.

    >>> thread = api.stop_query(query_id, async_req=True)
    >>> result = thread.get()

    Args:
        query_id (str): Unique Identifier for the query

    Keyword Args:
        async_req (bool): execute request asynchronously. Default False.
        _return_http_data_only (bool): return response data without the
            status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.

    Returns:
        None, or the request thread when async_req is True.
    """
    # Fill in the framework options the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['query_id'] = query_id
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for "DELETE /query/{query_id}": binds __stop_query;
# response_type is None because a successful stop returns no body.
self.stop_query = _Endpoint(
settings={
'response_type': None,
'auth': [
'OAuth2Authorizer'
],
'endpoint_path': '/query/{query_id}',
'operation_id': 'stop_query',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'query_id',
],
'required': [
'query_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'query_id':
(str,),
},
'attribute_map': {
'query_id': 'query_id',
},
'location_map': {
'query_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__stop_query
)
| 35.328125
| 173
| 0.459
| 1,981
| 22,610
| 4.974255
| 0.09894
| 0.034808
| 0.026385
| 0.0274
| 0.859955
| 0.832048
| 0.822306
| 0.794601
| 0.787295
| 0.770854
| 0
| 0.003198
| 0.460593
| 22,610
| 639
| 174
| 35.383412
| 0.804772
| 0.331048
| 0
| 0.634434
| 1
| 0
| 0.210183
| 0.028075
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014151
| false
| 0
| 0.021226
| 0
| 0.049528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
226e91b071316c405dc35012c8d1c1b6a2d2184e
| 8,789
|
py
|
Python
|
openstack_dashboard/dashboards/admin/rbac_policies/tests.py
|
hemantsonawane95/horizon-apelby
|
01a5e72219aeca8c1451701ee85e232ed0618751
|
[
"Apache-2.0"
] | 930
|
2015-01-04T08:06:03.000Z
|
2022-03-13T18:47:13.000Z
|
openstack_dashboard/dashboards/admin/rbac_policies/tests.py
|
hemantsonawane95/horizon-apelby
|
01a5e72219aeca8c1451701ee85e232ed0618751
|
[
"Apache-2.0"
] | 26
|
2015-02-23T16:37:31.000Z
|
2020-07-02T08:37:41.000Z
|
openstack_dashboard/dashboards/admin/rbac_policies/tests.py
|
hemantsonawane95/horizon-apelby
|
01a5e72219aeca8c1451701ee85e232ed0618751
|
[
"Apache-2.0"
] | 1,040
|
2015-01-01T18:48:28.000Z
|
2022-03-19T08:35:18.000Z
|
# Copyright 2019 vmware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from django.urls import reverse
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
# Template rendered by the index view and the URL it is served from; shared
# by the test assertions below.
INDEX_TEMPLATE = 'horizon/common/_data_table_view.html'
INDEX_URL = reverse('horizon:admin:rbac_policies:index')
class RBACPolicyTests(test.BaseAdminViewTests):
# View tests for the admin "RBAC Policies" panel: the index table plus the
# create form for both shared-network and shared-QoS-policy flows. All
# neutron/keystone calls are mocked via test.create_mocks.
@test.create_mocks({api.neutron: ('rbac_policy_list',
'network_list',
'policy_list',
'is_extension_supported',),
api.keystone: ('tenant_list',)})
def test_index(self):
# The index view lists all RBAC policies and needs tenant, network and
# QoS-policy data to resolve display names.
tenants = self.tenants.list()
self.mock_tenant_list.return_value = [tenants, False]
self.mock_network_list.return_value = self.networks.list()
self.mock_policy_list.return_value = self.qos_policies.list()
self.mock_rbac_policy_list.return_value = self.rbac_policies.list()
self.mock_is_extension_supported.return_value = True
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, INDEX_TEMPLATE)
rbac_policies = res.context['table'].data
self.assertCountEqual(rbac_policies, self.rbac_policies.list())
self.mock_network_list.assert_called_once_with(test.IsHttpRequest())
self.mock_policy_list.assert_called_once_with(test.IsHttpRequest())
self.mock_tenant_list.assert_called_once_with(test.IsHttpRequest())
self.mock_is_extension_supported.assert_called_once_with(
test.IsHttpRequest(), extension_alias='qos')
self.mock_rbac_policy_list.assert_called_once_with(
test.IsHttpRequest())
@test.create_mocks({api.neutron: ('network_list',
'rbac_policy_create',
'is_extension_supported',),
api.keystone: ('tenant_list',)})
def test_rbac_create_post_with_network_type(self):
# Submitting the create form with 'external_network' must translate to
# an access_as_external / network rbac_policy_create call.
network = self.networks.first()
tenants = self.tenants.list()
rbac_policy = self.rbac_policies.first()
self.mock_tenant_list.return_value = [tenants, False]
self.mock_network_list.return_value = self.networks.list()
self.mock_is_extension_supported.return_value = False
self.mock_rbac_policy_create.return_value = rbac_policy
form_data = {'target_tenant': rbac_policy.target_tenant,
'action_object_type': 'external_network',
'network_id': network.id}
url = reverse('horizon:admin:rbac_policies:create')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_tenant_list.assert_called_once_with(test.IsHttpRequest())
self.mock_network_list.assert_called_once_with(test.IsHttpRequest())
self.mock_is_extension_supported.assert_called_once_with(
test.IsHttpRequest(), extension_alias='qos')
params = {'target_tenant': rbac_policy.target_tenant,
'action': 'access_as_external',
'object_type': 'network',
'object_id': network.id}
self.mock_rbac_policy_create.assert_called_once_with(
test.IsHttpRequest(), **params)
@test.create_mocks({api.neutron: ('network_list',
'policy_list',
'rbac_policy_create',
'is_extension_supported',),
api.keystone: ('tenant_list',)})
def test_rbac_create_post_with_qos_policy_type(self):
# Submitting the create form with 'shared_qos_policy' must translate to
# an access_as_shared / qos_policy rbac_policy_create call.
qos_policy = self.qos_policies.first()
tenants = self.tenants.list()
rbac_policy = self.rbac_policies.filter(object_type="qos_policy")[0]
self.mock_tenant_list.return_value = [tenants, False]
self.mock_network_list.return_value = self.networks.list()
self.mock_policy_list.return_value = self.qos_policies.list()
self.mock_is_extension_supported.return_value = True
self.mock_rbac_policy_create.return_value = rbac_policy
form_data = {'target_tenant': rbac_policy.target_tenant,
'action_object_type': 'shared_qos_policy',
'qos_policy_id': qos_policy.id}
url = reverse('horizon:admin:rbac_policies:create')
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.mock_tenant_list.assert_called_once_with(test.IsHttpRequest())
self.mock_network_list.assert_called_once_with(test.IsHttpRequest())
self.mock_policy_list.assert_called_once_with(test.IsHttpRequest())
self.mock_is_extension_supported.assert_called_once_with(
test.IsHttpRequest(), extension_alias='qos')
params = {'target_tenant': rbac_policy.target_tenant,
'action': 'access_as_shared',
'object_type': 'qos_policy',
'object_id': qos_policy.id}
self.mock_rbac_policy_create.assert_called_once_with(
test.IsHttpRequest(), **params)
@test.create_mocks({api.neutron: ('network_list',
'is_extension_supported',),
api.keystone: ('tenant_list',)})
def test_rbac_create_post_with_network_type_and_no_network_id(self):
# Missing network_id must surface a required-field form error; the form
# is re-rendered, so each lookup mock is called twice.
tenants = self.tenants.list()
rbac_policy = self.rbac_policies.first()
self.mock_tenant_list.return_value = [tenants, False]
self.mock_network_list.return_value = self.networks.list()
self.mock_is_extension_supported.return_value = False
# note that 'network_id' is not included
form_data = {'target_tenant': rbac_policy.target_tenant,
'action_object_type': 'external_network'}
url = reverse('horizon:admin:rbac_policies:create')
res = self.client.post(url, form_data)
self.assertFormErrors(res, 1)
self.assertContains(res, "This field is required.")
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_list, 2, mock.call(test.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_network_list, 2, mock.call(test.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_extension_supported, 2,
mock.call(test.IsHttpRequest(), extension_alias='qos'))
@test.create_mocks({api.neutron: ('network_list',
'policy_list',
'is_extension_supported',),
api.keystone: ('tenant_list',)})
def test_rbac_create_post_with_qos_policy_type_and_no_qos_policy_id(self):
# Same required-field check for the QoS-policy flow (missing
# qos_policy_id); again every lookup runs twice on re-render.
tenants = self.tenants.list()
rbac_policy = self.rbac_policies.filter(object_type="qos_policy")[0]
self.mock_tenant_list.return_value = [tenants, False]
self.mock_network_list.return_value = self.networks.list()
self.mock_policy_list.return_value = self.qos_policies.list()
self.mock_is_extension_supported.return_value = True
# note that 'qos_policy_id' is not included
form_data = {'target_tenant': rbac_policy.target_tenant,
'action_object_type': 'shared_qos_policy'}
url = reverse('horizon:admin:rbac_policies:create')
res = self.client.post(url, form_data)
self.assertFormErrors(res, 1)
self.assertContains(res, "This field is required.")
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_tenant_list, 2, mock.call(test.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_network_list, 2, mock.call(test.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_policy_list, 2, mock.call(test.IsHttpRequest()))
self.assert_mock_multiple_calls_with_same_arguments(
self.mock_is_extension_supported, 2,
mock.call(test.IsHttpRequest(), extension_alias='qos'))
| 47.766304
| 78
| 0.662191
| 1,036
| 8,789
| 5.254826
| 0.142857
| 0.061719
| 0.055107
| 0.051433
| 0.809882
| 0.793718
| 0.782329
| 0.776451
| 0.767818
| 0.762123
| 0
| 0.002857
| 0.243259
| 8,789
| 183
| 79
| 48.027322
| 0.815667
| 0.074866
| 0
| 0.741007
| 0
| 0
| 0.116205
| 0.038817
| 0
| 0
| 0
| 0
| 0.223022
| 1
| 0.035971
| false
| 0
| 0.028777
| 0
| 0.071942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f282e4be744ad893f9a6b08380e595333ee9d93
| 16,471
|
py
|
Python
|
weather_models/flo2d/flo2d_server_250_prod.py
|
CUrW-SL/DSS-Framework
|
43a39b322ffb0eb92dd116e77cf9a8479357a121
|
[
"MIT"
] | null | null | null |
weather_models/flo2d/flo2d_server_250_prod.py
|
CUrW-SL/DSS-Framework
|
43a39b322ffb0eb92dd116e77cf9a8479357a121
|
[
"MIT"
] | null | null | null |
weather_models/flo2d/flo2d_server_250_prod.py
|
CUrW-SL/DSS-Framework
|
43a39b322ffb0eb92dd116e77cf9a8479357a121
|
[
"MIT"
] | null | null | null |
import os
from builtins import print
from http.server import BaseHTTPRequestHandler, HTTPServer
import json
from urllib.parse import urlparse, parse_qs
# from raincelldat.gen_raincell import create_hybrid_raincell
from curw_sim.gen_raincell_curw_sim import create_sim_hybrid_raincell
from inflowdat.get_inflow import create_inflow
from outflow.gen_outflow import create_outflow
from outflowdat.gen_outflow_old import create_outflow_old
from flo2d.run_model import execute_flo2d_250m, flo2d_model_completed
from waterlevel.upload_waterlevel import upload_waterlevels_curw
from extract.extract_water_level_hourly_run import upload_waterlevels
from extract.extract_discharge_hourly_run import upload_discharges
from chan.gen_chan import create_chan
from os.path import join as pjoin
from datetime import datetime, timedelta
# Address/port the FLO2D 250m control server binds to.
# NOTE(review): 10.138.0.4 appears to be an internal address -- confirm
# before deploying elsewhere; the bind-all alternative is kept commented out.
HOST_ADDRESS = '10.138.0.4'
# HOST_ADDRESS = '0.0.0.0'
HOST_PORT = 8088
def set_daily_dir(run_date, run_time):
    """Create (if needed) and return the per-run output directory.

    The layout is <cwd>/output/<run_date>/<HH-MM-SS>, where the time
    component is reformatted from *run_time*.

    :param run_date: run date as 'YYYY-MM-DD'
    :param run_time: run time as 'HH:MM:SS'
    :return: path of the (best-effort created) output directory
    """
    start_datetime = datetime.strptime('%s %s' % (run_date, run_time), '%Y-%m-%d %H:%M:%S')
    run_time = start_datetime.strftime('%H-%M-%S')
    dir_path = pjoin(os.getcwd(), 'output', run_date, run_time)
    try:
        # exist_ok avoids the check-then-create race of the original
        # os.path.exists() guard (another process could create the dir
        # between the check and makedirs).
        os.makedirs(dir_path, exist_ok=True)
    except OSError as e:
        # Best-effort, matching the original: log and still return the
        # path even if creation failed.
        print(str(e))
    print('set_daily_dir|dir_path : ', dir_path)
    return dir_path
class StoreHandler(BaseHTTPRequestHandler):
# HTTP control endpoint for the FLO2D 250m pipeline. Each GET path prepares
# one model input file (raincell/inflow/chan/outflow), launches the model,
# or extracts/uploads results. Every branch answers 200 with a JSON body of
# {'response': 'success'} on success or {} on failure.
#
# NOTE(review): `create_hybrid_raincell` (used by /create-raincell) is
# undefined -- its import is commented out at the top of the file -- so
# that branch always falls into the except clause and replies {}.
# NOTE(review): the branches use `if` (not `elif`) with startswith
# matching, so '/create-outflow-old' also matches the '/create-outflow'
# branch and BOTH handlers run, each writing a response. Confirm intended.
def do_GET(self):
self.timeout = 2100
print('Handle GET request...')
# Build the hybrid raincell input.
if self.path.startswith('/create-raincell'):
os.chdir(r"D:\flo2d_hourly")
print('create-raincell')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
[forward] = query_components["forward"]
[backward] = query_components["backward"]
print('[run_date, run_time] : ', [run_date, run_time])
dir_path = set_daily_dir(run_date, run_time)
create_hybrid_raincell(dir_path, run_date, run_time, forward, backward)
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Build the raincell input via the curw_sim variant instead.
if self.path.startswith('/create-sim-raincell'):
os.chdir(r"D:\flo2d_hourly")
print('create-sim-raincell')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
[forward] = query_components["forward"]
[backward] = query_components["backward"]
print('[run_date, run_time] : ', [run_date, run_time])
dir_path = set_daily_dir(run_date, run_time)
create_sim_hybrid_raincell(dir_path, run_date, run_time, forward, backward,
res_mins=5, flo2d_model='flo2d_250',
calc_method='MME')
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Build the inflow input for the backward..forward day window.
if self.path.startswith('/create-inflow'):
os.chdir(r"D:\flo2d_hourly")
print('create-inflow')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
print('[run_date, run_time] : ', [run_date, run_time])
dir_path = set_daily_dir(run_date, run_time)
[backward] = query_components["backward"]
[forward] = query_components["forward"]
duration_days = (int(backward), int(forward))
ts_start_date = datetime.strptime(run_date, '%Y-%m-%d') - timedelta(days=duration_days[0])
ts_end_date = datetime.strptime(run_date, '%Y-%m-%d') + timedelta(days=duration_days[1])
ts_end_date = ts_end_date.strftime('%Y-%m-%d')
ts_start_date = ts_start_date.strftime('%Y-%m-%d')
ts_start_time = '00:00:00'
ts_start = '{} {}'.format(ts_start_date, ts_start_time)
ts_end = '{} {}'.format(ts_end_date, ts_start_time)
print('create_inflow-[ts_start, ts_end]', [ts_start, ts_end])
create_inflow(dir_path, ts_start, ts_end)
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Build the channel (CHAN) input file; note only ts_start is passed on.
if self.path.startswith('/create-chan'):
os.chdir(r"D:\flo2d_hourly")
print('create-chan')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
[forward] = query_components["forward"]
[backward] = query_components["backward"]
print('[run_date, run_time] : ', [run_date, run_time])
dir_path = set_daily_dir(run_date, run_time)
duration_days = (int(backward), int(forward))
ts_start_date = datetime.strptime(run_date, '%Y-%m-%d') - timedelta(days=duration_days[0])
ts_end_date = datetime.strptime(run_date, '%Y-%m-%d') + timedelta(days=duration_days[1])
ts_end_date = ts_end_date.strftime('%Y-%m-%d')
ts_start_date = ts_start_date.strftime('%Y-%m-%d')
ts_start_time = '00:00:00'
ts_start = '{} {}'.format(ts_start_date, ts_start_time)
ts_end = '{} {}'.format(ts_end_date, ts_start_time)
print('create_chan-[ts_start, ts_end]', [ts_start, ts_end])
create_chan(dir_path, ts_start, 'flo2d_250')
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Build the outflow input (current 'TSF' variant).
if self.path.startswith('/create-outflow'):
os.chdir(r"D:\flo2d_hourly")
print('create-outflow')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
[forward] = query_components["forward"]
[backward] = query_components["backward"]
print('[run_date, run_time] : ', [run_date, run_time])
dir_path = set_daily_dir(run_date, run_time)
duration_days = (int(backward), int(forward))
ts_start_date = datetime.strptime(run_date, '%Y-%m-%d') - timedelta(days=duration_days[0])
ts_end_date = datetime.strptime(run_date, '%Y-%m-%d') + timedelta(days=duration_days[1])
ts_end_date = ts_end_date.strftime('%Y-%m-%d')
ts_start_date = ts_start_date.strftime('%Y-%m-%d')
ts_start_time = '00:00:00'
ts_start = '{} {}'.format(ts_start_date, ts_start_time)
ts_end = '{} {}'.format(ts_end_date, ts_start_time)
print('create_outflow-[ts_start, ts_end]', [ts_start, ts_end])
# create_outflow(dir_path, ts_start, ts_end)
create_outflow(dir_path, ts_start, ts_end, 'flo2d_250', 'TSF')
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Legacy outflow builder (also triggered by the branch above -- see NOTE).
if self.path.startswith('/create-outflow-old'):
os.chdir(r"D:\flo2d_hourly")
print('create-outflow')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
[forward] = query_components["forward"]
[backward] = query_components["backward"]
print('[run_date, run_time] : ', [run_date, run_time])
dir_path = set_daily_dir(run_date, run_time)
duration_days = (int(backward), int(forward))
ts_start_date = datetime.strptime(run_date, '%Y-%m-%d') - timedelta(days=duration_days[0])
ts_end_date = datetime.strptime(run_date, '%Y-%m-%d') + timedelta(days=duration_days[1])
ts_end_date = ts_end_date.strftime('%Y-%m-%d')
ts_start_date = ts_start_date.strftime('%Y-%m-%d')
ts_start_time = '00:00:00'
ts_start = '{} {}'.format(ts_start_date, ts_start_time)
ts_end = '{} {}'.format(ts_end_date, ts_start_time)
print('create_outflow-[ts_start, ts_end]', [ts_start, ts_end])
create_outflow_old(dir_path, ts_start, ts_end)
#create_outflow(dir_path, ts_start, ts_end, 'flo2d_250', 'TSF')
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Launch the FLO2D 250m model run itself.
if self.path.startswith('/run-flo2d'):
os.chdir(r"D:\flo2d_hourly")
print('run-flo2d')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
print('[run_date, run_time] : ', [run_date, run_time])
dir_path = set_daily_dir(run_date, run_time)
execute_flo2d_250m(dir_path, run_date, run_time)
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Check/mark model completion; the inner try swallows completion errors,
# so 'success' is still reported when flo2d_model_completed fails.
if self.path.startswith('/flo2d-completed'):
os.chdir(r"D:\flo2d_hourly")
print('flo2d-completed')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
dir_path = set_daily_dir(run_date, run_time)
try:
flo2d_model_completed(dir_path, run_date, run_time)
except Exception as ex:
print('flo2d_model_completed|Exception : ', str(ex))
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Extract water levels and discharges (window hard-coded to 2 back /
# 3 forward days) and upload them.
if self.path.startswith('/extract-data'):
os.chdir(r"D:\flo2d_hourly")
print('extract-data')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
dir_path = set_daily_dir(run_date, run_time)
backward = '2'
forward = '3'
duration_days = (int(backward), int(forward))
ts_start_date = datetime.strptime(run_date, '%Y-%m-%d') - timedelta(days=duration_days[0])
ts_start_date = ts_start_date.strftime('%Y-%m-%d')
ts_start_time = '00:00:00'
# upload_waterlevels_curw(dir_path, ts_start_date, ts_start_time)
upload_waterlevels(dir_path, ts_start_date, ts_start_time, run_date, run_time)
# upload discharges to curw_fcst database
upload_discharges(dir_path, ts_start_date, ts_start_time, run_date, run_time)
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
# Extract water levels via upload_waterlevels_curw; note run_date is
# shifted forward one day before uploading.
if self.path.startswith('/extract-curw'):
os.chdir(r"D:\flo2d_hourly")
print('extract-data')
response = {}
try:
query_components = parse_qs(urlparse(self.path).query)
print('query_components : ', query_components)
[run_date] = query_components["run_date"]
[run_time] = query_components["run_time"]
dir_path = set_daily_dir(run_date, run_time)
backward = '2'
forward = '3'
duration_days = (int(backward), int(forward))
ts_start_date = datetime.strptime(run_date, '%Y-%m-%d') - timedelta(days=duration_days[0])
ts_start_date = ts_start_date.strftime('%Y-%m-%d')
run_date = datetime.strptime(run_date, '%Y-%m-%d') + timedelta(days=1)
run_date = run_date.strftime('%Y-%m-%d')
ts_start_time = '00:00:00'
print('upload_waterlevels_curw|[ts_start_date, ts_start_time, run_date, run_time] : ', [ts_start_date,
ts_start_time,
run_date,
run_time])
upload_waterlevels_curw(dir_path, ts_start_date, ts_start_time, run_date, run_time)
response = {'response': 'success'}
except Exception as e:
print(str(e))
reply = json.dumps(response)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(str.encode(reply))
if __name__ == '__main__':
    # Start the single-threaded control server; any startup failure is
    # printed rather than raised so the console shows the cause.
    try:
        print('starting server...')
        bind_address = (HOST_ADDRESS, HOST_PORT)
        server = HTTPServer(bind_address, StoreHandler)
        print('running server...')
        server.serve_forever()
    except Exception as e:
        print(str(e))
| 48.875371
| 118
| 0.55874
| 1,885
| 16,471
| 4.581432
| 0.075862
| 0.056739
| 0.054423
| 0.074572
| 0.814266
| 0.809866
| 0.804423
| 0.792612
| 0.785433
| 0.769453
| 0
| 0.01291
| 0.32281
| 16,471
| 336
| 119
| 49.020833
| 0.761341
| 0.017789
| 0
| 0.755627
| 0
| 0
| 0.127899
| 0.011627
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006431
| false
| 0
| 0.051447
| 0
| 0.064309
| 0.160772
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f3a7eb3472e643d62233ffb1cb64ac8ecab1bf9
| 570
|
py
|
Python
|
train_mosmed_timm-regnetx_002_grid_distortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_mosmed_timm-regnetx_002_grid_distortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_mosmed_timm-regnetx_002_grid_distortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Command template for the five cross-validation folds; only the fold index
# varies between the original hard-coded commands.
_CMD = ("python main.py --configs configs/"
        "train_mosmed_unetplusplus_timm-regnetx_002_fold{}_grid_distortion.yml")

# Run each fold's training sequentially.
for fold in range(5):
    os.system(_CMD.format(fold))
| 51.818182
| 108
| 0.854386
| 85
| 570
| 5.317647
| 0.294118
| 0.110619
| 0.132743
| 0.210177
| 0.847345
| 0.847345
| 0.847345
| 0.847345
| 0.847345
| 0.847345
| 0
| 0.037106
| 0.054386
| 570
| 11
| 109
| 51.818182
| 0.801484
| 0
| 0
| 0
| 0
| 0
| 0.884413
| 0.665499
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3f5656715943fb967956d8f53aee03517e6cdf26
| 46,658
|
py
|
Python
|
renderer/viewer2D.py
|
Paultool/frankmocap
|
b8bb7b587c0841b9292edb147729de581c66054c
|
[
"BSD-3-Clause"
] | 1,612
|
2020-09-23T08:45:18.000Z
|
2022-03-31T18:05:20.000Z
|
renderer/viewer2D.py
|
Paultool/frankmocap
|
b8bb7b587c0841b9292edb147729de581c66054c
|
[
"BSD-3-Clause"
] | 189
|
2020-09-24T11:24:27.000Z
|
2022-03-29T06:30:17.000Z
|
renderer/viewer2D.py
|
Paultool/frankmocap
|
b8bb7b587c0841b9292edb147729de581c66054c
|
[
"BSD-3-Clause"
] | 290
|
2020-09-23T13:40:50.000Z
|
2022-03-28T07:31:22.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
#Visualization Function
import cv2
import numpy as np
import PIL
from PIL.Image import Image
def __ValidateNumpyImg(inputImg):
    """Coerce a PIL image to a numpy array; pass anything else through unchanged."""
    if not isinstance(inputImg, Image):
        return inputImg
    # PIL -> numpy conversion. Channel order is kept as-is (no RGB/BGR swap here;
    # callers that need BGR convert explicitly).
    return np.array(inputImg)
veryFirstImShow = True
veryFirstImShow = True
def ImShow(inputImg, waitTime=1, bConvRGB2BGR=False, name='image', scale=1.0):
    """Display an image in an OpenCV window.

    Args:
        inputImg: numpy image or PIL image (converted via __ValidateNumpyImg).
        waitTime: milliseconds passed to cv2.waitKey (0 blocks until a key).
        bConvRGB2BGR: convert RGB input to BGR before display.
        name: OpenCV window name.
        scale: uniform resize factor applied before display.
    """
    inputImg = __ValidateNumpyImg(inputImg)

    if scale != 1.0:
        # Bug fix: cv2.resize takes dsize as (width, height) == (cols, rows),
        # but the old code passed (rows*int(scale), cols*int(scale)), which
        # swapped the axes AND truncated fractional scales (e.g. 0.5 -> 0,
        # which crashes cv2.resize with an empty size).
        newSize = (int(inputImg.shape[1] * scale), int(inputImg.shape[0] * scale))
        inputImg = cv2.resize(inputImg, newSize)

    if bConvRGB2BGR:
        inputImg = cv2.cvtColor(inputImg, cv2.COLOR_RGB2BGR)

    cv2.imshow(name, inputImg)
    # The original "wait on very first show" branch was dead code (`if False:`),
    # so the effective behavior is always a plain waitKey.
    cv2.waitKey(waitTime)
def ImgSC(inputImg, waitTime=1, bConvRGB2BGR=False, name='image', scale=1.0):
    """Rescale image intensities to [0, 255] and display it (imagesc-style).

    Args:
        inputImg: numpy image or PIL image (converted via __ValidateNumpyImg).
        waitTime: milliseconds for cv2.waitKey after the first-ever show.
        bConvRGB2BGR: convert RGB input to BGR before display.
        name: OpenCV window name.
        scale: uniform resize factor applied before display.
    """
    inputImg = __ValidateNumpyImg(inputImg)

    minVal = np.min(inputImg)
    maxVal = np.max(inputImg)
    # Rescale to [0, 255]. Bug fix: guard against a constant image, where the
    # old code divided by zero (maxVal == minVal -> NaN/inf image).
    valueRange = maxVal - minVal
    if valueRange == 0:
        inputImg = np.zeros_like(inputImg, dtype=np.float64)
    else:
        inputImg = (inputImg - minVal) / valueRange * 255

    if scale != 1.0:
        # Bug fix: cv2.resize takes dsize as (width, height) == (cols, rows);
        # the old code passed (rows, cols) and truncated fractional scales.
        newSize = (int(inputImg.shape[1] * scale), int(inputImg.shape[0] * scale))
        inputImg = cv2.resize(inputImg, newSize)

    if bConvRGB2BGR:
        inputImg = cv2.cvtColor(inputImg, cv2.COLOR_RGB2BGR)

    cv2.imshow(name, inputImg)
    global veryFirstImShow
    if veryFirstImShow:
        # Block on the very first window so it is not missed.
        print(">> Press any key to move on")
        cv2.waitKey(0)  # the initial one is always blank... why?
        veryFirstImShow = 0
    else:
        cv2.waitKey(waitTime)
# import matplotlib.pyplot as plt
# def Plot(values, title=None):
# plt.plot(values)
# if title is not None:
# plt.title(title)#, loc='left', fontsize=12, fontweight=0, color='orange')
# plt.show()
#bbe: min_pt, max_pt
def Vis_Bbox_minmaxPt(inputImg, min_pt, max_pt, color=None):
    """Draw a bbox given its min/max corner points; delegates to Vis_Bbox (XYWH)."""
    width = max_pt[0] - min_pt[0]
    height = max_pt[1] - min_pt[1]
    bbox_xywh = [min_pt[0], min_pt[1], width, height]
    return Vis_Bbox(inputImg, bbox_xywh, color)
def Vis_Bbox_XYXY(inputImg, bbox_xyxy, color=None):
    """Draw a rectangle given [x_min, y_min, x_max, y_max]; default color is red (BGR)."""
    if color is None:
        color = (0, 0, 255)  # red in BGR
    topLeft = (int(bbox_xyxy[0]), int(bbox_xyxy[1]))
    bottomRight = (int(bbox_xyxy[2]), int(bbox_xyxy[3]))
    cv2.rectangle(inputImg, topLeft, bottomRight, color, 3)
    return inputImg
def Vis_Bbox(inputImg, bbox_xyhw, color=None):
    """Alias for Vis_Bbox_XYWH: draw a bbox given [leftTop_x, leftTop_y, width, height]."""
    return Vis_Bbox_XYWH(inputImg, bbox_xyhw, color)
#bbe: [leftTop_x,leftTop_y,width,height]
def Vis_Bbox_XYWH(inputImg, bbox_xyhw, color=None):
    """Draw a rectangle given [leftTop_x, leftTop_y, width, height]; default red (BGR)."""
    inputImg = __ValidateNumpyImg(inputImg)
    if color is None:
        color = (0, 0, 255)  # red in BGR
    x, y = bbox_xyhw[0], bbox_xyhw[1]
    w, h = bbox_xyhw[2], bbox_xyhw[3]
    topLeft = (int(x), int(y))
    bottomRight = (int(x + w), int(y + h))
    cv2.rectangle(inputImg, topLeft, bottomRight, color, 3)
    return inputImg
def Vis_CocoBbox(inputImg, coco_annot):
    """Draw a COCO annotation's bbox ([leftTop_x, leftTop_y, width, height]) in white."""
    inputImg = __ValidateNumpyImg(inputImg)
    bbr = np.round(coco_annot['bbox'])
    topLeft = (int(bbr[0]), int(bbr[1]))
    bottomRight = (int(bbr[0] + bbr[2]), int(bbr[1] + bbr[3]))
    cv2.rectangle(inputImg, topLeft, bottomRight, (255, 255, 255), 3)
    return inputImg
# connections_right = [
# {0, 2}, {2, 4}, {0, 6} //nect, rightEye, rightEar
# , {6, 8}, {8, 10}, {6,12}, {12,14} , {14, 16}
# };
#]
def Vis_CocoSkeleton(keypoints, image=None):
    """Draw a COCO-17 skeleton: joints as red dots, left-side bones red, right-side grey.

    Args:
        keypoints: flattened COCO keypoints; (51,) is reshaped to (17, 3)
            (x, y, label), anything else to (-1, 2).
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
    Returns:
        The image with the skeleton drawn.
    """
    if not isinstance(image, np.ndarray):
        # No canvas supplied: draw on a blank white image.
        image = np.ones((1000, 1000, 3), np.uint8) * 255
    image = __ValidateNumpyImg(image)

    # COCO17 original annotation ordering
    link2D = [[0, 1], [1, 3],               # nose(0), leftEye(1), leftEar(3)
              [0, 5], [5, 7], [7, 9],       # leftShoulder(5), leftArm(7), leftWrist(9)
              [0, 11], [11, 13], [13, 15],  # leftHip(11), leftKnee(13), leftAnkle(15)
              [0, 2], [2, 4],               # nose(0), rightEye(2), rightEar(4)
              [0, 6], [6, 8], [8, 10],      # rightShoulder(6), rightArm(8), rightWrist(10)
              [0, 12], [12, 14], [14, 16]]  # rightHip(12), rightKnee(14), rightAnkle(16)
    bLeft = [1, 1,
             1, 1, 1,
             1, 1, 1,
             0, 0,
             0, 0, 0,
             0, 0, 0]

    # Keypoints may arrive flattened: 51 values -> (17, 3) with labels.
    if keypoints.shape[0] == 51:
        keypoints = np.reshape(keypoints, (-1, 3))
    else:
        keypoints = np.reshape(keypoints, (-1, 2))

    radius = 4
    for pt in keypoints:
        cv2.circle(image, (int(pt[0]), int(pt[1])), radius, (0, 0, 255), -1)

    for (parent, child), isLeft in zip(link2D, bLeft):
        c = (0, 0, 255) if isLeft else (200, 200, 200)  # left red, right grey (BGR)
        if keypoints[parent][0] == 0 or keypoints[child][0] == 0:
            continue  # x == 0 marks an un-annotated joint
        cv2.line(image,
                 (int(keypoints[parent][0]), int(keypoints[parent][1])),
                 (int(keypoints[child][0]), int(keypoints[child][1])),
                 c, radius - 2)
    return image
# DensePose part-label lookup: maps a body-part name to the integer label used
# in COCO-DensePose 'dp_I' annotations (values 1-24).
DP_partIdx ={
    'Torso_Back': 1,
    'Torso_Front': 2,
    'RHand': 3,
    'LHand': 4,
    'LFoot': 5,
    'RFoot': 6,
    'R_upperLeg_back': 7,
    'L_upperLeg_back': 8,
    'R_upperLeg_front': 9,
    'L_upperLeg_front': 10,
    'R_lowerLeg_back': 11,
    'L_lowerLeg_back': 12,
    'R_lowerLeg_front': 13,
    'L_lowerLeg_front': 14,
    'L_upperArm_front': 15,
    'R_upperArm_front': 16,
    'L_upperArm_back': 17,
    'R_upperArm_back': 18,
    'L_lowerArm_back': 19,
    'R_lowerArm_back': 20,
    'L_lowerArm_front': 21,
    'R_lowerArm_front': 22,
    'RFace': 23,
    'LFace': 24
    }
def _Densepose_PartBBox(Point_x, Point_y, indices):
    """Return [minX, minY, width, height] over the given point indices, or
    [-1, -1, -1, -1] when no point carries the part label."""
    if len(indices) == 0:
        return [-1, -1, -1, -1]
    minX = min(Point_x[indices])
    maxX = max(Point_x[indices])
    minY = min(Point_y[indices])
    maxY = max(Point_y[indices])
    return [minX, minY, maxX - minX, maxY - minY]

def Vis_Densepose(inputImg, coco_annot):
    """Visualize a COCO-DensePose annotation on an image.

    Draws derived bounding boxes for the right hand (red), left hand (green)
    and face (blue), then every annotated surface point colored by its part
    label through a JET colormap.

    Args:
        inputImg: numpy or PIL image.
        coco_annot: COCO annotation dict; needs 'bbox' and the DensePose
            fields 'dp_x', 'dp_y', 'dp_I'. Without 'dp_x' the image is
            returned unchanged with a warning.
    Returns:
        The annotated image.
    """
    inputImg = __ValidateNumpyImg(inputImg)

    import sys
    # NOTE(review): hard-coded machine-local path; the import will fail on
    # other machines.
    sys.path.append('/home/hjoo/data/DensePose/detectron/utils/')
    import densepose_methods as dp_utils
    # Kept although unused below: constructing DensePoseMethods may load data
    # the commented-out IUV->vertex code relied on. TODO confirm it can go.
    DP = dp_utils.DensePoseMethods()

    if('dp_x' not in coco_annot.keys()):
        print("## Warning: No Densepose coco_annotation")
        return inputImg

    bbr = np.round(coco_annot['bbox'])  # [leftTop_x, leftTop_y, width, height]
    # Stretch the annotated points from the 255x255 reference frame into the
    # current bbox: from 255x255 -> [bboxWidth, bboxHeight].
    Point_x = np.array(coco_annot['dp_x']) / 255. * bbr[2] + bbr[0]
    Point_y = np.array(coco_annot['dp_y']) / 255. * bbr[3] + bbr[1]
    dp_I = coco_annot['dp_I']  # per-point part label; see DP_partIdx for the name -> index map

    # Derive bboxes for rhand, lhand and face from the DensePose part labels.
    # (The three copy-pasted min/max blocks were deduplicated into
    # _Densepose_PartBBox; behavior is unchanged.)
    RHandIdx = [i for i, x in enumerate(dp_I) if x == DP_partIdx['RHand']]
    LHandIdx = [i for i, x in enumerate(dp_I) if x == DP_partIdx['LHand']]
    FaceIdx = [i for i, x in enumerate(dp_I)
               if x == DP_partIdx['RFace'] or x == DP_partIdx['LFace']]
    RhandBBox = _Densepose_PartBBox(Point_x, Point_y, RHandIdx)
    LhandBBox = _Densepose_PartBBox(Point_x, Point_y, LHandIdx)
    FaceBBox = _Densepose_PartBBox(Point_x, Point_y, FaceIdx)

    # Draw the derived part bboxes (BGR): RHand red, LHand green, Face blue.
    # A missing part yields the [-1,-1,-1,-1] sentinel, drawn off-canvas like
    # in the original.
    for bbox, bgr in ((RhandBBox, (0, 0, 255)),
                      (LhandBBox, (0, 255, 0)),
                      (FaceBBox, (255, 0, 0))):
        pt1 = (int(bbox[0]), int(bbox[1]))
        pt2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
        cv2.rectangle(inputImg, pt1, pt2, bgr, 2)

    # Draw each DensePose point, colored by its part label (1..24) mapped
    # through a JET colormap.
    tempColorIdx = np.uint8(np.array(dp_I) / 24 * 255)
    tempColorIdx = cv2.applyColorMap(tempColorIdx, cv2.COLORMAP_JET)
    for pt in zip(Point_x, Point_y, tempColorIdx, dp_I):
        tempColor = pt[2][0].astype(np.int32).tolist()
        cv2.circle(inputImg, (int(pt[0]), int(pt[1])), 5, tempColor, -1)

    return inputImg
#H36m skeleton32
#skel can be
# : (17,2) #with Nose
# : (16,2) #without NOse
# : (32,2) #original
def Vis_Skeleton_2D_H36m(pt2d, image = None, color=None):
    """Draw a Human3.6M skeleton on an image.

    Args:
        pt2d: 2D joint locations, reshaped to (-1, 2). Joint-count branches:
            16 (no nose), 17 (with nose), otherwise assumed to be the original
            32-joint H36M mocap layout.
        image: optional BGR canvas (np.ndarray); a white 1000x1000 image is
            created when the argument is not an ndarray.
        color: optional BGR bone color; when None, bones flagged in bLeft are
            drawn red and the rest black.
    Returns:
        The image with joints (green dots) and bones (lines) drawn.
    """
    pt2d = np.reshape(pt2d,[-1,2]) #Just in case. Make sure (32, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray):#not image: #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    if(pt2d.shape[0]==16):
        print("Vis_Skeleton_2D_H36m: {} joints".format(16))
        #Without Nose
        link2D = [ [0,1],[1,2],[2,3],#root(0), rHip(1), rKnee(2), rAnkle(3)
                [0,4],[4,5],[5,6],#root(0, lHip(4), lKnee(5), lAnkle(6)
                [0,7], [7,8], [8,9], #root(0, spineMid(7), neck(8), head(9)
                [8,10], [10,11], [11,12], #Left Arms. neck(8). lshoulder(10), lElbow(11), lWrist (12)
                [8,13], [13,14], [14,15] #Right Arm, neck(8), rshoulder(13), rElbow(14), rWrist (15)
                ]
        bLeft = [ 0,0,0,
                1, 1, 1,
                1,1,1,
                1,1,1,
                0,0,0]
    elif pt2d.shape[0]==17:
        print("Vis_Skeleton_2D_H36m: {} joints".format(17))
        #With Nose
        link2D = [ [0,1],[1,2],[2,3],#root(0), rHip(1), rKnee(2), rAnkle(3)
                [0,4],[4,5],[5,6],#root(0, lHip(4), lKnee(5), lAnkle(6)
                [0,7], [7,8], [8,9], [9,10], #root(0, spineMid(7), neck(8), nose(9), head(9)
                [8,11], [11,12], [12,13], #Left Arms. neck(8). lshoulder(11), lElbow(12), lWrist (13)
                [8,14], [14,15], [15,16] #Right Arm, neck(8), rshoulder(14), rElbow(15), rWrist (16)
                ]
        bLeft = [ 0,0,0,
                1, 1, 1,
                1,1,1, 1,
                1,1,1,
                0,0,0]
    else:
        print("Vis_Skeleton_2D_H36m: {} joints".format(32))
        #Human 36m DB's mocap data. 32 joints
        link2D = [ [0,1],[1,2],[2,3],[3,4],[4,5], #RightLeg: root(0), rHip(1), rKnee(2), rAnkle(3), rFootMid(4), rFootEnd(5)
                [0,6],[6,7],[7,8],[8,9], [9,10], #LeftLeg: root, lHip(6), lKnee(7), lAnkle(8), lFootMid(9), lFootEnd(10)
                [11,12], [12,13], [13,14], [14,15], #root2(11), spineMid(12), neck(13), nose(14), head(15) #0,11 are the same points?
                [16,17], [17,18], [18,19], [20,21], [20,22], #Left Arms. neck(16==13), lshoulder(17), lElbow(18), lWrist (19=20), lThumb(21), lMiddleFinger(22)
                [24,25], [25,26], [26,27], [27,29], [27,30] #Right Arm, neck(24==13), rshoulder(25), rElbow(26), rWrist (27=28), rThumb(29), rMiddleFinger(30)
                ]
        bLeft = [0 ,0, 0, 0, 0,
                1, 1, 1, 1, 1,
                1, 1, 1, 1,
                1, 1, 1, 1, 1,
                0, 0, 0, 0, 0] #To draw left as different color. Torso is treated as left
    # for i in np.arange( len(link) ):
    # Joints: green filled circles.
    for k in np.arange( len(pt2d) ):
        cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    # Bones: red for bLeft==1, black otherwise, unless an explicit color is given.
    for k in np.arange( len(link2D) ):
        parent = link2D[k][0]
        child = link2D[k][1]
        if color is not None:
            c = color
        else:
            if bLeft[k]:
                c = (0,0,255)#BGR, RED
            else:
                c = (0,0,0)
        cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    return image
#Panoptic Studio SMC19 ordering
def Vis_Skeleton_2D_SMC19(pt2d, image = None, color=None):
    """Draw a Panoptic Studio SMC19 (19-joint) skeleton.

    NOTE(review): this function is defined twice in this module with identical
    bodies; the later definition shadows this one at import time.

    Args:
        pt2d: 2D joints, reshaped to (-1, 2); asserted to contain 19 joints.
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        color: optional BGR bone color; when None, bLeft bones are red, others black.
    Returns:
        The image with joints (green dots) and bones drawn.
    """
    pt2d = np.reshape(pt2d,[-1,2]) #Just in case. Make sure (32, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray):#not image: #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    assert pt2d.shape[0]==19
    print("Vis_Skeleton_2D_H36m: {} joints".format(16))  # NOTE(review): label copy-pasted from the H36m visualizer
    #Without Nose
    link2D = [ [0,1], [0,2], #neck -> nose, neck-> bodyCenter
            [0,3], [3,4], [4,5], #Left Arm
            [2,6], [6,7], [7,8], #left leg
            [2,12],[12,13], [13,14], #Right leg
            [0,9], [9, 10], [10, 11], #Right Arm
            [1, 15], [15, 16], #left eye
            [1, 17], [17, 18]] #right eye
    bLeft = [ 1,1,
            1, 1, 1,
            1,1,1,
            0,0,0,
            0,0,0,
            1,1,
            0,0]
    # for i in np.arange( len(link) ):
    # Joints: green filled circles.
    for k in np.arange( len(pt2d) ):
        cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    # Bones: red for bLeft==1, black otherwise, unless an explicit color is given.
    for k in np.arange( len(link2D) ):
        parent = link2D[k][0]
        child = link2D[k][1]
        if color is not None:
            c = color
        else:
            if bLeft[k]:
                c = (0,0,255)#BGR, RED
            else:
                c = (0,0,0)
        cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    return image
#Panoptic Studio SMC19 ordering
def Vis_Skeleton_2D_SMC19(pt2d, image = None, color=None):
    """Draw a Panoptic Studio SMC19 (19-joint) skeleton.

    NOTE(review): this is a byte-identical duplicate of the definition above;
    being later in the file, this one wins at import time. One copy should be
    removed.

    Args:
        pt2d: 2D joints, reshaped to (-1, 2); asserted to contain 19 joints.
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        color: optional BGR bone color; when None, bLeft bones are red, others black.
    Returns:
        The image with joints (green dots) and bones drawn.
    """
    pt2d = np.reshape(pt2d,[-1,2]) #Just in case. Make sure (32, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray):#not image: #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    assert pt2d.shape[0]==19
    print("Vis_Skeleton_2D_H36m: {} joints".format(16))  # NOTE(review): label copy-pasted from the H36m visualizer
    #Without Nose
    link2D = [ [0,1], [0,2], #neck -> nose, neck-> bodyCenter
            [0,3], [3,4], [4,5], #Left Arm
            [2,6], [6,7], [7,8], #left leg
            [2,12],[12,13], [13,14], #Right leg
            [0,9], [9, 10], [10, 11], #Right Arm
            [1, 15], [15, 16], #left eye
            [1, 17], [17, 18]] #right eye
    bLeft = [ 1,1,
            1, 1, 1,
            1,1,1,
            0,0,0,
            0,0,0,
            1,1,
            0,0]
    # for i in np.arange( len(link) ):
    # Joints: green filled circles.
    for k in np.arange( len(pt2d) ):
        cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    # Bones: red for bLeft==1, black otherwise, unless an explicit color is given.
    for k in np.arange( len(link2D) ):
        parent = link2D[k][0]
        child = link2D[k][1]
        if color is not None:
            c = color
        else:
            if bLeft[k]:
                c = (0,0,255)#BGR, RED
            else:
                c = (0,0,0)
        cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    return image
def Vis_Skeleton_2D_Hand(pt2d, image = None, color=None):
    """Draw a 21-joint hand skeleton (wrist 0 plus four joints per finger).

    Args:
        pt2d: 2D joints, reshaped to (-1, 2).
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        color: optional BGR bone color; defaults to yellow.
    Returns:
        The image with joints (green dots) and bones drawn.
    """
    pt2d = np.reshape(pt2d,[-1,2]) #Just in case. Make sure (N, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray): #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    # Bug fix: the old debug print was copy-pasted from the H36m visualizer
    # and reported "Vis_Skeleton_2D_H36m: 16 joints" for hand input.
    print("Vis_Skeleton_2D_Hand: {} joints".format(len(pt2d)))
    # Wrist (0) to the tip of each finger chain.
    link2D = [ [0,1], [1,2], [2,3], [3,4], #thumb
            [0,5], [5,6],[6,7],[7,8], #index
            [0,9],[9,10],[10,11],[11,12],
            [0,13],[13,14],[14,15],[15,16],
            [0,17],[17,18],[18,19],[19,20]
            ]
    # Joints: green filled circles.
    for k in np.arange( len(pt2d) ):
        cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    for k in np.arange( len(link2D) ):
        parent = link2D[k][0]
        child = link2D[k][1]
        # Dead-code fix: the original guarded the default with `if True:#bLeft[k]`
        # where bLeft was never defined in this function; the default is always
        # yellow (the old comment mislabeled (0,255,255) as red).
        if color is not None:
            c = color
        else:
            c = (0,255,255) #BGR, yellow
        cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    return image
#smplCOCO19 or MTC20
#smplCOCO: (19,2)
#MTC: (20,2)
#pt2d: (36,)
#pt2d_visibility: (18,)
def Vis_Skeleton_2D_smplCOCO(pt2d, pt2d_visibility=None, image = None, blankImSize = 1000, bVis = False, color=None, bBoxWidth=None):
    """Draw an smplCOCO-ordered skeleton (19/20, 18, or 26 joints).

    Args:
        pt2d: 2D joints, reshaped to (-1, 2). Supported joint counts: 19 or 20
            (smplCOCO / MTC), 18 (no headTop), 26 (totalCap with feet); any
            other count triggers `assert False`.
        pt2d_visibility: optional per-joint flags; when given (and when the
            flattened length is 2x the joint count, every other entry is
            taken), invisible joints and bones are skipped.
        image: optional BGR canvas; a white blankImSize^2 image is created when
            the argument is not an ndarray.
        blankImSize: side length of the generated blank canvas.
        bVis: show the result via ImShow when True.
        color: optional BGR bone color; when None, bLeft selects red vs black.
        bBoxWidth: when given, additionally draws a [0,0]-[bBoxWidth,bBoxWidth] bbox.
    Returns:
        The image with the skeleton drawn.
    """
    pt2d = np.reshape(pt2d,[-1,2]) #Just in case. Make sure (32, 2)
    if pt2d_visibility is not None and len(pt2d_visibility) == len(pt2d)*2:
        # Visibility arrived interleaved with a 2x length; keep every other entry.
        pt2d_visibility = pt2d_visibility[::2]
    #Draw via opencv
    if not isinstance(image, np.ndarray):#not image: #If no image is given, generate Blank image
        image = np.ones((blankImSize,blankImSize,3),np.uint8) *255
    radius = 4
    if(pt2d.shape[0]==19 or pt2d.shape[0]==20):
        # print("Vis_Skeleton_2D_smplCOCO: {} joints".format(16))
        #Without Nose
        link2D = [ [12,2], [2,1], [1,0], #Right leg
                [12,3], [3,4], [4,5], #Left leg
                [12,9], [9,10], [10,11], #Left Arm
                [12,8], [8,7], [7,6], #Right shoulder
                [12,14],[14,16],[16,18], #Neck(12)->Nose(14)->rightEye(16)->rightEar(18)
                [14,15],[15,17], #Nose(14)->leftEye(15)->leftEar(17).
                [14,13] #Nose->headTop(13)
                ]
        # NOTE(review): this table has 19 entries for 18 links (last one unused),
        # and the stray `11` values are truthy, so those bones render red like 1.
        bLeft = [ 0,0,0,
                1, 1, 1,
                0,0,0,
                1,1,1,
                1,1,1,
                11,0,11,0]
    elif(pt2d.shape[0]==18): #No head (13)
        # print("Vis_Skeleton_2D_smplCOCO: {} joints".format(16))
        #Without Nose
        link2D = [ [12,2], [2,1], [1,0], #Right leg
                [12,3], [3,4], [4,5], #Left leg
                [12,9], [9,10], [10,11], #Left Arm
                [12,8], [8,7], [7,6], #Right shoulder
                [12,13],[13,15],[15,17], #Neck(12)->Nose(14)->rightEye(16)->rightEar(18)
                [13,14],[14,16] #Nose(14)->leftEye(15)->leftEar(17).
                # [14,13] #Nose->headTop(13)
                ]
        bLeft = [ 0,0,0,
                1, 1, 1,
                1,1,1,
                0,0,0,
                1,0,0,
                1,1]
    elif(pt2d.shape[0]==26): #SMPLCOCO totalCpa26
        #Without Nose
        link2D = [ [12,2], [2,1], [1,0], #Right leg
                [12,3], [3,4], [4,5], #Left leg
                [12,9], [9,10], [10,11], #Left Arm
                [12,8], [8,7], [7,6], #Right shoulder
                [12,14],[14,16],[16,18], #Neck(12)->Nose(14)->rightEye(16)->rightEar(18)
                [14,15],[15,17], #Nose(14)->leftEye(15)->leftEar(17).
                # [14,13], #Nose->headMidle(13)
                [12,19], #headTop19
                [5,20], [5,21], [5,22], #leftFoot
                [0,23], [0,24], [0,25] #rightFoot
                ]
        bLeft = [ 0,0,0,
                1, 1, 1,
                1,1,1,
                0,0,0,
                1,0,0,
                1,1,
                1,
                1,1,1,
                0,0,0]
    else:
        assert False
    # for i in np.arange( len(link) ):
    # Joints: green filled circles (skipped when marked invisible).
    for k in np.arange( len(pt2d) ):
        if pt2d_visibility is None:
            cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
        else:
            if pt2d_visibility[k]:
                cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    # Bones: red for truthy bLeft, black otherwise, unless an explicit color is given.
    for k in np.arange( len(link2D) ):
        parent = link2D[k][0]
        child = link2D[k][1]
        if color is not None:
            c = color
        else:
            if bLeft[k]:
                c = (0,0,255)#BGR, RED
            else:
                c = (0,0,0)
        if pt2d_visibility is None:
            cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
        else:
            if pt2d_visibility[parent] and pt2d_visibility[child]:
                cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    if bBoxWidth is not None:
        image = Vis_Bbox_minmaxPt(image, [0,0], [bBoxWidth,bBoxWidth])
    if bVis:
        ImShow(image,name='Vis_Skeleton_2D_smplCOCO')
    return image
def Vis_Skeleton_2D_smpl24(pt2d, image = None, bVis = False, color=None):
    """Draw the 24 SMPL LBS joints with their kinematic tree.

    Args:
        pt2d: 2D joints, reshaped to (-1, 2).
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        bVis: show the result via ImShow when True.
        color: optional BGR bone color; defaults to red.
    Returns:
        The image with joints (green dots) and bones drawn.
    """
    pt2d = np.reshape(pt2d,(-1,2)) #Just in case. Make sure (24, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray): #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    #SMPL 24 joints used for LBS
    link2D = [ [0,3],[3,6],[6,9],[9,12],[12,15], #root-> torso -> head
            [9,13],[13,16],[16,18],[18,20],[20,22], #Nect-> left hand
            [9,14], [14,17], [17,19], [19,21], [21,23], #Nect-> right hand
            [0,1], [1,4], [4,7], [7,10], # left Leg
            [0,2], [2,5], [5,8], [8,11] #right leg
            ]
    # Joints: green filled circles.
    for k in np.arange( len(pt2d) ):
        cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    for k in np.arange( len(link2D) ):
        parent = link2D[k][0]
        child = link2D[k][1]
        # Dead-code fix: the original left/right color table (bLeft) sat behind
        # an `if True:` guard and was never consulted, so every bone draws red
        # unless an explicit color is given. Behavior is unchanged.
        if color is not None:
            c = color
        else:
            c = (0,0,255) #BGR, RED
        cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    if bVis:
        ImShow(image)
    return image
def Vis_Skeleton_2D_smpl45(pt2d, image = None, bVis = False, color=None):
    """Draw an SMPL-45 joint set: all joints as dots, bones only for the first
    24 LBS joints.

    NOTE(review): the link table is identical to Vis_Skeleton_2D_smpl24 — the
    extra joints beyond index 23 get dots but no bones.

    Args:
        pt2d: 2D joints, reshaped to (-1, 2).
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        bVis: show the result via ImShow when True.
        color: optional BGR bone color; defaults to red.
    Returns:
        The image with joints (green dots) and bones drawn.
    """
    pt2d = np.reshape(pt2d,(-1,2)) #Just in case. Make sure (45, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray): #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    #SMPL 24 joints used for LBS
    link2D = [ [0,3],[3,6],[6,9],[9,12],[12,15], #root-> torso -> head
            [9,13],[13,16],[16,18],[18,20],[20,22], #Nect-> left hand
            [9,14], [14,17], [17,19], [19,21], [21,23], #Nect-> right hand
            [0,1], [1,4], [4,7], [7,10], # left Leg
            [0,2], [2,5], [5,8], [8,11] #right leg
            ]
    # Joints: green filled circles (all joints, including the un-linked ones).
    for k in np.arange( len(pt2d) ):
        cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    for k in np.arange( len(link2D) ):
        parent = link2D[k][0]
        child = link2D[k][1]
        # Dead-code fix: the original bLeft table sat behind an `if True:` guard
        # and was never consulted; every bone draws red unless an explicit
        # color is given. Behavior is unchanged.
        if color is not None:
            c = color
        else:
            c = (0,0,255) #BGR, RED
        cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    if bVis:
        ImShow(image)
    return image
def Vis_Skeleton_2D_MPII(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None):
    """Draw an MPII 16-joint skeleton.

    Joints are green dots; bones flagged left in bLeft are red, the rest black,
    unless an explicit BGR color is given. Joints/bones with a zero visibility
    flag are skipped when pt2d_visibility is provided.
    """
    pt2d = np.reshape(pt2d, (-1, 2))  # accept flattened input
    if not isinstance(image, np.ndarray):
        # No canvas supplied: draw on a blank white 1000x1000 image.
        image = np.ones((1000, 1000, 3), np.uint8) * 255
    radius = 4

    link2D = [[6, 7], [7, 8], [8, 9],       # root -> torso -> head
              [7, 12], [12, 11], [11, 10],  # right arm
              [7, 13], [13, 14], [14, 15],  # left arm
              [6, 2], [2, 1], [1, 0],       # right leg
              [6, 3], [3, 4], [4, 5]]       # left leg
    bLeft = [1, 1, 1,
             0, 0, 0,
             1, 1, 1,
             0, 0, 0,
             1, 1, 1]

    for k, pt in enumerate(pt2d):
        if pt2d_visibility is None or pt2d_visibility[k]:
            cv2.circle(image, (int(pt[0]), int(pt[1])), radius, (0, 255, 0), -1)

    for (parent, child), isLeft in zip(link2D, bLeft):
        if pt2d_visibility is not None and not (pt2d_visibility[parent] and pt2d_visibility[child]):
            continue
        if color is not None:
            c = color
        else:
            c = (0, 0, 255) if isLeft else (0, 0, 0)  # red vs black (BGR)
        cv2.line(image,
                 (int(pt2d[parent][0]), int(pt2d[parent][1])),
                 (int(pt2d[child][0]), int(pt2d[child][1])),
                 c, radius - 2)

    if bVis:
        ImShow(image)
    return image
def Vis_Skeleton_2D_foot(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None):
    """Draw six foot keypoints as two 3-point chains.

    Joints are green dots; the first chain is red, the second black, unless an
    explicit BGR color is given. Joints/bones with a zero visibility flag are
    skipped when pt2d_visibility is provided.
    """
    pt2d = np.reshape(pt2d, (-1, 2))  # accept flattened input
    if not isinstance(image, np.ndarray):
        # No canvas supplied: draw on a blank white 1000x1000 image.
        image = np.ones((1000, 1000, 3), np.uint8) * 255
    radius = 4

    link2D = [[0, 1], [1, 2],   # first 3-point chain
              [3, 4], [4, 5]]   # second 3-point chain
    bLeft = [1, 1,
             0, 0]

    for k, pt in enumerate(pt2d):
        if pt2d_visibility is None or pt2d_visibility[k]:
            cv2.circle(image, (int(pt[0]), int(pt[1])), radius, (0, 255, 0), -1)

    for (parent, child), isLeft in zip(link2D, bLeft):
        if pt2d_visibility is not None and not (pt2d_visibility[parent] and pt2d_visibility[child]):
            continue
        if color is not None:
            c = color
        else:
            c = (0, 0, 255) if isLeft else (0, 0, 0)  # red vs black (BGR)
        cv2.line(image,
                 (int(pt2d[parent][0]), int(pt2d[parent][1])),
                 (int(pt2d[child][0]), int(pt2d[child][1])),
                 c, radius - 2)

    if bVis:
        ImShow(image)
    return image
def Vis_Skeleton_2D_Openpose25(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None):
    """Draw an OpenPose BODY_25 skeleton.

    If pt2d reshapes to 49 joints (SPIN: 25 OpenPose + 24 superset joints),
    drawing is delegated to Vis_Skeleton_2D_SPIN49.

    Args:
        pt2d: 2D joints, reshaped to (-1, 2).
        pt2d_visibility: optional per-joint flags; invisible joints/bones skipped.
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        bVis: only forwarded to Vis_Skeleton_2D_SPIN49 in the 49-joint path;
            unused otherwise.
        color: optional BGR bone color; when None, bLeft bones blue, others black.
    Returns:
        The image with joints (green dots) and bones drawn.
    """
    pt2d = np.reshape(pt2d,(-1,2)) #Just in case. Make sure (32, 2)
    if pt2d.shape[0]==49: #SPIN 25 (openpose) + 24 (superset) joint
        return Vis_Skeleton_2D_SPIN49(pt2d, pt2d_visibility, image, bVis, color)
    #Draw via opencv
    if not isinstance(image, np.ndarray):#not image: #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    #Openpose25
    link_openpose = [ [8,1], [1,0] , [0,16] , [16,18] , [0,15], [15,17],
            [1,2],[2,3],[3,4], #Right Arm
            [1,5], [5,6], [6,7], #Left Arm
            [8,12], [12,13], [13,14], [14,21], [14,19], [14,20],
            [8,9], [9,10], [10,11], [11,24], [11,22], [11,23]
            ]
    bLeft = [ 1,1,1,1,0,0,
            0,0,0,
            1,1,1,
            1,1,1,1,1,1,
            0,0,0,0,0,0]
    # for i in np.arange( len(link) ):
    # Joints: green filled circles (skipped when marked invisible).
    for k in np.arange( len(pt2d) ):
        if pt2d_visibility is None:
            cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
        else:
            if pt2d_visibility[k]:
                cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    #Openpose joint drawn as blue
    for k in np.arange( len(link_openpose) ):
        parent = link_openpose[k][0]
        child = link_openpose[k][1]
        if color is not None:
            c = color
        else:
            if bLeft[k]:
                c = (255,0,0)#BGR, Blue
            else:
                c = (0,0,0) #Right Black
        if pt2d_visibility is None:
            cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
        else:
            if pt2d_visibility[parent] and pt2d_visibility[child]:
                cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    return image
def Vis_Skeleton_2D_Openpose_hand(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None):
    """Draw a 21-joint OpenPose hand skeleton (wrist 0 plus four joints per
    finger chain).

    Joints are green dots; bones default to blue unless an explicit BGR color
    is given. Joints/bones with a zero visibility flag are skipped when
    pt2d_visibility is provided. `bVis` is accepted for signature consistency
    with the sibling visualizers but is not used here.
    """
    pt2d = np.reshape(pt2d, (-1, 2))  # accept flattened input
    if not isinstance(image, np.ndarray):
        # No canvas supplied: draw on a blank white 1000x1000 image.
        image = np.ones((1000, 1000, 3), np.uint8) * 255
    radius = 4

    # Wrist (0) to the tip of each finger chain.
    link_openpose = np.array([[0, 1], [1, 2], [2, 3], [3, 4],         # thumb
                              [0, 5], [5, 6], [6, 7], [7, 8],         # index
                              [0, 9], [9, 10], [10, 11], [11, 12],
                              [0, 13], [13, 14], [14, 15], [15, 16],
                              [0, 17], [17, 18], [18, 19], [19, 20]])

    for k, pt in enumerate(pt2d):
        if pt2d_visibility is None or pt2d_visibility[k]:
            cv2.circle(image, (int(pt[0]), int(pt[1])), radius, (0, 255, 0), -1)

    for parent, child in link_openpose:
        c = color if color is not None else (255, 0, 0)  # blue (BGR)
        if pt2d_visibility is None or (pt2d_visibility[parent] and pt2d_visibility[child]):
            cv2.line(image,
                     (int(pt2d[parent][0]), int(pt2d[parent][1])),
                     (int(pt2d[child][0]), int(pt2d[child][1])),
                     c, radius - 2)
    return image
def Vis_Skeleton_2D_Openpose18(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None):
    """Draw an OpenPose 18-joint (COCO-style) skeleton.

    Args:
        pt2d: 2D joints, reshaped to (-1, 2).
        pt2d_visibility: optional per-joint flags; invisible joints/bones skipped.
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        bVis: unused in this function (kept for signature consistency).
        color: optional BGR bone color; when None, bLeft bones blue, others black.
    Returns:
        The image with joints (green dots) and bones drawn.
    """
    pt2d = np.reshape(pt2d,(-1,2)) #Just in case. Make sure (32, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray):#not image: #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    #Openpose18
    link_openpose = [ [1,0] , [0,14] , [14,16] , [0,15], [15,17],
            [1,2],[2,3],[3,4], #Right Arm
            [1,5], [5,6], [6,7], #Left Arm
            [1,11], [11,12], [12,13], #Left Leg
            [8,1], [8,9], [9,10] #Right Leg
            ]
    bLeft = [ 1,1,1,1,1,
            0,0,0,
            1,1,1,
            1,1,1,
            0,0,0]
    # for i in np.arange( len(link) ):
    # Joints: green filled circles (skipped when marked invisible).
    for k in np.arange( len(pt2d) ):
        if pt2d_visibility is None:
            cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
        else:
            if pt2d_visibility[k]:
                cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,255,0),-1)
    #Openpose joint drawn as blue
    for k in np.arange( len(link_openpose) ):
        parent = link_openpose[k][0]
        child = link_openpose[k][1]
        if color is not None:
            c = color
        else:
            if bLeft[k]:
                c = (255,0,0)#BGR, Blue
            else:
                c = (0,0,0)
        if pt2d_visibility is None:
            cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
        else:
            if pt2d_visibility[parent] and pt2d_visibility[child]:
                cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    return image
def Vis_Skeleton_2D_SPIN24(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None):
    """Draw only the SPIN-24 subset of a 49-joint SPIN skeleton, in red.

    NOTE(review): despite the name, this expects the full 49-joint SPIN layout
    (25 OpenPose joints followed by 24 SPIN joints): joints are drawn from
    index 25 onward and link_spin24 is offset by +25. The OpenPose link table
    and bLeft below are unused here (that drawing is commented out).

    Args:
        pt2d: 2D joints, reshaped to (-1, 2); indices 25.. are drawn.
        pt2d_visibility: optional per-joint flags; invisible joints/bones skipped.
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        bVis: show the result via ImShow when True.
        color: optional BGR color for both joints and bones; defaults to red.
    Returns:
        The image with the SPIN-24 joints and bones drawn.
    """
    pt2d = np.reshape(pt2d,(-1,2)) #Just in case. Make sure (32, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray):#not image: #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    #Openpose25 in Spin Defition + SPIN global 24
    # 'OP Nose', 'OP Neck', 'OP RShoulder', #0,1,2
    # 'OP RElbow', 'OP RWrist', 'OP LShoulder', #3,4,5
    # 'OP LElbow', 'OP LWrist', 'OP MidHip', #6, 7,8
    # 'OP RHip', 'OP RKnee', 'OP RAnkle', #9,10,11
    # 'OP LHip', 'OP LKnee', 'OP LAnkle', #12,13,14
    # 'OP REye', 'OP LEye', 'OP REar', #15,16,17
    # 'OP LEar', 'OP LBigToe', 'OP LSmallToe', #18,19,20
    # 'OP LHeel', 'OP RBigToe', 'OP RSmallToe', 'OP RHeel', #21, 22, 23, 24 ##Total 25 joints for openpose
    link_openpose = [ [8,1], [1,0] , [0,16] , [16,18] , [0,15], [15,17],
            [1,2],[2,3],[3,4], #Right Arm
            [1,5], [5,6], [6,7], #Left Arm
            [8,12], [12,13], [13,14], [14,19], [19,20], [20,21], #Left Leg
            [8,9], [9,10], [10,11], [11,22], [22,23], [23,24] #Right left
            ]
    link_spin24 =[ [14,16], [16,12], [12,17] , [17,18] ,
            [12,9],[9,10],[10,11], #Right Arm
            [12,8], [8,7], [7,6], #Left Arm
            [14,3], [3,4], [4,5],
            [14,2], [2,1], [1,0]]
    link_spin24 = np.array(link_spin24) + 25  # shift into the 25..48 index range
    # bLeft = [ 1,1,1,1,0,0,
    #         0,0,0,
    #         1,1,1,
    #         1,1,1,1,1,1,
    #         0,0,0,0,0,0]
    bLeft = [ 0,0,0,0,
            1,1,1,
            0,0,0,
            1,1,1,
            0,0,0]
    # for i in np.arange( len(link) ):
    # Joints 25..: red (or `color`) filled circles, skipped when invisible.
    for k in np.arange( 25,len(pt2d) ):
        if color is not None:
            if pt2d_visibility is None:
                cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, color,-1)
            else:
                if pt2d_visibility[k]:
                    cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, color,-1)
        else:
            if pt2d_visibility is None:
                cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,0,255),-1)
            else:
                if pt2d_visibility[k]:
                    cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,0,255),-1)
    # # #Openpose joint drawn as blue
    # for k in np.arange( len(link_openpose) ):
    #     parent = link_openpose[k][0]
    #     child = link_openpose[k][1]
    #     if color is not None:
    #         c = color
    #     else:
    #         if True:#bLeft[k]:
    #             c = (255,0,0)#BGR, Blue
    #         else:
    #             c = (0,0,0)
    #     if pt2d_visibility is None:
    #         cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    #     else:
    #         if pt2d_visibility[parent] and pt2d_visibility[child]:
    #             cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    #SPIN24 joint drawn as red
    for k in np.arange( len(link_spin24) ):
        parent = link_spin24[k][0]
        child = link_spin24[k][1]
        if color is not None:
            c = color
        else:
            if True:#bLeft[k]:
                c = (0,0,255)#BGR, RED
            else:
                c = (0,0,0)
        if pt2d_visibility is None:
            cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
        else:
            if pt2d_visibility[parent] and pt2d_visibility[child]:
                cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    if bVis:
        ImShow(image)
    return image
def Vis_Skeleton_2D_SPIN49(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None):
    """Draw a full 49-joint SPIN skeleton: 25 OpenPose joints (bones in blue)
    plus 24 SPIN superset joints (dots and bones in red).

    Args:
        pt2d: 2D joints, reshaped to (-1, 2); expects the 49-joint SPIN layout
            (indices 0-24 OpenPose, 25-48 SPIN; link_spin24 is offset by +25).
        pt2d_visibility: optional per-joint flags; invisible joints/bones skipped.
        image: optional BGR canvas; a white 1000x1000 image is created when the
            argument is not an ndarray.
        bVis: show the result via ImShow when True.
        color: optional BGR color overriding both bone palettes.
    Returns:
        The image with both joint sets drawn.
    """
    pt2d = np.reshape(pt2d,(-1,2)) #Just in case. Make sure (32, 2)
    #Draw via opencv
    if not isinstance(image, np.ndarray):#not image: #If no image is given, generate Blank image
        image = np.ones((1000,1000,3),np.uint8) *255
    radius = 4
    #Openpose25 in Spin Defition + SPIN global 24
    # 'OP Nose', 'OP Neck', 'OP RShoulder', #0,1,2
    # 'OP RElbow', 'OP RWrist', 'OP LShoulder', #3,4,5
    # 'OP LElbow', 'OP LWrist', 'OP MidHip', #6, 7,8
    # 'OP RHip', 'OP RKnee', 'OP RAnkle', #9,10,11
    # 'OP LHip', 'OP LKnee', 'OP LAnkle', #12,13,14
    # 'OP REye', 'OP LEye', 'OP REar', #15,16,17
    # 'OP LEar', 'OP LBigToe', 'OP LSmallToe', #18,19,20
    # 'OP LHeel', 'OP RBigToe', 'OP RSmallToe', 'OP RHeel', #21, 22, 23, 24 ##Total 25 joints for openpose
    link_openpose = [ [8,1], [1,0] , [0,16] , [16,18] , [0,15], [15,17],
            [1,2],[2,3],[3,4], #Right Arm
            [1,5], [5,6], [6,7], #Left Arm
            [8,12], [12,13], [13,14], [14,19], [19,20], [20,21], #Left Leg
            [8,9], [9,10], [10,11], [11,22], [22,23], [23,24] #Right left
            ]
    link_spin24 =[ [14,16], [16,12], [12,17] , [17,18] ,
            [12,9],[9,10],[10,11], #Right Arm
            [12,8], [8,7], [7,6], #Left Arm
            [14,3], [3,4], [4,5],
            [14,2], [2,1], [1,0]]
    link_spin24 = np.array(link_spin24) + 25  # shift into the 25..48 index range
    # bLeft = [ 1,1,1,1,0,0,
    #         0,0,0,
    #         1,1,1,
    #         1,1,1,1,1,1,
    #         0,0,0,0,0,0]
    # NOTE(review): bLeft is unused below — both bone loops bypass it with
    # `if True:` guards.
    bLeft = [ 0,0,0,0,
            1,1,1,
            0,0,0,
            1,1,1,
            0,0,0]
    # for i in np.arange( len(link) ):
    # SPIN joints 25..: red (or `color`) filled circles, skipped when invisible.
    for k in np.arange( 25,len(pt2d) ):
        if color is not None:
            if pt2d_visibility is None:
                cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, color,-1)
            else:
                if pt2d_visibility[k]:
                    cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, color,-1)
        else:
            if pt2d_visibility is None:
                cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,0,255),-1)
            else:
                if pt2d_visibility[k]:
                    cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1]) ), radius, (0,0,255),-1)
    # #Openpose joint drawn as blue
    for k in np.arange( len(link_openpose) ):
        parent = link_openpose[k][0]
        child = link_openpose[k][1]
        if color is not None:
            c = color
        else:
            if True:#bLeft[k]:
                c = (255,0,0)#BGR, Blue
            else:
                c = (0,0,0)
        if pt2d_visibility is None:
            cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
        else:
            if pt2d_visibility[parent] and pt2d_visibility[child]:
                cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    #SPIN24 joint drawn as red
    for k in np.arange( len(link_spin24) ):
        parent = link_spin24[k][0]
        child = link_spin24[k][1]
        if color is not None:
            c = color
        else:
            if True:#bLeft[k]:
                c = (0,0,255)#BGR, RED
            else:
                c = (0,0,0)
        if pt2d_visibility is None:
            cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
        else:
            if pt2d_visibility[parent] and pt2d_visibility[child]:
                cv2.line(image, (int(pt2d[parent][0]), int(pt2d[parent][1])), (int(pt2d[child][0]), int(pt2d[child][1])), c, radius - 2)
    if bVis:
        ImShow(image)
    return image
def Vis_Skeleton_2D_coco(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None , offsetXY =None):
    """Draw a COCO 17-joint 2D skeleton on an image via OpenCV.

    Args:
        pt2d: array-like of 2D joint locations; reshaped to (17, 2).
        pt2d_visibility: optional per-joint visibility flags; when given,
            joints (and bones whose both endpoints are visible) are drawn
            only if visible.
        image: optional BGR np.ndarray canvas. If not an ndarray, a white
            1000x1000 canvas is created.
        bVis: if True, show the result via ImShow.
        color: optional BGR tuple used for both joints and bones. When None,
            joints are red (0,0,255) and bones are blue (255,0,0).
        offsetXY: optional (x, y) offset added to every joint before drawing.
            (Bug fix: previously accepted but silently ignored.)

    Returns:
        The image with the skeleton drawn on it.
    """
    pt2d = np.reshape(pt2d, (-1, 2))        #Just in case. Make sure (17, 2)
    if offsetXY is not None:                # bug fix: apply the offset like Vis_Skeleton_2D_general does
        pt2d = pt2d + np.array(offsetXY)
    #Draw via opencv
    if not isinstance(image, np.ndarray):   #If no image is given, generate Blank image
        image = np.ones((1000, 1000, 3), np.uint8) * 255
    radius = 4
    # Default palette matches the original: red joints, blue bones (BGR).
    joint_color = color if color is not None else (0, 0, 255)
    bone_color = color if color is not None else (255, 0, 0)
    # COCO-17 bone list: face, right arm, left arm, left leg, right leg.
    link_coco = [ [0,1], [1,3], [0,2], [2,4],
                  [6,8], [8,10],                 #Right Arm
                  [5,7], [7,9],                  #Left Arm
                  [15,13], [13,11], [11,5],      #Left Leg
                  [16,14], [14,12], [12,6],      #Right Leg
                ]
    # Joints: draw each point unless a visibility array says to skip it.
    for k in range(len(pt2d)):
        if pt2d_visibility is None or pt2d_visibility[k]:
            cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1])), radius, joint_color, -1)
    # Bones: draw only when both endpoints are visible (or no visibility given).
    for parent, child in link_coco:
        if pt2d_visibility is None or (pt2d_visibility[parent] and pt2d_visibility[child]):
            cv2.line(image,
                     (int(pt2d[parent][0]), int(pt2d[parent][1])),
                     (int(pt2d[child][0]), int(pt2d[child][1])),
                     bone_color, radius - 2)
    if bVis:
        ImShow(image)
    return image
def Vis_Skeleton_2D_general(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None , offsetXY =None):
    """Draw an arbitrary set of 2D joints as filled circles via OpenCV.

    A 49-joint input is dispatched to Vis_Skeleton_2D_SPIN49 (SPIN's
    25 OpenPose + 24 superset layout).

    Args:
        pt2d: array-like of 2D joint locations; reshaped to (-1, 2).
        pt2d_visibility: optional per-joint visibility flags; when given,
            invisible joints are skipped. (Bug fix: previously ignored.)
        image: optional BGR np.ndarray canvas; a white 1000x1000 canvas is
            created when no ndarray is supplied.
        bVis: if True, show the result via ImShow.
        color: optional BGR tuple for the joints; defaults to green
            (0, 255, 0). (Bug fix: previously ignored.)
        offsetXY: optional (x, y) offset added to every joint.

    Returns:
        The image with the joints drawn on it.
    """
    pt2d = np.reshape(pt2d, (-1, 2))        #Just in case. Make sure (N, 2)
    if offsetXY is not None:
        pt2d = pt2d + np.array(offsetXY)
    if pt2d.shape[0] == 49:                 #SPIN 25 (openpose) + 24 (superset) joint
        return Vis_Skeleton_2D_SPIN49(pt2d, pt2d_visibility, image, bVis, color)
    #Draw via opencv
    if not isinstance(image, np.ndarray):   #If no image is given, generate Blank image
        image = np.ones((1000, 1000, 3), np.uint8) * 255
    radius = 4
    c = color if color is not None else (0, 255, 0)
    for k in range(len(pt2d)):
        # Honor the visibility mask when one is provided.
        if pt2d_visibility is not None and not pt2d_visibility[k]:
            continue
        cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1])), radius, c, -1)
    if bVis:
        ImShow(image)
    return image
def Vis_Skeleton_3Dto2D_general(pt2d, pt2d_visibility = None, image = None, bVis = False, color=None, offsetXY =None):
    """Draw 3D joints on a 2D image by using only their (x, y) components.

    A 49-joint input is dispatched to Vis_Skeleton_2D_SPIN49, which also
    reads only the first two coordinates of each joint.

    Args:
        pt2d: array-like of 3D joint locations; reshaped to (-1, 3).
        pt2d_visibility: optional per-joint visibility flags; when given,
            invisible joints are skipped. (Bug fix: previously ignored.)
        image: optional BGR np.ndarray canvas; a white 1000x1000 canvas is
            created when no ndarray is supplied.
        bVis: if True, show the result via ImShow.
        color: optional BGR tuple for the joints; defaults to green
            (0, 255, 0). (Bug fix: previously ignored.)
        offsetXY: optional (x, y) offset added to every joint's x/y.
            (Bug fix: previously accepted but silently ignored,
            inconsistent with Vis_Skeleton_2D_general.)

    Returns:
        The image with the joints drawn on it.
    """
    pt2d = np.reshape(pt2d, (-1, 3))        #Just in case. Make sure (N, 3); only x,y are drawn
    if offsetXY is not None:
        # Pad the 2D offset with a zero z so it broadcasts over (N, 3).
        off = np.zeros(3)
        off[:2] = np.asarray(offsetXY, dtype=np.float64)[:2]
        pt2d = pt2d + off
    if pt2d.shape[0] == 49:                 #SPIN 25 (openpose) + 24 (superset) joint
        return Vis_Skeleton_2D_SPIN49(pt2d, pt2d_visibility, image, bVis, color)
    #Draw via opencv
    if not isinstance(image, np.ndarray):   #If no image is given, generate Blank image
        image = np.ones((1000, 1000, 3), np.uint8) * 255
    radius = 4
    c = color if color is not None else (0, 255, 0)
    for k in range(len(pt2d)):
        # Honor the visibility mask when one is provided.
        if pt2d_visibility is not None and not pt2d_visibility[k]:
            continue
        cv2.circle(image, (int(pt2d[k][0]), int(pt2d[k][1])), radius, c, -1)
    if bVis:
        ImShow(image)
    return image
#H36m skeleton32
# def Vis_Skeleton_H36m16(inputImg, coco_annot):
| 34.256975
| 167
| 0.511895
| 6,974
| 46,658
| 3.366217
| 0.057643
| 0.015164
| 0.015207
| 0.014653
| 0.805844
| 0.787272
| 0.767976
| 0.749404
| 0.745144
| 0.743866
| 0
| 0.117519
| 0.309165
| 46,658
| 1,361
| 168
| 34.282145
| 0.610803
| 0.22487
| 0
| 0.740654
| 0
| 0
| 0.020012
| 0.005374
| 0
| 0
| 0
| 0.000735
| 0.003505
| 1
| 0.031542
| false
| 0
| 0.007009
| 0.001168
| 0.07243
| 0.010514
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58c0712e18120b6b6567dcb6fb407d73f2555a4d
| 113
|
py
|
Python
|
gym_rover/envs/__init__.py
|
eklinkhammer/gym-rover
|
38ed96f6f7ea4e5fed4432cb3967d2317caf145e
|
[
"MIT"
] | null | null | null |
gym_rover/envs/__init__.py
|
eklinkhammer/gym-rover
|
38ed96f6f7ea4e5fed4432cb3967d2317caf145e
|
[
"MIT"
] | null | null | null |
gym_rover/envs/__init__.py
|
eklinkhammer/gym-rover
|
38ed96f6f7ea4e5fed4432cb3967d2317caf145e
|
[
"MIT"
] | null | null | null |
from gym_rover.envs.rover_env import RoverEnv
from gym_rover.envs.rover_cont_feature_env import RoverContFeature
| 37.666667
| 66
| 0.893805
| 18
| 113
| 5.277778
| 0.555556
| 0.147368
| 0.252632
| 0.336842
| 0.442105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070796
| 113
| 2
| 67
| 56.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
450378560bd48b3004fd9291c406dc7b98876601
| 68,615
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/ratio_based_results/EightThreads_fmm/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/ratio_based_results/EightThreads_fmm/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/ratio_based_results/EightThreads_fmm/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.479039,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.578947,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 2.43065,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 1.00972,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.74847,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 1.0028,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.76099,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.625412,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 10.5972,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.459203,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0366031,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.450386,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.270703,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.909589,
'Execution Unit/Register Files/Runtime Dynamic': 0.307306,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 1.21956,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 2.39307,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 7.44569,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00341119,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00341119,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00295342,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00113362,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00388867,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0136645,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0333394,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.260233,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.654692,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.88387,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.8458,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0203623,
'L2/Runtime Dynamic': 0.00535402,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.09106,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.37633,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0923316,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0923316,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.52884,
'Load Store Unit/Runtime Dynamic': 1.92401,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.227674,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.455348,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0808023,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0811066,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.10733,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.698185,
'Memory Management Unit/Runtime Dynamic': 0.188437,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 29.3751,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.60205,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0709095,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.489926,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 2.16289,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 13.5722,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.128385,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.303528,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.649066,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.233283,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.376276,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.189932,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.79949,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.167297,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.31928,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.122622,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00978492,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.120622,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0723655,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.243244,
'Execution Unit/Register Files/Runtime Dynamic': 0.0821504,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.28622,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.565324,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.15587,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00115923,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00115923,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00106589,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000443362,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00103954,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00442388,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00910653,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0695668,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.42505,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.176565,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.23628,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.85832,
'Instruction Fetch Unit/Runtime Dynamic': 0.495942,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00685529,
'L2/Runtime Dynamic': 0.00211392,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.02168,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.379413,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0253823,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0253822,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.14154,
'Load Store Unit/Runtime Dynamic': 0.529972,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0625885,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.125177,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0222129,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0223152,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.275133,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0289464,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.4694,
'Memory Management Unit/Runtime Dynamic': 0.0512616,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.3849,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.322564,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0144506,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.111555,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.44857,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.68373,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.101266,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.282228,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.513423,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.184383,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.297404,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.150119,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.631907,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.132165,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.00528,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0969966,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00773387,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0951975,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0571967,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.192194,
'Execution Unit/Register Files/Runtime Dynamic': 0.0649306,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.225877,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.446203,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.83065,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000909979,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000909979,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000838712,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000349904,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000821635,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00348031,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00707696,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0549847,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.4975,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.138573,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.186753,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.88575,
'Instruction Fetch Unit/Runtime Dynamic': 0.390868,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00475341,
'L2/Runtime Dynamic': 0.00145611,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.84397,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.293174,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0196332,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0196331,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.93669,
'Load Store Unit/Runtime Dynamic': 0.409631,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.048412,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.0968238,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0171816,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0172526,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.217461,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0227178,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.403085,
'Memory Management Unit/Runtime Dynamic': 0.0399704,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.825,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.255154,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.011424,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.088196,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.354774,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.02734,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.101283,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.282241,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.513519,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.184407,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.297441,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.150138,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.631986,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.132177,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.00547,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0970147,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00773484,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0952107,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0572039,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.192225,
'Execution Unit/Register Files/Runtime Dynamic': 0.0649387,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.225909,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.446257,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.8308,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000910016,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000910016,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000838741,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000349914,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000821739,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00348051,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0070774,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0549916,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.49793,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.138603,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.186776,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.88621,
'Instruction Fetch Unit/Runtime Dynamic': 0.390929,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00470251,
'L2/Runtime Dynamic': 0.00142649,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.84382,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.293056,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0196282,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0196283,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.93651,
'Load Store Unit/Runtime Dynamic': 0.409484,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0483998,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.0968001,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0171772,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0172477,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.217489,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0227227,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.403105,
'Memory Management Unit/Runtime Dynamic': 0.0399703,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.8255,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.255201,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0114257,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0882066,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.354833,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.02744,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.6143286852043806,
'Runtime Dynamic': 0.6143286852043806,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0411791,
'Runtime Dynamic': 0.0230206,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 81.4516,
'Peak Power': 114.564,
'Runtime Dynamic': 23.3337,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 81.4104,
'Total Cores/Runtime Dynamic': 23.3107,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.0411791,
'Total L3s/Runtime Dynamic': 0.0230206,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.071116
| 124
| 0.68211
| 8,082
| 68,615
| 5.785078
| 0.067805
| 0.123538
| 0.112929
| 0.093423
| 0.938188
| 0.930125
| 0.917485
| 0.883991
| 0.860592
| 0.840787
| 0
| 0.132046
| 0.22431
| 68,615
| 914
| 125
| 75.071116
| 0.746411
| 0
| 0
| 0.642232
| 0
| 0
| 0.657354
| 0.048094
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
451c6045eaa9f0ebde5dd84e99563a57076003cd
| 122
|
py
|
Python
|
pompom/apps/huddle_board/utils.py
|
AsanteHS/Pompom
|
ef660f5944439f3de9f4d864b91d02edbddf207b
|
[
"MIT"
] | 1
|
2017-09-01T23:17:28.000Z
|
2017-09-01T23:17:28.000Z
|
pompom/apps/huddle_board/utils.py
|
AsanteHS/Pompom
|
ef660f5944439f3de9f4d864b91d02edbddf207b
|
[
"MIT"
] | 1
|
2018-05-16T02:43:14.000Z
|
2018-05-16T02:43:14.000Z
|
pompom/apps/huddle_board/utils.py
|
AsanteHS/Pompom
|
ef660f5944439f3de9f4d864b91d02edbddf207b
|
[
"MIT"
] | 1
|
2018-05-15T12:49:51.000Z
|
2018-05-15T12:49:51.000Z
|
def truncate_string(text, max_length=50):
    """Truncate ``text`` to at most ``max_length`` characters.

    Strings longer than ``max_length`` are cut and suffixed with ``'...'``
    so the result is exactly ``max_length`` characters long; shorter
    strings are returned unchanged.

    :param text: the string to truncate
    :param max_length: maximum length of the returned string
    :return: the (possibly truncated) string
    """
    if len(text) <= max_length:
        return text
    # max(0, ...) guards max_length < 3: the previous negative slice
    # index kept almost the whole string and returned a result longer
    # than max_length.
    return text[:max(0, max_length - 3)] + '...'
| 30.5
| 78
| 0.672131
| 19
| 122
| 4.105263
| 0.631579
| 0.269231
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.163934
| 122
| 3
| 79
| 40.666667
| 0.735294
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
188fcae31a307ac0095903de37168767b5801a5e
| 14,322
|
py
|
Python
|
src/OTLMOW/OTLModel/Datatypes/KlLEKantopsluitingBijkomendeParameter.py
|
davidvlaminck/OTLClassPython
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | 2
|
2022-02-01T08:58:11.000Z
|
2022-02-08T13:35:17.000Z
|
src/OTLMOW/OTLModel/Datatypes/KlLEKantopsluitingBijkomendeParameter.py
|
davidvlaminck/OTLMOW
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | null | null | null |
src/OTLMOW/OTLModel/Datatypes/KlLEKantopsluitingBijkomendeParameter.py
|
davidvlaminck/OTLMOW
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from OTLMOW.OTLModel.Datatypes.KeuzelijstField import KeuzelijstField
from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde
# Generated with OTLEnumerationCreator. To modify: extend, do not edit
class KlLEKantopsluitingBijkomendeParameter(KeuzelijstField):
    """Gedetailleerder typeren van de kantopsluiting."""
    # Generated enumeration ("keuzelijst"): class-level metadata plus the
    # closed set of allowed values.  Each options entry maps the machine
    # value (invulwaarde) to a KeuzelijstWaarde carrying the human-readable
    # label, the definition and the concept URI for that value.
    naam = 'KlLEKantopsluitingBijkomendeParameter'
    label = 'Kantopsluiting bijkomende parameter'
    objectUri = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#KlLEKantopsluitingBijkomendeParameter'
    definition = 'Gedetailleerder typeren van de kantopsluiting.'
    codelist = 'https://wegenenverkeer.data.vlaanderen.be/id/conceptscheme/KlLEKantopsluitingBijkomendeParameter'
    options = {
        'eindschikking-voor-schampkant': KeuzelijstWaarde(
            invulwaarde='eindschikking-voor-schampkant',
            label='eindschikking voor schampkant',
            definitie='eindschikking voor schampkant',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/eindschikking-voor-schampkant'),
        'gebogen-kantstrook-of-watergreppel-straal-groter-5m': KeuzelijstWaarde(
            invulwaarde='gebogen-kantstrook-of-watergreppel-straal-groter-5m',
            label='gebogen kantstrook of watergreppel straal groter 5m',
            definitie='gebogen kantstrook of watergreppel, met straal groter dan 5m',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/gebogen-kantstrook-of-watergreppel-straal-groter-5m'),
        'gebogen-kantstrook-of-watergreppel-straal-kleiner-of-gelijk-5m': KeuzelijstWaarde(
            invulwaarde='gebogen-kantstrook-of-watergreppel-straal-kleiner-of-gelijk-5m',
            label='gebogen kantstrook of watergreppel straal kleiner of gelijk 5m',
            definitie='gebogen kantstrook of watergreppel, met straal kleiner of gelijk 5m',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/gebogen-kantstrook-of-watergreppel-straal-kleiner-of-gelijk-5m'),
        'gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m': KeuzelijstWaarde(
            invulwaarde='gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m',
            label='gebogen trottoirband of Trottoirband-watergreppel straal groter 5m',
            definitie='Gebogen trottoirband of trottoirband-watergreppel, met straal groter dan 5 m.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m'),
        'gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m-reflectoren-schuin': KeuzelijstWaarde(
            invulwaarde='gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m-reflectoren-schuin',
            label='gebogen trottoirband of Trottoirband-watergreppel straal groter 5m reflectoren schuin',
            definitie='Gebogen trottoirband of trottoirband-watergreppel, met straal groter dan 5 m met schuin geplaatste reflectoren.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m-reflectoren-schuin'),
        'gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m-reflectoren-verticaal': KeuzelijstWaarde(
            invulwaarde='gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m-reflectoren-verticaal',
            label='gebogen trottoirband of Trottoirband-watergreppel straal groter 5m reflectoren verticaal',
            definitie='Gebogen trottoirband of trottoirband-watergreppel, met straal groter dan 5 m met verticaal geplaatste reflectoren.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/gebogen-trottoirband-of-Trottoirband-watergreppel-straal-groter-5m-reflectoren-verticaal'),
        'gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m': KeuzelijstWaarde(
            invulwaarde='gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m',
            label='gebogen trottoirband of Trottoirband-watergreppel straal kleiner of gelijk 5m',
            definitie='gebogen trottoirband of trottoirband-watergreppel, met straal kleiner of gelijk aan 5m',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m'),
        'gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m-reflectoren-schuin': KeuzelijstWaarde(
            invulwaarde='gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m-reflectoren-schuin',
            label='gebogen trottoirband of Trottoirband-watergreppel straal kleiner of gelijk 5m reflectoren schuin',
            definitie='Gebogen trottoirband of trottoirband-watergreppel, met straal kleiner of gelijk aan 5m en schuin geplaatste reflectoren.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m-reflectoren-schuin'),
        'gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m-reflectoren-verticaal': KeuzelijstWaarde(
            invulwaarde='gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m-reflectoren-verticaal',
            label='gebogen trottoirband of Trottoirband-watergreppel straal kleiner of gelijk 5m reflectoren verticaal',
            definitie='Gebogen trottoirband of trottoirband-watergreppel, met straal kleiner of gelijk aan 5m met verticaal geplaatste reflectoren.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/gebogen-trottoirband-of-Trottoirband-watergreppel-straal-kleiner-of-gelijk-5m-reflectoren-verticaal'),
        'hoekstukken.-hoek-90°-of-270°': KeuzelijstWaarde(
            invulwaarde='hoekstukken.-hoek-90°-of-270°',
            label='hoekstukken. hoek 90° of 270°',
            definitie='hoekstukken waarbij de hoek gelijk is aan 90° of 270°',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/hoekstukken.-hoek-90°-of-270°'),
        'hoekstukken.-hoek-verschillend-90°-of-270°': KeuzelijstWaarde(
            invulwaarde='hoekstukken.-hoek-verschillend-90°-of-270°',
            label='hoekstukken. hoek verschillend 90° of 270°',
            definitie='Hoekstukken, hoek verschillend van 90° of 270°',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/hoekstukken.-hoek-verschillend-90°-of-270°'),
        'overgangstrottoirbanden': KeuzelijstWaarde(
            invulwaarde='overgangstrottoirbanden',
            label='overgangstrottoirbanden',
            definitie='Overgangstrottoirbanden',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/overgangstrottoirbanden'),
        'rechte-kantstrook-of-watergreppel': KeuzelijstWaarde(
            invulwaarde='rechte-kantstrook-of-watergreppel',
            label='rechte kantstrook of watergreppel',
            definitie='Rechte kantstrook of watergreppel.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/rechte-kantstrook-of-watergreppel'),
        'rechte-schampkant': KeuzelijstWaarde(
            invulwaarde='rechte-schampkant',
            label='rechte schampkant',
            definitie='rechte schampkant',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/rechte-schampkant'),
        'rechte-trottoirband-of-Trottoirband-watergreppel-reflectoren-schuin': KeuzelijstWaarde(
            invulwaarde='rechte-trottoirband-of-Trottoirband-watergreppel-reflectoren-schuin',
            label='rechte trottoirband of Trottoirband-watergreppel reflectoren schuin',
            definitie='Rechte trottoirband of trottoirband-watergreppel met schuin geplaatste reflectoren.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/rechte-trottoirband-of-Trottoirband-watergreppel-reflectoren-schuin'),
        'rechte-trottoirband-of-trottoirband-watergreppel': KeuzelijstWaarde(
            invulwaarde='rechte-trottoirband-of-trottoirband-watergreppel',
            label='rechte trottoirband of trottoirband-watergreppel',
            definitie='Rechte trottoirband of trottoirband-watergreppel.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/rechte-trottoirband-of-trottoirband-watergreppel'),
        'rechte-trottoirband-of-trottoirbandwater-greppel-reflectoren-verticaal': KeuzelijstWaarde(
            invulwaarde='rechte-trottoirband-of-trottoirbandwater-greppel-reflectoren-verticaal',
            label='rechte trottoirband of trottoirbandwater-greppel reflectoren verticaal',
            definitie='Rechte trottoirband of trottoirband-watergreppel met verticaal geplaatste reflectoren.',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/rechte-trottoirband-of-trottoirbandwater-greppel-reflectoren-verticaal'),
        'supplement-voor-in-verstek-zagen': KeuzelijstWaarde(
            invulwaarde='supplement-voor-in-verstek-zagen',
            label='supplement voor in verstek zagen',
            definitie='supplement voor in verstek zagen',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/supplement-voor-in-verstek-zagen'),
        'trottoirbanden-voor-minder-validen': KeuzelijstWaarde(
            invulwaarde='trottoirbanden-voor-minder-validen',
            label='trottoirbanden voor minder-validen',
            definitie='trottoirbanden voor minder-validen',
            objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlLEKantopsluitingBijkomendeParameter/trottoirbanden-voor-minder-validen')
    }
| 154
| 332
| 0.553065
| 1,003
| 14,322
| 7.917248
| 0.087737
| 0.079335
| 0.13424
| 0.196197
| 0.83113
| 0.793099
| 0.769047
| 0.665785
| 0.626747
| 0.547034
| 0
| 0.010157
| 0.374459
| 14,322
| 92
| 333
| 155.673913
| 0.873981
| 0.009007
| 0
| 0
| 1
| 0.244186
| 0.526754
| 0.183433
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.104651
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e18e8551a78c6a3e3618e9a7982341796593028a
| 3,973
|
py
|
Python
|
test/transforms/test_gdc.py
|
LingxiaoShawn/pytorch_geometric
|
50b7bfc4a59b5b6f7ec547ff862985f3b2e22798
|
[
"MIT"
] | 1
|
2022-02-21T13:23:19.000Z
|
2022-02-21T13:23:19.000Z
|
test/transforms/test_gdc.py
|
LingxiaoShawn/pytorch_geometric
|
50b7bfc4a59b5b6f7ec547ff862985f3b2e22798
|
[
"MIT"
] | null | null | null |
test/transforms/test_gdc.py
|
LingxiaoShawn/pytorch_geometric
|
50b7bfc4a59b5b6f7ec547ff862985f3b2e22798
|
[
"MIT"
] | null | null | null |
import torch
from torch_geometric.data import Data
from torch_geometric.transforms import GDC
from torch_geometric.utils import to_dense_adj
def test_gdc():
    """Exercise GDC over several normalization/diffusion/sparsification
    combinations and check basic properties of the diffused adjacency
    matrix: non-negativity, symmetry (sym normalization), stochastic
    rows/columns (row/col normalization) and top-k sparsity.
    """
    edge_index = torch.tensor([[0, 0, 1, 1, 2, 2, 2, 3, 3, 4],
                               [1, 2, 0, 2, 0, 1, 3, 2, 4, 3]])

    def diffuse(**gdc_kwargs):
        # Each case starts from a fresh 5-node graph so results do not
        # depend on earlier transforms; returns the dense diffusion matrix.
        data = Data(edge_index=edge_index, num_nodes=5)
        data = GDC(self_loop_weight=1, **gdc_kwargs)(data)
        return to_dense_adj(data.edge_index, edge_attr=data.edge_attr).squeeze()

    def assert_stochastic(sums):
        # Every row/column sum is either 1 (reachable node) or 0 (pruned).
        assert torch.all(torch.isclose(sums, torch.tensor(1.0))
                         | torch.isclose(sums, torch.tensor(0.0)))

    # Symmetric normalization, exact PPR diffusion -> symmetric matrix.
    mat = diffuse(normalization_in='sym', normalization_out='sym',
                  diffusion_kwargs=dict(method='ppr', alpha=0.15),
                  sparsification_kwargs=dict(method='threshold', avg_degree=2),
                  exact=True)
    assert torch.all(mat >= -1e-8)
    assert torch.allclose(mat, mat.t(), atol=1e-4)

    # Symmetric normalization, exact heat-kernel diffusion.
    mat = diffuse(normalization_in='sym', normalization_out='sym',
                  diffusion_kwargs=dict(method='heat', t=10),
                  sparsification_kwargs=dict(method='threshold', avg_degree=2),
                  exact=True)
    assert torch.all(mat >= -1e-8)
    assert torch.allclose(mat, mat.t(), atol=1e-4)

    # Column normalization with per-column top-k sparsification.
    mat = diffuse(normalization_in='col', normalization_out='col',
                  diffusion_kwargs=dict(method='heat', t=10),
                  sparsification_kwargs=dict(method='topk', k=2, dim=0),
                  exact=True)
    assert torch.all(mat >= -1e-8)
    assert_stochastic(mat.sum(0))
    assert torch.all((~torch.isclose(mat, torch.tensor(0.0))).sum(0) == 2)

    # Row normalization with per-row top-k sparsification.
    mat = diffuse(normalization_in='row', normalization_out='row',
                  diffusion_kwargs=dict(method='heat', t=5),
                  sparsification_kwargs=dict(method='topk', k=2, dim=1),
                  exact=True)
    assert torch.all(mat >= -1e-8)
    assert_stochastic(mat.sum(1))
    assert torch.all((~torch.isclose(mat, torch.tensor(0.0))).sum(1) == 2)

    # Row normalization with polynomial-coefficient diffusion.
    mat = diffuse(normalization_in='row', normalization_out='row',
                  diffusion_kwargs=dict(method='coeff', coeffs=[0.8, 0.3, 0.1]),
                  sparsification_kwargs=dict(method='threshold', eps=0.1),
                  exact=True)
    assert torch.all(mat >= -1e-8)
    assert_stochastic(mat.sum(1))

    # Approximate (exact=False) PPR with mixed in/out normalizations.
    mat = diffuse(normalization_in='sym', normalization_out='col',
                  diffusion_kwargs=dict(method='ppr', alpha=0.15, eps=1e-4),
                  sparsification_kwargs=dict(method='threshold', avg_degree=2),
                  exact=False)
    assert torch.all(mat >= -1e-8)
    assert_stochastic(mat.sum(0))
| 43.659341
| 76
| 0.602567
| 561
| 3,973
| 4.078431
| 0.11943
| 0.074738
| 0.068182
| 0.089161
| 0.909091
| 0.892045
| 0.88986
| 0.880245
| 0.838724
| 0.815122
| 0
| 0.035278
| 0.257991
| 3,973
| 90
| 77
| 44.144444
| 0.740841
| 0
| 0
| 0.777778
| 0
| 0
| 0.025925
| 0
| 0
| 0
| 0
| 0
| 0.17284
| 1
| 0.012346
| false
| 0
| 0.049383
| 0
| 0.061728
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e18f3c9359c86b6a08ea544457886c150d3ee386
| 30,056
|
py
|
Python
|
api/test/testModel.py
|
Major-League-Summer-Baseball/mlsb-platform
|
ecb2a6a15dcaa12c4e18a6d9c5d1b4caf83e05a4
|
[
"Apache-2.0"
] | 1
|
2021-04-22T02:06:33.000Z
|
2021-04-22T02:06:33.000Z
|
api/test/testModel.py
|
Major-League-Summer-Baseball/mlsb-platform
|
ecb2a6a15dcaa12c4e18a6d9c5d1b4caf83e05a4
|
[
"Apache-2.0"
] | 42
|
2021-03-12T23:18:30.000Z
|
2022-03-13T20:57:36.000Z
|
api/test/testModel.py
|
Major-League-Summer-Baseball/mlsb-platform
|
ecb2a6a15dcaa12c4e18a6d9c5d1b4caf83e05a4
|
[
"Apache-2.0"
] | 1
|
2019-04-21T23:24:54.000Z
|
2019-04-21T23:24:54.000Z
|
'''
@author: Dallas Fraser
@author: 2016-04-12
@organization: MLSB API
@summary: Holds the tests for the model
'''
from api.model import Player, Team, Bat, Sponsor, League, Game, Division,\
JoinLeagueRequest
from api.errors import InvalidField, PlayerDoesNotExist, TeamDoesNotExist,\
LeagueDoesNotExist, SponsorDoesNotExist,\
NonUniqueEmail, GameDoesNotExist, DivisionDoesNotExist,\
HaveLeagueRequestException
from api.test.BaseTest import TestSetup, INVALID_ID, VALID_YEAR
from sqlalchemy.orm import undefer
from datetime import datetime
import unittest
import uuid
class SponsorModelTest(TestSetup):
    """Tests for constructing and updating the Sponsor model."""

    def testSponsorInit(self):
        """Constructor accepts string fields; non-strings raise InvalidField."""
        # valid data
        sponsor_name = str(uuid.uuid1())
        Sponsor(sponsor_name)
        Sponsor(sponsor_name,
                link="http://good-sponsor.ca",
                description="Good Descript")
        # now bad stuff
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Sponsor(1)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Sponsor(sponsor_name, link=1)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Sponsor(sponsor_name,
                    link="http://good-sponsor.ca",
                    description=1)

    def testSponsorUpdate(self):
        """update accepts string fields; non-strings raise InvalidField."""
        # valid sponsor
        sponsor_name = str(uuid.uuid1())
        new_sponsor_name = str(uuid.uuid1())
        s = Sponsor(sponsor_name)
        # valid update
        s.update(name=new_sponsor_name, link="New Link", description="new")
        # now bad stuff
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            s.update(name=1)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            s.update(sponsor_name, link=1)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            s.update(sponsor_name,
                     link="http://good-sponsor.ca",
                     description=1)
class PlayerModelTest(TestSetup):
    """Tests for constructing and updating the Player model."""

    def testPlayerInit(self):
        """Constructor validates name, email uniqueness and gender."""
        player_name = str(uuid.uuid1())
        email = player_name + "@mlsb.ca"
        # valid players
        Player(player_name, email)
        Player(player_name,
               email,
               gender="m",
               password="Good password")
        # now bad stuff
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Player(1, email)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Player(player_name, 1)
        # duplicate email: setup is outside the assertion so only the
        # constructor itself is expected to raise NonUniqueEmail
        some_email = str(uuid.uuid1()) + "@mlsb.ca"
        self.add_player(str(uuid.uuid1()), some_email)
        with self.assertRaises(NonUniqueEmail,
                               msg="Should raise email exception"):
            Player(str(uuid.uuid1()), some_email)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Player(player_name, email, gender="XX")

    def testPlayerUpdate(self):
        """update validates name, email uniqueness and gender."""
        player_name = str(uuid.uuid1())
        email = player_name + "@mlsb.ca"
        new_player_name = str(uuid.uuid1())
        new_email = new_player_name + "@mlsb.ca"
        p1 = Player(player_name, email)
        # valid update
        p1.update(name=new_player_name,
                  email=new_email,
                  gender="f",
                  password="n")
        # now bad stuff
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            p1.update(name=1, email=email)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            p1.update(name=player_name, email=1)
        # duplicate email on update
        some_email = str(uuid.uuid1()) + "@mlsb.ca"
        self.add_player(str(uuid.uuid1()), some_email)
        with self.assertRaises(NonUniqueEmail,
                               msg="Should raise email exception"):
            p1.update(name=str(uuid.uuid1()), email=some_email)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            p1.update(name=player_name, email=email, gender="XX")
class LeagueModelTest(TestSetup):
    """Tests for constructing and updating the League model."""

    def testLeagueInit(self):
        """Constructor accepts a string name; non-strings raise."""
        League(str(uuid.uuid1()))
        # now bad stuff
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            League(1)

    def testLeagueUpdate(self):
        """update accepts a string name; non-strings raise."""
        league = League(str(uuid.uuid1()))
        league.update(str(uuid.uuid1()))
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            league.update(1)
class DivisionModelTest(TestSetup):
    """Tests for constructing and updating the Division model."""

    def testDivisionInit(self):
        """Constructor accepts name and optional shortname; non-strings raise."""
        Division(str(uuid.uuid1()))
        Division(str(uuid.uuid1()), shortname=str(uuid.uuid1()))
        # now invalid init
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Division(1)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Division(str(uuid.uuid1()), shortname=1)

    def testDivisionUpdate(self):
        """update accepts name and/or shortname; non-strings raise."""
        division = Division(str(uuid.uuid1()))
        division.update(name=str(uuid.uuid1()))
        division.update(name=str(uuid.uuid1()), shortname=str(uuid.uuid1()))
        division.update(shortname=str(uuid.uuid1()))
        # now invalid updates
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            division.update(name=1)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            division.update(shortname=1)
class TeamModelTest(TestSetup):
    """Tests for the Team model: construction, updates, roster
    membership and espys totals."""

    def _make_team(self):
        """Persist a team (color Black) in a fresh league.

        Returns (team_json, sponsor_json)."""
        league = self.add_league(str(uuid.uuid1()))
        sponsor = self.add_sponsor(str(uuid.uuid1()))
        team = self.add_team(color="Black",
                             sponsor=sponsor,
                             league=league)
        return team, sponsor

    def _make_player(self):
        """Persist a male player with a unique email; return its json."""
        return self.add_player(
            str(uuid.uuid1()), str(uuid.uuid1()) + "@mlsb.ca", gender="m")

    def testTeamInit(self):
        """Constructor validates sponsor, league, color and year."""
        league = self.add_league(str(uuid.uuid1()))
        sponsor = self.add_sponsor(str(uuid.uuid1()))
        # good Teams
        Team(color="Black",
             sponsor_id=sponsor['sponsor_id'],
             league_id=league['league_id'])
        Team(color="Green",
             sponsor_id=sponsor['sponsor_id'],
             league_id=league['league_id'],
             year=VALID_YEAR)
        # now for bad teams
        with self.assertRaises(SponsorDoesNotExist,
                               msg="Should raise no sponsor"):
            Team(color="Black",
                 sponsor_id=INVALID_ID,
                 league_id=league['league_id'])
        with self.assertRaises(LeagueDoesNotExist,
                               msg="Should raise no league"):
            Team(color="Black",
                 sponsor_id=sponsor['sponsor_id'],
                 league_id=INVALID_ID)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Team(color=1,
                 sponsor_id=sponsor['sponsor_id'],
                 league_id=league['league_id'])
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            Team(color="Black",
                 sponsor_id=sponsor['sponsor_id'],
                 league_id=league['league_id'],
                 year=1)

    def testTeamUpdate(self):
        """update validates sponsor, league, color and year."""
        league_id = self.add_league(str(uuid.uuid1()))['league_id']
        sponsor_id = self.add_sponsor(str(uuid.uuid1()))['sponsor_id']
        # good Teams
        team = Team(color="Black",
                    sponsor_id=sponsor_id,
                    league_id=league_id)
        team.update(color="Green",
                    sponsor_id=sponsor_id,
                    league_id=league_id,
                    year=VALID_YEAR)
        # now for bad teams
        with self.assertRaises(SponsorDoesNotExist,
                               msg="Should raise no sponsor"):
            team.update(color="Black",
                        sponsor_id=INVALID_ID,
                        league_id=league_id)
        with self.assertRaises(LeagueDoesNotExist,
                               msg="Should raise no league"):
            team.update(color="Black",
                        sponsor_id=sponsor_id,
                        league_id=INVALID_ID)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            team.update(color=1,
                        sponsor_id=sponsor_id,
                        league_id=league_id)
        with self.assertRaises(InvalidField,
                               msg="Should raise invalid field"):
            team.update(color="Black",
                        sponsor_id=sponsor_id,
                        league_id=league_id,
                        year=1)

    def testIsPlayerNotOnTeam(self):
        """A freshly created player is not on the team."""
        team, _ = self._make_team()
        player = self._make_player()
        team_model = Team.query.get(team['team_id'])
        player_model = Player.query.get(player['player_id'])
        self.assertFalse(team_model.is_player_on_team(
            player_model), "New player should not be on team")

    def testIsPlayerOnTeam(self):
        """A player added to the roster is on the team."""
        team, _ = self._make_team()
        player = self._make_player()
        self.add_player_to_team(team, player)
        team_model = Team.query.get(team['team_id'])
        player_model = Player.query.get(player['player_id'])
        self.assertTrue(team_model.is_player_on_team(
            player_model), "New player added to team should be on team")

    def testIsCaptainOnTeam(self):
        """The team captain counts as being on the team."""
        team, _ = self._make_team()
        captain = self._make_player()
        self.add_player_to_team(team, captain, captain=True)
        team_model = Team.query.get(team['team_id'])
        captain_model = Player.query.get(captain['player_id'])
        self.assertTrue(team_model.is_player_on_team(
            captain_model), "Captain of team should be on team")

    def testIsNoneOnTeam(self):
        """None is never considered on a team."""
        team, _ = self._make_team()
        team_model = Team.query.get(team['team_id'])
        self.assertFalse(team_model.is_player_on_team(
            None), "None should not be on team")

    def testInsertingPlayer(self):
        """insert_player adds the player to the roster."""
        team, _ = self._make_team()
        player = self._make_player()
        team_model = Team.query.get(team['team_id'])
        team_model.insert_player(player['player_id'])
        self.assertTrue(team_model.is_player_on_team(Player.query.get(
            player['player_id'])), "Expecting player to be added to team")

    def testRemovePlayer(self):
        """remove_player removes even a captain from the roster."""
        team, _ = self._make_team()
        player = self._make_player()
        self.add_player_to_team(team, player, captain=True)
        team_model = Team.query.get(team['team_id'])
        team_model.remove_player(player['player_id'])
        self.assertFalse(team_model.is_player_on_team(Player.query.get(
            player['player_id'])), "Expecting player to be removed from team")

    def testEspsysTotal(self):
        """Test that espys total work"""
        # NOTE: the method name keeps the historical "Espsys" spelling so
        # any external test selection by name keeps matching.
        team, sponsor = self._make_team()
        # award the team 3 espys points in from two different
        # transactions
        espy_one = self.add_espys(team, sponsor, points=1)
        espy_two = self.add_espys(team, sponsor, points=2)
        # assert that their total is 3 points
        self.assertEqual(Team.query.options(undefer('espys_total'))
                         .get(team['team_id']).espys_total,
                         espy_one['points'] + espy_two['points'],
                         "Expecting 3 espys points to be awarded")
class GameModelTest(TestSetup):
    """Tests for constructing and updating the Game model.

    getDateString/getTimeString are helpers assumed to be defined
    elsewhere in this module (not visible in this chunk).
    """

    def _game_fixture(self):
        """Persist a league, a division and two teams for game tests.

        Returns (league_id, division_id, home_team_id, away_team_id)."""
        sponsor = self.add_sponsor(str(uuid.uuid1()))
        league = self.add_league(str(uuid.uuid1()))
        league_id = league['league_id']
        division_id = self.add_division(str(uuid.uuid1()))['division_id']
        home_team_id = self.add_team(str(uuid.uuid1()),
                                     sponsor,
                                     league,
                                     VALID_YEAR)['team_id']
        away_team_id = self.add_team(str(uuid.uuid1()),
                                     sponsor,
                                     league,
                                     VALID_YEAR)['team_id']
        return league_id, division_id, home_team_id, away_team_id

    def testGameInit(self):
        """Constructor validates date, status, field, teams, league
        and division."""
        (league_id, division_id,
         home_team_id, away_team_id) = self._game_fixture()
        # good game
        Game(getDateString(), getTimeString(),
             home_team_id, away_team_id, league_id, division_id)
        with self.assertRaises(InvalidField,
                               msg="should raise invalid field"):
            Game("x", getTimeString(), home_team_id,
                 away_team_id, league_id, division_id)
        with self.assertRaises(InvalidField,
                               msg="should raise invalid field"):
            Game(getDateString(),
                 getTimeString(),
                 home_team_id,
                 away_team_id,
                 league_id,
                 division_id,
                 status=1)
        with self.assertRaises(InvalidField,
                               msg="should raise invalid field"):
            Game(getDateString(),
                 getTimeString(),
                 home_team_id,
                 away_team_id,
                 league_id,
                 division_id,
                 field=1)
        with self.assertRaises(TeamDoesNotExist,
                               msg="should raise no team"):
            Game(getDateString(), getTimeString(),
                 INVALID_ID, away_team_id, league_id, division_id)
        with self.assertRaises(TeamDoesNotExist,
                               msg="should raise no team"):
            Game(getDateString(), getTimeString(),
                 home_team_id, INVALID_ID, league_id, division_id)
        with self.assertRaises(LeagueDoesNotExist,
                               msg="should raise no league"):
            Game(getDateString(),
                 getTimeString(), home_team_id, away_team_id, INVALID_ID,
                 division_id)
        # message fixed: this case is about a missing division, not league
        with self.assertRaises(DivisionDoesNotExist,
                               msg="should raise no division"):
            Game(getDateString(),
                 getTimeString(), home_team_id, away_team_id, league_id,
                 INVALID_ID)

    def testGameUpdate(self):
        """update validates status, field, teams and league."""
        (league_id, division_id,
         home_team_id, away_team_id) = self._game_fixture()
        # good game
        g = Game(getDateString(),
                 getTimeString(), home_team_id, away_team_id, league_id,
                 division_id)
        with self.assertRaises(InvalidField,
                               msg="should raise invalid field"):
            g.update(getDateString(),
                     getTimeString(),
                     home_team_id,
                     away_team_id,
                     league_id,
                     status=1)
        with self.assertRaises(InvalidField,
                               msg="should raise invalid field"):
            g.update(getDateString(),
                     getTimeString(),
                     home_team_id,
                     away_team_id,
                     league_id,
                     field=1)
        with self.assertRaises(TeamDoesNotExist,
                               msg="should raise no team"):
            g.update(getDateString(),
                     getTimeString(),
                     INVALID_ID,
                     away_team_id,
                     league_id)
        with self.assertRaises(TeamDoesNotExist,
                               msg="should raise no team"):
            g.update(getDateString(),
                     getTimeString(),
                     home_team_id,
                     INVALID_ID,
                     league_id)
        with self.assertRaises(LeagueDoesNotExist,
                               msg="should raise no league"):
            g.update(getDateString(),
                     getTimeString(),
                     home_team_id,
                     away_team_id,
                     INVALID_ID)
class JoinLeagueRequestTest(TestSetup):
"""Test the join league request model"""
def testJoinLeagueRequestInit(self):
""" Test the constructor validates the given data"""
player = str(uuid.uuid1())
email = player + "@mlsb.ca"
some_gender = "m"
color = str(uuid.uuid1())
league = self.add_league(str(uuid.uuid1()))
sponsor = self.add_sponsor(str(uuid.uuid1()))
no_team = Team(color="Black",
sponsor_id=sponsor['sponsor_id'],
league_id=league['league_id'])
team_json = self.add_team(color, sponsor=sponsor, league=league)
team = Team.query.get(team_json['team_id'])
# good request and test json method
league_request = JoinLeagueRequest(email, player, team, some_gender)
league_request.json()
# bad stuff
try:
JoinLeagueRequest(1, player, team, some_gender)
self.assertEqual(False, True, "Should raise invalid field")
except InvalidField:
pass
try:
JoinLeagueRequest(email, 1, team, some_gender)
self.assertEqual(False, True, "Should raise invalid field")
except InvalidField:
pass
try:
JoinLeagueRequest(email, player, "wrong team", some_gender)
self.assertEqual(False, True, "Should raise team does not exist")
except TeamDoesNotExist:
pass
try:
JoinLeagueRequest(email, player, no_team, some_gender)
self.assertEqual(False, True, "Should raise team does not exist")
except TeamDoesNotExist:
pass
try:
JoinLeagueRequest(email, player, team, "XX")
self.assertEqual(False, True, "Should raise invalid field")
except InvalidField:
pass
def testAcceptJoinLeagueRequestNewPlayer(self):
player = str(uuid.uuid1())
email = player + "@mlsb.ca"
some_gender = "m"
color = str(uuid.uuid1())
league = self.add_league(str(uuid.uuid1()))
sponsor = self.add_sponsor(str(uuid.uuid1()))
team_json = self.add_team(color, sponsor=sponsor, league=league)
team = Team.query.get(team_json['team_id'])
league_request = JoinLeagueRequest(email, player, team, some_gender)
accepted_player = league_request.accept_request()
# check player is one team now
self.assertTrue(accepted_player.id is not None,
"Create player account when joining team")
team = Team.query.get(team_json['team_id'])
self.assertTrue(bool([True
for p in team.players
if p.email == email and p.id is not None]),
"New player added was not added to team")
def testAcceptJoinLeagueRequestExistingPlayer(self):
    """Accepting a request from a known email reuses the existing player account."""
    player_name = str(uuid.uuid1())
    player_email = player_name + "@mlsb.ca"
    gender = "m"
    existing_player = self.add_player(player_name, player_email, gender=gender)
    league = self.add_league(str(uuid.uuid1()))
    sponsor = self.add_sponsor(str(uuid.uuid1()))
    team_json = self.add_team(str(uuid.uuid1()), sponsor=sponsor, league=league)
    team = Team.query.get(team_json['team_id'])
    request = JoinLeagueRequest(player_email, existing_player, team, gender)
    accepted_player = request.accept_request()
    # The accept should map onto the pre-existing player account.
    self.assertEqual(accepted_player.id, existing_player['player_id'],
                     "Use player account when joining team")
    # Re-fetch the team and confirm the existing player is on its roster.
    team = Team.query.get(team_json['team_id'])
    on_roster = any(p.id == existing_player['player_id']
                    for p in team.players)
    self.assertTrue(on_roster, "Existing player added was not added to team")
def testAcceptJoinLeagueRequestTwice(self):
    """Accepting the same league request twice raises HaveLeagueRequestException.

    Uses `assertRaises` as a context manager instead of the
    try/except + assertTrue(False) pattern, so an unexpected exception
    type surfaces as an error rather than being silently swallowed.
    """
    name = str(uuid.uuid1())
    email = name + "@mlsb.ca"
    some_gender = "m"
    player = self.add_player(name, email, gender=some_gender)
    color = str(uuid.uuid1())
    league = self.add_league(str(uuid.uuid1()))
    sponsor = self.add_sponsor(str(uuid.uuid1()))
    team_json = self.add_team(color, sponsor=sponsor, league=league)
    team = Team.query.get(team_json['team_id'])
    league_request = JoinLeagueRequest(email, player, team, some_gender)
    league_request.accept_request()
    # A second accept must fail: the request was already consumed.
    with self.assertRaises(
            HaveLeagueRequestException,
            msg="Should not be able to accept request league twice"):
        league_request.accept_request()
class BatModelTest(TestSetup):
    """Tests for the Bat model: constructor and update validation.

    Improvements over the previous version:
    - the duplicated player/team/game setup is factored into
      `_make_bat_fixture`;
    - the try/except + assertEqual(True, False) pattern is replaced with
      `assertRaises` context managers, so unexpected exception types fail
      loudly instead of being swallowed;
    - the failure message for the GameDoesNotExist cases now says
      "no game" (it previously said "no league", a copy-paste error).
    """

    def _make_bat_fixture(self):
        """Create a player, two teams and a game; return (player_id, home_team_id, game_id)."""
        player_name = str(uuid.uuid1())
        email = player_name + "@mlsb.ca"
        player = self.add_player(player_name, email)
        sponsor = self.add_sponsor(str(uuid.uuid1()))
        league = self.add_league(str(uuid.uuid1()))
        division = self.add_division(str(uuid.uuid1()))
        home_team = self.add_team(str(uuid.uuid1()),
                                  sponsor,
                                  league,
                                  VALID_YEAR)
        away_team = self.add_team(str(uuid.uuid1()),
                                  sponsor,
                                  league,
                                  VALID_YEAR)
        game = self.add_game(getDateString(),
                             getTimeString(), home_team, away_team, league,
                             division)
        return player['player_id'], home_team['team_id'], game['game_id']

    def testBatInit(self):
        """Bat() accepts valid data and rejects bad fields and unknown ids."""
        player_id, home_team_id, game_id = self._make_bat_fixture()
        # good bat
        Bat(player_id, home_team_id, game_id, "s", inning=1, rbi=1)
        # now for the bad stuff
        with self.assertRaises(InvalidField, msg="should raise invalid field"):
            Bat(player_id, home_team_id, game_id, "XX", inning=1, rbi=1)
        with self.assertRaises(InvalidField, msg="should raise invalid field"):
            Bat(player_id, home_team_id, game_id, "s", inning=-1, rbi=1)
        with self.assertRaises(InvalidField, msg="should raise invalid field"):
            Bat(player_id, home_team_id, game_id, "s", inning=1, rbi=1000)
        with self.assertRaises(PlayerDoesNotExist, msg="should raise no player"):
            Bat(INVALID_ID, home_team_id, game_id, "s", inning=1, rbi=1)
        with self.assertRaises(TeamDoesNotExist, msg="should raise no team"):
            Bat(player_id, INVALID_ID, game_id, "s", inning=1, rbi=1)
        with self.assertRaises(GameDoesNotExist, msg="should raise no game"):
            Bat(player_id, home_team_id, INVALID_ID, "s", inning=1, rbi=1)

    def testBatUpdate(self):
        """Bat.update() rejects bad fields and unknown ids."""
        player_id, home_team_id, game_id = self._make_bat_fixture()
        # good bat
        b = Bat(player_id, home_team_id, game_id, "s", inning=1, rbi=1)
        # now for the bad stuff
        with self.assertRaises(InvalidField, msg="should raise invalid field"):
            b.update(player_id=player_id,
                     team_id=home_team_id,
                     game_id=game_id,
                     hit="XX",
                     inning=1,
                     rbi=1)
        with self.assertRaises(InvalidField, msg="should raise invalid field"):
            b.update(player_id=player_id,
                     team_id=home_team_id,
                     game_id=game_id,
                     hit="s",
                     inning=-1,
                     rbi=1)
        with self.assertRaises(InvalidField, msg="should raise invalid field"):
            b.update(player_id=player_id,
                     team_id=home_team_id,
                     game_id=game_id,
                     hit="s",
                     inning=1,
                     rbi=1000)
        with self.assertRaises(PlayerDoesNotExist, msg="should raise no player"):
            b.update(player_id=INVALID_ID,
                     team_id=home_team_id,
                     game_id=game_id,
                     hit="s",
                     inning=1,
                     rbi=1)
        with self.assertRaises(TeamDoesNotExist, msg="should raise no team"):
            b.update(player_id=player_id,
                     team_id=INVALID_ID,
                     game_id=game_id,
                     hit="s",
                     inning=1,
                     rbi=1)
        with self.assertRaises(GameDoesNotExist, msg="should raise no game"):
            b.update(player_id=player_id,
                     team_id=home_team_id,
                     game_id=INVALID_ID,
                     hit="s",
                     inning=1,
                     rbi=1)
def getDateString():
    """Return the current local date formatted as YYYY-MM-DD."""
    today = datetime.now()
    return today.strftime("%Y-%m-%d")
def getTimeString():
    """Return the current local time formatted as HH:MM (24-hour clock)."""
    now = datetime.now()
    return now.strftime("%H:%M")
if __name__ == "__main__":
    # Run every test in this module when executed as a script.
    unittest.main()
| 37.57
| 80
| 0.539659
| 3,132
| 30,056
| 5.018199
| 0.069604
| 0.040084
| 0.068715
| 0.052682
| 0.828975
| 0.800216
| 0.785901
| 0.78132
| 0.76764
| 0.748234
| 0
| 0.008811
| 0.361825
| 30,056
| 799
| 81
| 37.617021
| 0.810594
| 0.026983
| 0
| 0.771755
| 0
| 0
| 0.092551
| 0
| 0
| 0
| 0
| 0
| 0.098431
| 1
| 0.038516
| false
| 0.085592
| 0.009986
| 0
| 0.062767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
83270b2fae2ca5ac12d418982de5c725c5527e27
| 247
|
py
|
Python
|
bitmovin_api_sdk/encoding/infrastructure/azure/regions/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/infrastructure/azure/regions/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/infrastructure/azure/regions/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.infrastructure.azure.regions.regions_api import RegionsApi
from bitmovin_api_sdk.encoding.infrastructure.azure.regions.azure_account_region_settings_list_query_params import AzureAccountRegionSettingsListQueryParams
| 82.333333
| 156
| 0.927126
| 29
| 247
| 7.517241
| 0.586207
| 0.110092
| 0.137615
| 0.165138
| 0.477064
| 0.477064
| 0.477064
| 0.477064
| 0
| 0
| 0
| 0
| 0.032389
| 247
| 2
| 157
| 123.5
| 0.912134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
55f4698cea908f80631d4a0a1dc7ff5523afbbaa
| 42,074
|
py
|
Python
|
cl_sii/rcv/parse_csv.py
|
fyntex/lib-cl-sii-python
|
b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34
|
[
"MIT"
] | 8
|
2020-03-07T19:58:40.000Z
|
2021-12-15T13:47:40.000Z
|
cl_sii/rcv/parse_csv.py
|
fyntex/lib-cl-sii-python
|
b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34
|
[
"MIT"
] | 141
|
2020-01-17T22:47:35.000Z
|
2022-03-31T18:29:47.000Z
|
cl_sii/rcv/parse_csv.py
|
fyntex/lib-cl-sii-python
|
b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34
|
[
"MIT"
] | 3
|
2020-03-07T20:30:02.000Z
|
2021-03-22T03:14:26.000Z
|
"""
Parse RCV files (CSV)
=====================
"""
import csv
import logging
from datetime import date, datetime
from typing import Callable, Dict, Iterable, Optional, Sequence, Tuple
import marshmallow
from cl_sii.base.constants import SII_OFFICIAL_TZ
from cl_sii.extras import mm_fields
from cl_sii.libs import csv_utils
from cl_sii.libs import mm_utils
from cl_sii.libs import rows_processing
from cl_sii.libs import tz_utils
from cl_sii.rut import Rut
from .constants import RcEstadoContable, RcvKind
from .data_models import (
RcvDetalleEntry, RcNoIncluirDetalleEntry,
RcPendienteDetalleEntry, RcReclamadoDetalleEntry,
RcRegistroDetalleEntry, RvDetalleEntry,
)
logger = logging.getLogger(__name__)

# Common signature of the 'parse_rcv_*_csv_file' functions below, as
# returned by 'get_rcv_csv_file_parser':
# (rut, input_file_path, n_rows_offset, max_n_rows) -> iterable of 4-tuples.
RcvCsvFileParserType = Callable[
    [Rut, str, int, Optional[int]],
    Iterable[
        Tuple[Optional[RcvDetalleEntry], int, Dict[str, object], Dict[str, object]]
    ],
]
def get_rcv_csv_file_parser(
    rcv_kind: RcvKind,
    estado_contable: Optional[RcEstadoContable],
) -> RcvCsvFileParserType:
    """
    Return a function that parses a CSV file of the given :class:`RcvKind` and
    :class:`RcEstadoContable`.

    :raises ValueError: on an invalid 'rcv_kind'/'estado_contable' combination
    :raises Exception: on unrecoverable errors
    """
    if rcv_kind == RcvKind.VENTAS:
        # For sales there is exactly one parser; 'estado_contable' is meaningless.
        if estado_contable is not None:
            raise ValueError("'estado_contable' must be None when 'rcv_kind' is 'VENTAS'.")
        return parse_rcv_venta_csv_file

    if rcv_kind == RcvKind.COMPRAS:
        if estado_contable is None:
            raise ValueError(
                "'estado_contable' must not be None when 'rcv_kind' is 'COMPRAS'.",
            )
        # One parser per "estado contable" of the purchases register.
        compras_parsers = {
            RcEstadoContable.REGISTRO: parse_rcv_compra_registro_csv_file,
            RcEstadoContable.NO_INCLUIR: parse_rcv_compra_no_incluir_csv_file,
            RcEstadoContable.RECLAMADO: parse_rcv_compra_reclamado_csv_file,
            RcEstadoContable.PENDIENTE: parse_rcv_compra_pendiente_csv_file,
        }
        parse_func = compras_parsers.get(estado_contable)
        if parse_func is None:
            raise Exception(
                "Programming error. No handler for given 'estado_contable'.",
                estado_contable,
            )
        return parse_func

    raise Exception("Programming error. No handler for given 'rcv_kind'.", rcv_kind)
def parse_rcv_venta_csv_file(
    rut: Rut,
    input_file_path: str,
    n_rows_offset: int = 0,
    max_n_rows: Optional[int] = None,
) -> Iterable[Tuple[Optional[RvDetalleEntry], int, Dict[str, object], Dict[str, object]]]:
    """
    Parse entries from an RV ("Registro de Ventas") (CSV file).

    :param rut: used as the 'emisor_rut' of every parsed row
        (injected through the schema context).
    :param input_file_path: path of the CSV file to parse.
    :param n_rows_offset: forwarded to '_parse_rcv_csv_file'
        (presumably the number of data rows to skip — confirm there).
    :param max_n_rows: forwarded to '_parse_rcv_csv_file'
        (presumably a cap on the number of data rows — confirm there).
    """
    # warning: this looks like it would be executed before the iteration begins but it is not.
    schema_context = dict(
        emisor_rut=rut,
    )
    input_csv_row_schema = RcvVentaCsvRowSchema(context=schema_context)
    # Column names of the input CSV file.
    expected_input_field_names = (
        'Nro',
        'Tipo Doc',  # 'tipo_docto'
        'Tipo Venta',
        'Rut cliente',  # 'receptor_rut'
        'Razon Social',  # 'receptor_razon_social'
        'Folio',  # 'folio'
        'Fecha Docto',  # 'fecha_emision_date'
        'Fecha Recepcion',  # 'fecha_recepcion_dt'
        'Fecha Acuse Recibo',  # 'fecha_acuse_dt'
        'Fecha Reclamo',  # 'fecha_reclamo_dt'
        'Monto Exento',
        'Monto Neto',
        'Monto IVA',
        'Monto total',  # 'monto_total'
        'IVA Retenido Total',
        'IVA Retenido Parcial',
        'IVA no retenido',
        'IVA propio',
        'IVA Terceros',
        'RUT Emisor Liquid. Factura',
        'Neto Comision Liquid. Factura',
        'Exento Comision Liquid. Factura',
        'IVA Comision Liquid. Factura',
        'IVA fuera de plazo',
        'Tipo Docto. Referencia',
        'Folio Docto. Referencia',
        'Num. Ident. Receptor Extranjero',
        'Nacionalidad Receptor Extranjero',
        'Credito empresa constructora',
        'Impto. Zona Franca (Ley 18211)',
        'Garantia Dep. Envases',
        'Indicador Venta sin Costo',
        'Indicador Servicio Periodico',
        'Monto No facturable',
        'Total Monto Periodo',
        'Venta Pasajes Transporte Nacional',
        'Venta Pasajes Transporte Internacional',
        'Numero Interno',
        'Codigo Sucursal',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Imp.',
        'Valor Otro Imp.',
        'Tasa Otro Imp.',
    )
    # Input columns to drop (handled by '_parse_rcv_csv_file'; only the
    # columns commented above with a field name are kept for the schema).
    fields_to_remove_names = (
        'Nro',
        'Tipo Venta',
        'Monto Exento',
        'Monto Neto',
        'Monto IVA',
        'IVA Retenido Total',
        'IVA Retenido Parcial',
        'IVA no retenido',
        'IVA propio',
        'IVA Terceros',
        'RUT Emisor Liquid. Factura',
        'Neto Comision Liquid. Factura',
        'Exento Comision Liquid. Factura',
        'IVA Comision Liquid. Factura',
        'IVA fuera de plazo',
        'Tipo Docto. Referencia',
        'Folio Docto. Referencia',
        'Num. Ident. Receptor Extranjero',
        'Nacionalidad Receptor Extranjero',
        'Credito empresa constructora',
        'Impto. Zona Franca (Ley 18211)',
        'Garantia Dep. Envases',
        'Indicador Venta sin Costo',
        'Indicador Servicio Periodico',
        'Monto No facturable',
        'Total Monto Periodo',
        'Venta Pasajes Transporte Nacional',
        'Venta Pasajes Transporte Internacional',
        'Numero Interno',
        'Codigo Sucursal',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Imp.',
        'Valor Otro Imp.',
        'Tasa Otro Imp.',
    )
    # note: mypy will complain about returned dataclass type mismatch (and it is right to do so)
    #   but we know from logic which subclass of 'RcvDetalleEntry' will be yielded.
    yield from _parse_rcv_csv_file(  # type: ignore
        input_csv_row_schema,
        expected_input_field_names,
        fields_to_remove_names,
        input_file_path,
        n_rows_offset,
        max_n_rows,
    )
def parse_rcv_compra_registro_csv_file(
    rut: Rut,
    input_file_path: str,
    n_rows_offset: int = 0,
    max_n_rows: Optional[int] = None,
) -> Iterable[Tuple[Optional[RcRegistroDetalleEntry], int, Dict[str, object], Dict[str, object]]]:
    """
    Parse entries from an RC ("Registro de Compras") / "registro" (CSV file).

    :param rut: used as the 'receptor_rut' of every parsed row
        (injected through the schema context).
    :param input_file_path: path of the CSV file to parse.
    :param n_rows_offset: forwarded to '_parse_rcv_csv_file'
        (presumably the number of data rows to skip — confirm there).
    :param max_n_rows: forwarded to '_parse_rcv_csv_file'
        (presumably a cap on the number of data rows — confirm there).
    """
    # warning: this looks like it would be executed before the iteration begins but it is not.
    schema_context = dict(
        receptor_rut=rut,
    )
    input_csv_row_schema = RcvCompraRegistroCsvRowSchema(context=schema_context)
    # Column names of the input CSV file.
    expected_input_field_names = (
        'Nro',
        'Tipo Doc',  # 'tipo_docto'
        'Tipo Compra',
        'RUT Proveedor',  # 'emisor_rut'
        'Razon Social',  # 'emisor_razon_social'
        'Folio',  # 'folio'
        'Fecha Docto',  # 'fecha_emision_date'
        'Fecha Recepcion',  # 'fecha_recepcion_dt'
        'Fecha Acuse',  # 'fecha_acuse_dt'
        'Monto Exento',
        'Monto Neto',
        'Monto IVA Recuperable',
        'Monto Iva No Recuperable',
        'Codigo IVA No Rec.',
        'Monto Total',  # 'monto_total'
        'Monto Neto Activo Fijo',
        'IVA Activo Fijo',
        'IVA uso Comun',
        'Impto. Sin Derecho a Credito',
        'IVA No Retenido',
        'Tabacos Puros',
        'Tabacos Cigarrillos',
        'Tabacos Elaborados',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Impuesto',
        'Valor Otro Impuesto',
        'Tasa Otro Impuesto',
    )
    # Input columns to drop (handled by '_parse_rcv_csv_file').
    fields_to_remove_names = (
        'Nro',
        'Tipo Compra',
        'Monto Exento',
        'Monto Neto',
        'Monto IVA Recuperable',
        'Monto Iva No Recuperable',
        'Codigo IVA No Rec.',
        'Monto Neto Activo Fijo',
        'IVA Activo Fijo',
        'IVA uso Comun',
        'Impto. Sin Derecho a Credito',
        'IVA No Retenido',
        'Tabacos Puros',
        'Tabacos Cigarrillos',
        'Tabacos Elaborados',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Impuesto',
        'Valor Otro Impuesto',
        'Tasa Otro Impuesto',
    )
    # note: mypy will complain about returned dataclass type mismatch (and it is right to do so)
    #   but we know from logic which subclass of 'RcvDetalleEntry' will be yielded.
    yield from _parse_rcv_csv_file(  # type: ignore
        input_csv_row_schema,
        expected_input_field_names,
        fields_to_remove_names,
        input_file_path,
        n_rows_offset,
        max_n_rows,
    )
def parse_rcv_compra_no_incluir_csv_file(
    rut: Rut,
    input_file_path: str,
    n_rows_offset: int = 0,
    max_n_rows: Optional[int] = None,
) -> Iterable[Tuple[Optional[RcNoIncluirDetalleEntry], int, Dict[str, object], Dict[str, object]]]:
    """
    Parse entries from an RC ("Registro de Compras") / "no incluir" (CSV file).

    Same header as the "registro" variant except that the 'Tabacos *'
    columns are absent.

    :param rut: used as the 'receptor_rut' of every parsed row
        (injected through the schema context).
    :param input_file_path: path of the CSV file to parse.
    :param n_rows_offset: forwarded to '_parse_rcv_csv_file'
        (presumably the number of data rows to skip — confirm there).
    :param max_n_rows: forwarded to '_parse_rcv_csv_file'
        (presumably a cap on the number of data rows — confirm there).
    """
    # warning: this looks like it would be executed before the iteration begins but it is not.
    schema_context = dict(
        receptor_rut=rut,
    )
    input_csv_row_schema = RcvCompraNoIncluirCsvRowSchema(context=schema_context)
    # Column names of the input CSV file.
    expected_input_field_names = (
        'Nro',
        'Tipo Doc',  # 'tipo_docto'
        'Tipo Compra',
        'RUT Proveedor',  # 'emisor_rut'
        'Razon Social',  # 'emisor_razon_social'
        'Folio',  # 'folio'
        'Fecha Docto',  # 'fecha_emision_date'
        'Fecha Recepcion',  # 'fecha_recepcion_dt'
        'Fecha Acuse',  # 'fecha_acuse_dt'
        'Monto Exento',
        'Monto Neto',
        'Monto IVA Recuperable',
        'Monto Iva No Recuperable',
        'Codigo IVA No Rec.',
        'Monto Total',  # 'monto_total'
        'Monto Neto Activo Fijo',
        'IVA Activo Fijo',
        'IVA uso Comun',
        'Impto. Sin Derecho a Credito',
        'IVA No Retenido',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Impuesto',
        'Valor Otro Impuesto',
        'Tasa Otro Impuesto',
    )
    # Input columns to drop (handled by '_parse_rcv_csv_file').
    fields_to_remove_names = (
        'Nro',
        'Tipo Compra',
        'Monto Exento',
        'Monto Neto',
        'Monto IVA Recuperable',
        'Monto Iva No Recuperable',
        'Codigo IVA No Rec.',
        'Monto Neto Activo Fijo',
        'IVA Activo Fijo',
        'IVA uso Comun',
        'Impto. Sin Derecho a Credito',
        'IVA No Retenido',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Impuesto',
        'Valor Otro Impuesto',
        'Tasa Otro Impuesto',
    )
    # note: mypy will complain about returned dataclass type mismatch (and it is right to do so)
    #   but we know from logic which subclass of 'RcvDetalleEntry' will be yielded.
    yield from _parse_rcv_csv_file(  # type: ignore
        input_csv_row_schema,
        expected_input_field_names,
        fields_to_remove_names,
        input_file_path,
        n_rows_offset,
        max_n_rows,
    )
def parse_rcv_compra_reclamado_csv_file(
    rut: Rut,
    input_file_path: str,
    n_rows_offset: int = 0,
    max_n_rows: Optional[int] = None,
) -> Iterable[Tuple[Optional[RcReclamadoDetalleEntry], int, Dict[str, object], Dict[str, object]]]:
    """
    Parse entries from an RC ("Registro de Compras") / "reclamado" (CSV file).

    Same header as the "no incluir" variant except that 'Fecha Acuse' is
    replaced by 'Fecha Reclamo'.

    :param rut: used as the 'receptor_rut' of every parsed row
        (injected through the schema context).
    :param input_file_path: path of the CSV file to parse.
    :param n_rows_offset: forwarded to '_parse_rcv_csv_file'
        (presumably the number of data rows to skip — confirm there).
    :param max_n_rows: forwarded to '_parse_rcv_csv_file'
        (presumably a cap on the number of data rows — confirm there).
    """
    # warning: this looks like it would be executed before the iteration begins but it is not.
    schema_context = dict(
        receptor_rut=rut,
    )
    input_csv_row_schema = RcvCompraReclamadoCsvRowSchema(context=schema_context)
    # Column names of the input CSV file.
    expected_input_field_names = (
        'Nro',
        'Tipo Doc',  # 'tipo_docto'
        'Tipo Compra',
        'RUT Proveedor',  # 'emisor_rut'
        'Razon Social',  # 'emisor_razon_social'
        'Folio',  # 'folio'
        'Fecha Docto',  # 'fecha_emision_date'
        'Fecha Recepcion',  # 'fecha_recepcion_dt'
        'Fecha Reclamo',  # 'fecha_reclamo_dt'
        'Monto Exento',
        'Monto Neto',
        'Monto IVA Recuperable',
        'Monto Iva No Recuperable',
        'Codigo IVA No Rec.',
        'Monto Total',  # 'monto_total'
        'Monto Neto Activo Fijo',
        'IVA Activo Fijo',
        'IVA uso Comun',
        'Impto. Sin Derecho a Credito',
        'IVA No Retenido',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Impuesto',
        'Valor Otro Impuesto',
        'Tasa Otro Impuesto',
    )
    # Input columns to drop (handled by '_parse_rcv_csv_file').
    fields_to_remove_names = (
        'Nro',
        'Tipo Compra',
        'Monto Exento',
        'Monto Neto',
        'Monto IVA Recuperable',
        'Monto Iva No Recuperable',
        'Codigo IVA No Rec.',
        'Monto Neto Activo Fijo',
        'IVA Activo Fijo',
        'IVA uso Comun',
        'Impto. Sin Derecho a Credito',
        'IVA No Retenido',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Impuesto',
        'Valor Otro Impuesto',
        'Tasa Otro Impuesto',
    )
    # note: mypy will complain about returned dataclass type mismatch (and it is right to do so)
    #   but we know from logic which subclass of 'RcvDetalleEntry' will be yielded.
    yield from _parse_rcv_csv_file(  # type: ignore
        input_csv_row_schema,
        expected_input_field_names,
        fields_to_remove_names,
        input_file_path,
        n_rows_offset,
        max_n_rows,
    )
def parse_rcv_compra_pendiente_csv_file(
    rut: Rut,
    input_file_path: str,
    n_rows_offset: int = 0,
    max_n_rows: Optional[int] = None,
) -> Iterable[Tuple[Optional[RcPendienteDetalleEntry], int, Dict[str, object], Dict[str, object]]]:
    """
    Parse entries from an RC ("Registro de Compras") / "pendiente" (CSV file).

    Same header as the "no incluir" variant but with neither 'Fecha Acuse'
    nor 'Fecha Reclamo'.

    :param rut: used as the 'receptor_rut' of every parsed row
        (injected through the schema context).
    :param input_file_path: path of the CSV file to parse.
    :param n_rows_offset: forwarded to '_parse_rcv_csv_file'
        (presumably the number of data rows to skip — confirm there).
    :param max_n_rows: forwarded to '_parse_rcv_csv_file'
        (presumably a cap on the number of data rows — confirm there).
    """
    # warning: this looks like it would be executed before the iteration begins but it is not.
    schema_context = dict(
        receptor_rut=rut,
    )
    input_csv_row_schema = RcvCompraPendienteCsvRowSchema(context=schema_context)
    # Column names of the input CSV file.
    expected_input_field_names = (
        'Nro',
        'Tipo Doc',  # 'tipo_docto'
        'Tipo Compra',
        'RUT Proveedor',  # 'emisor_rut'
        'Razon Social',  # 'emisor_razon_social'
        'Folio',  # 'folio'
        'Fecha Docto',  # 'fecha_emision_date'
        'Fecha Recepcion',  # 'fecha_recepcion_dt'
        'Monto Exento',
        'Monto Neto',
        'Monto IVA Recuperable',
        'Monto Iva No Recuperable',
        'Codigo IVA No Rec.',
        'Monto Total',  # 'monto_total'
        'Monto Neto Activo Fijo',
        'IVA Activo Fijo',
        'IVA uso Comun',
        'Impto. Sin Derecho a Credito',
        'IVA No Retenido',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Impuesto',
        'Valor Otro Impuesto',
        'Tasa Otro Impuesto',
    )
    # Input columns to drop (handled by '_parse_rcv_csv_file').
    fields_to_remove_names = (
        'Nro',
        'Tipo Compra',
        'Monto Exento',
        'Monto Neto',
        'Monto IVA Recuperable',
        'Monto Iva No Recuperable',
        'Codigo IVA No Rec.',
        'Monto Neto Activo Fijo',
        'IVA Activo Fijo',
        'IVA uso Comun',
        'Impto. Sin Derecho a Credito',
        'IVA No Retenido',
        'NCE o NDE sobre Fact. de Compra',
        'Codigo Otro Impuesto',
        'Valor Otro Impuesto',
        'Tasa Otro Impuesto',
    )
    # note: mypy will complain about returned dataclass type mismatch (and it is right to do so)
    #   but we know from logic which subclass of 'RcvDetalleEntry' will be yielded.
    yield from _parse_rcv_csv_file(  # type: ignore
        input_csv_row_schema,
        expected_input_field_names,
        fields_to_remove_names,
        input_file_path,
        n_rows_offset,
        max_n_rows,
    )
###############################################################################
# schemas
###############################################################################
class _RcvCsvRowSchemaBase(marshmallow.Schema):
    """Common base for the RCV CSV row schemas."""

    @marshmallow.validates_schema(pass_original=True)
    def validate_schema(self, data: dict, original_data: dict) -> None:
        # Reject any input field that the schema does not declare.
        mm_utils.validate_no_unexpected_input_fields(self, data, original_data)

    def to_detalle_entry(self, data: dict) -> RcvDetalleEntry:
        """Convert a deserialized row into a detalle entry (subclass hook)."""
        raise NotImplementedError
class RcvVentaCsvRowSchema(_RcvCsvRowSchemaBase):
    """
    Schema for one row of an RV ("Registro de Ventas") CSV file.

    Requires 'emisor_rut' in the schema context (see :meth:`preprocess`).
    """

    # Timezone applied to the naive datetimes parsed from the CSV.
    FIELD_FECHA_RECEPCION_DT_TZ = SII_OFFICIAL_TZ
    FIELD_FECHA_ACUSE_DT_TZ = SII_OFFICIAL_TZ
    FIELD_FECHA_RECLAMO_DT_TZ = SII_OFFICIAL_TZ

    class Meta:
        strict = True

    ###########################################################################
    # basic fields
    ###########################################################################

    tipo_docto = mm_fields.RcvTipoDoctoField(
        required=True,
        load_from='Tipo Doc',
    )
    folio = marshmallow.fields.Integer(
        required=True,
        load_from='Folio',
    )
    fecha_emision_date = mm_utils.CustomMarshmallowDateField(
        format='%d/%m/%Y',  # e.g. '22/10/2018'
        required=True,
        load_from='Fecha Docto',
    )
    receptor_rut = mm_fields.RutField(
        required=True,
        load_from='Rut cliente',
    )
    monto_total = marshmallow.fields.Integer(
        required=True,
        load_from='Monto total',
    )
    receptor_razon_social = marshmallow.fields.String(
        required=True,
        load_from='Razon Social',
    )

    ###########################################################################
    # fields whose value is set using data passed in the schema context
    ###########################################################################

    emisor_rut = mm_fields.RutField(
        required=True,
    )

    ###########################################################################
    # extra fields: not included in the returned struct
    ###########################################################################

    fecha_recepcion_dt = marshmallow.fields.DateTime(
        format='%d/%m/%Y %H:%M:%S',  # e.g. '23/10/2018 01:54:13'
        required=True,
        load_from='Fecha Recepcion',
    )
    fecha_acuse_dt = marshmallow.fields.DateTime(
        format='%d/%m/%Y %H:%M:%S',  # e.g. '23/10/2018 01:54:13'
        required=False,
        allow_none=True,
        load_from='Fecha Acuse Recibo',
    )
    fecha_reclamo_dt = marshmallow.fields.DateTime(
        format='%d/%m/%Y %H:%M:%S',  # e.g. '23/10/2018 01:54:13'
        required=False,
        allow_none=True,
        load_from='Fecha Reclamo',
    )

    @marshmallow.pre_load
    def preprocess(self, in_data: dict) -> dict:
        """Inject 'emisor_rut' from the context and normalize empty dates to None."""
        # note: required fields checks are run later on automatically thus we may not assume that
        #   values of required fields (`required=True`) exist.

        # Set field value only if it was not in the input data.
        in_data.setdefault('emisor_rut', self.context['emisor_rut'])

        # Fix missing/default values.
        if 'Fecha Acuse Recibo' in in_data:
            if in_data['Fecha Acuse Recibo'] == '':
                in_data['Fecha Acuse Recibo'] = None
        if 'Fecha Reclamo' in in_data:
            if in_data['Fecha Reclamo'] == '':
                in_data['Fecha Reclamo'] = None

        return in_data

    @marshmallow.post_load
    def postprocess(self, data: dict) -> dict:
        """Make the datetime fields timezone-aware and strip the razon social."""
        # >>> data['fecha_recepcion_dt'].isoformat()
        # '2018-10-23T01:54:13'
        data['fecha_recepcion_dt'] = tz_utils.convert_naive_dt_to_tz_aware(
            dt=data['fecha_recepcion_dt'], tz=self.FIELD_FECHA_RECEPCION_DT_TZ)
        # >>> data['fecha_recepcion_dt'].isoformat()
        # '2018-10-23T01:54:13-03:00'
        # >>> data['fecha_recepcion_dt'].astimezone(pytz.UTC).isoformat()
        # '2018-10-23T04:54:13+00:00'

        # note: to express this value in another timezone (but the value does not change), do
        #   `dt_obj.astimezone(pytz.timezone('some timezone'))`

        if 'fecha_acuse_dt' in data and data['fecha_acuse_dt']:
            data['fecha_acuse_dt'] = tz_utils.convert_naive_dt_to_tz_aware(
                dt=data['fecha_acuse_dt'], tz=self.FIELD_FECHA_ACUSE_DT_TZ)
        if 'fecha_reclamo_dt' in data and data['fecha_reclamo_dt']:
            data['fecha_reclamo_dt'] = tz_utils.convert_naive_dt_to_tz_aware(
                dt=data['fecha_reclamo_dt'], tz=self.FIELD_FECHA_RECLAMO_DT_TZ)

        # Remove leading and trailing whitespace.
        data['receptor_razon_social'] = data['receptor_razon_social'].strip()

        return data

    def to_detalle_entry(self, data: dict) -> RvDetalleEntry:
        """Build an :class:`RvDetalleEntry` from a deserialized row.

        :raises ValueError: if an expected key is missing from ``data``.
        """
        try:
            emisor_rut: Rut = data['emisor_rut']  # type: ignore
            tipo_docto = data['tipo_docto']  # type: ignore
            folio: int = data['folio']  # type: ignore
            fecha_emision_date: date = data['fecha_emision_date']  # type: ignore
            receptor_rut: Rut = data['receptor_rut']  # type: ignore
            monto_total: int = data['monto_total']  # type: ignore
            receptor_razon_social: str = data['receptor_razon_social']  # type: ignore
            fecha_recepcion_dt: datetime = data['fecha_recepcion_dt']  # type: ignore
            fecha_acuse_dt: Optional[datetime] = data['fecha_acuse_dt']  # type: ignore
            fecha_reclamo_dt: Optional[datetime] = data['fecha_reclamo_dt']  # type: ignore
        except KeyError as exc:
            raise ValueError("Programming error: a referenced field is missing.") from exc

        try:
            detalle_entry = RvDetalleEntry(
                emisor_rut=emisor_rut,
                tipo_docto=tipo_docto,
                folio=folio,
                fecha_emision_date=fecha_emision_date,
                receptor_rut=receptor_rut,
                monto_total=monto_total,
                receptor_razon_social=receptor_razon_social,
                fecha_recepcion_dt=fecha_recepcion_dt,
                fecha_acuse_dt=fecha_acuse_dt,
                fecha_reclamo_dt=fecha_reclamo_dt,
            )
        except (TypeError, ValueError):
            raise

        return detalle_entry
class RcvCompraRegistroCsvRowSchema(_RcvCsvRowSchemaBase):
    """
    Schema for one row of an RC ("Registro de Compras") / "registro" CSV file.

    Requires 'receptor_rut' in the schema context (see :meth:`preprocess`).
    """

    # Timezone applied to the naive datetimes parsed from the CSV.
    FIELD_FECHA_RECEPCION_DT_TZ = SII_OFFICIAL_TZ
    FIELD_FECHA_ACUSE_DT_TZ = SII_OFFICIAL_TZ

    class Meta:
        strict = True

    ###########################################################################
    # basic fields
    ###########################################################################

    emisor_rut = mm_fields.RutField(
        required=True,
        load_from='RUT Proveedor',
    )
    tipo_docto = mm_fields.RcvTipoDoctoField(
        required=True,
        load_from='Tipo Doc',
    )
    folio = marshmallow.fields.Integer(
        required=True,
        load_from='Folio',
    )
    fecha_emision_date = mm_utils.CustomMarshmallowDateField(
        format='%d/%m/%Y',  # e.g. '22/10/2018'
        required=True,
        load_from='Fecha Docto',
    )
    monto_total = marshmallow.fields.Integer(
        required=True,
        load_from='Monto Total',
    )
    emisor_razon_social = marshmallow.fields.String(
        required=True,
        load_from='Razon Social',
    )

    ###########################################################################
    # fields whose value is set using data passed in the schema context
    ###########################################################################

    receptor_rut = mm_fields.RutField(
        required=True,
    )

    ###########################################################################
    # extra fields: not included in the returned struct
    ###########################################################################

    fecha_recepcion_dt = marshmallow.fields.DateTime(
        format='%d/%m/%Y %H:%M:%S',  # e.g. '23/10/2018 01:54:13'
        required=True,
        load_from='Fecha Recepcion',
    )
    fecha_acuse_dt = marshmallow.fields.DateTime(
        format='%d/%m/%Y %H:%M:%S',  # e.g. '23/10/2018 01:54:13'
        required=True,
        allow_none=True,
        load_from='Fecha Acuse',
    )

    @marshmallow.pre_load
    def preprocess(self, in_data: dict) -> dict:
        """Inject 'receptor_rut' from the context and normalize empty dates to None."""
        # note: required fields checks are run later on automatically thus we may not assume that
        #   values of required fields (`required=True`) exist.

        # Set field value only if it was not in the input data.
        in_data.setdefault('receptor_rut', self.context['receptor_rut'])

        # Fix missing/default values.
        if 'Fecha Acuse' in in_data:
            if in_data['Fecha Acuse'] == '':
                in_data['Fecha Acuse'] = None

        return in_data

    @marshmallow.post_load
    def postprocess(self, data: dict) -> dict:
        """Make the datetime fields timezone-aware and strip the razon social."""
        # >>> data['fecha_recepcion_dt'].isoformat()
        # '2018-10-23T01:54:13'
        data['fecha_recepcion_dt'] = tz_utils.convert_naive_dt_to_tz_aware(
            dt=data['fecha_recepcion_dt'], tz=self.FIELD_FECHA_RECEPCION_DT_TZ)
        # >>> data['fecha_recepcion_dt'].isoformat()
        # '2018-10-23T01:54:13-03:00'
        # >>> data['fecha_recepcion_dt'].astimezone(pytz.UTC).isoformat()
        # '2018-10-23T04:54:13+00:00'

        if data['fecha_acuse_dt']:
            data['fecha_acuse_dt'] = tz_utils.convert_naive_dt_to_tz_aware(
                dt=data['fecha_acuse_dt'], tz=self.FIELD_FECHA_ACUSE_DT_TZ)

        # note: to express this value in another timezone (but the value does not change), do
        #   `dt_obj.astimezone(pytz.timezone('some timezone'))`

        # Remove leading and trailing whitespace.
        data['emisor_razon_social'] = data['emisor_razon_social'].strip()

        return data

    def to_detalle_entry(self, data: dict) -> RcRegistroDetalleEntry:
        """Build an :class:`RcRegistroDetalleEntry` from a deserialized row.

        :raises ValueError: if an expected key is missing from ``data``.
        """
        try:
            emisor_rut: Rut = data['emisor_rut']  # type: ignore
            tipo_docto = data['tipo_docto']  # type: ignore
            folio: int = data['folio']  # type: ignore
            fecha_emision_date: date = data['fecha_emision_date']  # type: ignore
            receptor_rut: Rut = data['receptor_rut']  # type: ignore
            monto_total: int = data['monto_total']  # type: ignore
            emisor_razon_social: str = data['emisor_razon_social']  # type: ignore
            fecha_recepcion_dt: datetime = data['fecha_recepcion_dt']  # type: ignore
            fecha_acuse_dt: Optional[datetime] = data['fecha_acuse_dt']  # type: ignore
        except KeyError as exc:
            raise ValueError("Programming error: a referenced field is missing.") from exc

        try:
            detalle_entry = RcRegistroDetalleEntry(
                emisor_rut=emisor_rut,
                tipo_docto=tipo_docto,
                folio=folio,
                fecha_emision_date=fecha_emision_date,
                receptor_rut=receptor_rut,
                monto_total=monto_total,
                emisor_razon_social=emisor_razon_social,
                fecha_recepcion_dt=fecha_recepcion_dt,
                fecha_acuse_dt=fecha_acuse_dt,
            )
        except (TypeError, ValueError):
            raise

        return detalle_entry
class RcvCompraNoIncluirCsvRowSchema(RcvCompraRegistroCsvRowSchema):
    """Schema for an RC "no incluir" row; differs from "registro" only in the entry type built."""

    def to_detalle_entry(self, data: dict) -> RcNoIncluirDetalleEntry:
        """Build an :class:`RcNoIncluirDetalleEntry` from a deserialized row.

        :raises ValueError: if an expected key is missing from ``data``.
        """
        try:
            entry_kwargs = dict(
                emisor_rut=data['emisor_rut'],
                tipo_docto=data['tipo_docto'],
                folio=data['folio'],
                fecha_emision_date=data['fecha_emision_date'],
                receptor_rut=data['receptor_rut'],
                monto_total=data['monto_total'],
                emisor_razon_social=data['emisor_razon_social'],
                fecha_recepcion_dt=data['fecha_recepcion_dt'],
                fecha_acuse_dt=data['fecha_acuse_dt'],
            )
        except KeyError as exc:
            raise ValueError("Programming error: a referenced field is missing.") from exc

        return RcNoIncluirDetalleEntry(**entry_kwargs)
class RcvCompraReclamadoCsvRowSchema(_RcvCsvRowSchemaBase):
    """
    Schema for one row of the RC "reclamado" (claimed purchases) CSV file.

    Deserializes the Spanish-header CSV columns into typed values and builds
    an ``RcReclamadoDetalleEntry`` via :meth:`to_detalle_entry`.
    """

    # Timezone (SII official) attached in `postprocess()` to the naive
    # datetimes parsed from the CSV.
    FIELD_FECHA_RECEPCION_DT_TZ = SII_OFFICIAL_TZ
    FIELD_FECHA_RECLAMO_DT_TZ = SII_OFFICIAL_TZ

    class Meta:
        strict = True

    ###########################################################################
    # basic fields
    ###########################################################################

    emisor_rut = mm_fields.RutField(
        required=True,
        load_from='RUT Proveedor',
    )
    tipo_docto = mm_fields.RcvTipoDoctoField(
        required=True,
        load_from='Tipo Doc',
    )
    folio = marshmallow.fields.Integer(
        required=True,
        load_from='Folio',
    )
    fecha_emision_date = mm_utils.CustomMarshmallowDateField(
        format='%d/%m/%Y',  # e.g. '22/10/2018'
        required=True,
        load_from='Fecha Docto',
    )
    monto_total = marshmallow.fields.Integer(
        required=True,
        load_from='Monto Total',
    )
    emisor_razon_social = marshmallow.fields.String(
        required=True,
        load_from='Razon Social',
    )

    ###########################################################################
    # fields whose value is set using data passed in the schema context
    ###########################################################################

    receptor_rut = mm_fields.RutField(
        required=True,
    )

    ###########################################################################
    # extra fields: not included in the returned struct
    ###########################################################################

    fecha_recepcion_dt = marshmallow.fields.DateTime(
        format='%d/%m/%Y %H:%M:%S',  # e.g. '23/10/2018 01:54:13'
        required=True,
        load_from='Fecha Recepcion',
    )
    fecha_reclamo_dt = marshmallow.fields.DateTime(
        # note: for some reason the rows with 'tipo_docto' equal to
        # '<RcvTipoDocto.NOTA_CREDITO_ELECTRONICA: 61>' (and maybe others as well) do not
        # have this field set (always? we do not know).
        format='%d/%m/%Y %H:%M:%S',  # e.g. '23/10/2018 01:54:13'
        required=False,
        allow_none=True,
        load_from='Fecha Reclamo',
    )

    @marshmallow.pre_load
    def preprocess(self, in_data: dict) -> dict:
        """Normalize the raw CSV row dict before field deserialization."""
        # note: required fields checks are run later on automatically thus we may not assume that
        # values of required fields (`required=True`) exist.

        # Set field value only if it was not in the input data.
        in_data.setdefault('receptor_rut', self.context['receptor_rut'])

        # Fix missing/default values.
        # note: for some reason the rows with 'tipo_docto' equal to
        # '<RcvTipoDocto.NOTA_CREDITO_ELECTRONICA: 61>' (and maybe others as well) do not
        # have this field set (always? we do not know).
        if 'Fecha Reclamo' in in_data:
            # An empty string or a value containing 'null' means "no value".
            if in_data['Fecha Reclamo'] == '' or 'null' in in_data['Fecha Reclamo']:
                in_data['Fecha Reclamo'] = None
        return in_data

    @marshmallow.post_load
    def postprocess(self, data: dict) -> dict:
        """Attach the SII timezone to parsed datetimes and strip whitespace."""
        # >>> data['fecha_recepcion_dt'].isoformat()
        # '2018-10-23T01:54:13'
        data['fecha_recepcion_dt'] = tz_utils.convert_naive_dt_to_tz_aware(
            dt=data['fecha_recepcion_dt'], tz=self.FIELD_FECHA_RECEPCION_DT_TZ)
        # >>> data['fecha_recepcion_dt'].isoformat()
        # '2018-10-23T01:54:13-03:00'
        # >>> data['fecha_recepcion_dt'].astimezone(pytz.UTC).isoformat()
        # '2018-10-23T04:54:13+00:00'

        # 'fecha_reclamo_dt' may be None (see field declaration); only convert
        # when a value is present.
        if data['fecha_reclamo_dt']:
            data['fecha_reclamo_dt'] = tz_utils.convert_naive_dt_to_tz_aware(
                dt=data['fecha_reclamo_dt'], tz=self.FIELD_FECHA_RECLAMO_DT_TZ)
        # note: to express this value in another timezone (but the value does not change), do
        # `dt_obj.astimezone(pytz.timezone('some timezone'))`

        # Remove leading and trailing whitespace.
        data['emisor_razon_social'] = data['emisor_razon_social'].strip()
        return data

    def to_detalle_entry(self, data: dict) -> RcReclamadoDetalleEntry:
        """
        Convert a deserialized row dict into an ``RcReclamadoDetalleEntry``.

        Raises ``ValueError`` (from ``KeyError``) if an expected key is
        missing, which indicates a programming error; entry-construction
        errors propagate unchanged.
        """
        try:
            emisor_rut: Rut = data['emisor_rut']  # type: ignore
            tipo_docto = data['tipo_docto']  # type: ignore
            folio: int = data['folio']  # type: ignore
            fecha_emision_date: date = data['fecha_emision_date']  # type: ignore
            receptor_rut: Rut = data['receptor_rut']  # type: ignore
            monto_total: int = data['monto_total']  # type: ignore
            emisor_razon_social: str = data['emisor_razon_social']  # type: ignore
            fecha_recepcion_dt: datetime = data['fecha_recepcion_dt']  # type: ignore
            fecha_reclamo_dt: Optional[datetime] = data['fecha_reclamo_dt']  # type: ignore
        except KeyError as exc:
            raise ValueError("Programming error: a referenced field is missing.") from exc

        try:
            detalle_entry = RcReclamadoDetalleEntry(
                emisor_rut=emisor_rut,
                tipo_docto=tipo_docto,
                folio=folio,
                fecha_emision_date=fecha_emision_date,
                receptor_rut=receptor_rut,
                monto_total=monto_total,
                emisor_razon_social=emisor_razon_social,
                fecha_recepcion_dt=fecha_recepcion_dt,
                fecha_reclamo_dt=fecha_reclamo_dt,
            )
        except (TypeError, ValueError):
            # Re-raised unchanged; kept as an explicit extension point for
            # wrapping construction errors later.
            raise
        return detalle_entry
class RcvCompraPendienteCsvRowSchema(_RcvCsvRowSchemaBase):
    """
    Schema for one row of the RC "pendiente" (pending purchases) CSV file.

    Deserializes the Spanish-header CSV columns into typed values and builds
    an ``RcPendienteDetalleEntry`` via :meth:`to_detalle_entry`.
    """

    # Timezone (SII official) attached in `postprocess()` to the naive
    # datetimes parsed from the CSV.
    FIELD_FECHA_RECEPCION_DT_TZ = SII_OFFICIAL_TZ
    # NOTE(review): no 'fecha_acuse_dt' field is declared in this schema even
    # though `preprocess()` touches the 'Fecha Acuse' column — presumably the
    # column is dropped by the caller; confirm whether this constant is used.
    FIELD_FECHA_ACUSE_DT_TZ = SII_OFFICIAL_TZ

    class Meta:
        strict = True

    ###########################################################################
    # basic fields
    ###########################################################################

    emisor_rut = mm_fields.RutField(
        required=True,
        load_from='RUT Proveedor',
    )
    tipo_docto = mm_fields.RcvTipoDoctoField(
        required=True,
        load_from='Tipo Doc',
    )
    folio = marshmallow.fields.Integer(
        required=True,
        load_from='Folio',
    )
    fecha_emision_date = mm_utils.CustomMarshmallowDateField(
        format='%d/%m/%Y',  # e.g. '22/10/2018'
        required=True,
        load_from='Fecha Docto',
    )
    monto_total = marshmallow.fields.Integer(
        required=True,
        load_from='Monto Total',
    )
    emisor_razon_social = marshmallow.fields.String(
        required=True,
        load_from='Razon Social',
    )

    ###########################################################################
    # fields whose value is set using data passed in the schema context
    ###########################################################################

    receptor_rut = mm_fields.RutField(
        required=True,
    )

    ###########################################################################
    # extra fields: not included in the returned struct
    ###########################################################################

    fecha_recepcion_dt = marshmallow.fields.DateTime(
        format='%d/%m/%Y %H:%M:%S',  # e.g. '23/10/2018 01:54:13'
        required=True,
        load_from='Fecha Recepcion',
    )

    @marshmallow.pre_load
    def preprocess(self, in_data: dict) -> dict:
        """Normalize the raw CSV row dict before field deserialization."""
        # note: required fields checks are run later on automatically thus we may not assume that
        # values of required fields (`required=True`) exist.

        # Set field value only if it was not in the input data.
        in_data.setdefault('receptor_rut', self.context['receptor_rut'])

        # Fix missing/default values.
        if 'Fecha Acuse' in in_data:
            # An empty string means "no value".
            if in_data['Fecha Acuse'] == '':
                in_data['Fecha Acuse'] = None
        return in_data

    @marshmallow.post_load
    def postprocess(self, data: dict) -> dict:
        """Attach the SII timezone to the parsed datetime and strip whitespace."""
        # >>> data['fecha_recepcion_dt'].isoformat()
        # '2018-10-23T01:54:13'
        data['fecha_recepcion_dt'] = tz_utils.convert_naive_dt_to_tz_aware(
            dt=data['fecha_recepcion_dt'], tz=self.FIELD_FECHA_RECEPCION_DT_TZ)
        # >>> data['fecha_recepcion_dt'].isoformat()
        # '2018-10-23T01:54:13-03:00'
        # >>> data['fecha_recepcion_dt'].astimezone(pytz.UTC).isoformat()
        # '2018-10-23T04:54:13+00:00'

        # note: to express this value in another timezone (but the value does not change), do
        # `dt_obj.astimezone(pytz.timezone('some timezone'))`

        # Remove leading and trailing whitespace.
        data['emisor_razon_social'] = data['emisor_razon_social'].strip()
        return data

    def to_detalle_entry(self, data: dict) -> RcPendienteDetalleEntry:
        """
        Convert a deserialized row dict into an ``RcPendienteDetalleEntry``.

        Raises ``ValueError`` (from ``KeyError``) if an expected key is
        missing, which indicates a programming error; entry-construction
        errors propagate unchanged.
        """
        try:
            emisor_rut: Rut = data['emisor_rut']  # type: ignore
            tipo_docto = data['tipo_docto']  # type: ignore
            folio: int = data['folio']  # type: ignore
            fecha_emision_date: date = data['fecha_emision_date']  # type: ignore
            receptor_rut: Rut = data['receptor_rut']  # type: ignore
            monto_total: int = data['monto_total']  # type: ignore
            emisor_razon_social: str = data['emisor_razon_social']  # type: ignore
            fecha_recepcion_dt: datetime = data['fecha_recepcion_dt']  # type: ignore
        except KeyError as exc:
            raise ValueError("Programming error: a referenced field is missing.") from exc

        try:
            detalle_entry = RcPendienteDetalleEntry(
                emisor_rut=emisor_rut,
                tipo_docto=tipo_docto,
                folio=folio,
                fecha_emision_date=fecha_emision_date,
                receptor_rut=receptor_rut,
                monto_total=monto_total,
                emisor_razon_social=emisor_razon_social,
                fecha_recepcion_dt=fecha_recepcion_dt,
            )
        except (TypeError, ValueError):
            # Re-raised unchanged; kept as an explicit extension point for
            # wrapping construction errors later.
            raise
        return detalle_entry
###############################################################################
# helpers
###############################################################################
class _RcvCsvDialect(csv.Dialect):
"""
CSV dialect of RCV CSV files.
The properties of this dialect were determined with the help of
:class:`csv.Sniffer`.
>>> import gzip
>>> filename = 'SII-download-RCV-file-http-body-response.csv.gz'
>>> with gzip.open(filename, 'rt', encoding='utf-8') as f:
... dialect = csv.Sniffer().sniff(f.read(50 * 1024))
"""
delimiter = ';'
quotechar = '"'
escapechar = None
doublequote = False
skipinitialspace = False
lineterminator = '\r\n'
quoting = csv.QUOTE_MINIMAL
def _parse_rcv_csv_file(
    input_csv_row_schema: _RcvCsvRowSchemaBase,
    expected_input_field_names: Sequence[str],
    fields_to_remove_names: Sequence[str],
    input_file_path: str,
    n_rows_offset: int,
    max_n_rows: Optional[int] = None,
) -> Iterable[Tuple[Optional[RcvDetalleEntry], int, Dict[str, object], Dict[str, object]]]:
    """
    Parse entries from an RC or RV (CSV file).

    Common implementation for the different alternatives that depend on the
    kind of RC and RV.

    :param input_csv_row_schema: schema used to deserialize each CSV row.
    :param expected_input_field_names: exact CSV header field names expected.
    :param fields_to_remove_names: deserialized fields to drop from each row;
        must be a subset of ``expected_input_field_names``.
    :param input_file_path: path of the UTF-8 CSV file to read.
    :param n_rows_offset: number of leading data rows to skip.
    :param max_n_rows: maximum number of rows to process (``None`` means all).
    :return: iterable of ``(entry, row_ix, row_data, row_errors)``; ``entry``
        is ``None`` when the row failed validation or conversion, and
        ``row_errors`` then carries the details under ``'validation'`` /
        ``'other'``.
    :raises Exception: if a field to remove is not among the expected fields
        (programming error).
    """
    for field_to_remove_name in fields_to_remove_names:
        if field_to_remove_name not in expected_input_field_names:
            raise Exception(
                "Programming error: field to remove is not one of the expected ones.",
                field_to_remove_name)

    _CSV_ROW_DICT_EXTRA_FIELDS_KEY = '_extra_csv_fields_data'
    # Build a new tuple instead of using `+=`: augmented assignment on a
    # caller-provided *list* would mutate the caller's argument in place.
    fields_to_remove_names = tuple(fields_to_remove_names) + (_CSV_ROW_DICT_EXTRA_FIELDS_KEY, )

    input_data_enc = 'utf-8'
    # note:
    # > If csvfile is a file object, it should be opened with newline=''
    # https://docs.python.org/3/library/csv.html#csv.reader
    with open(input_file_path, mode='rt', encoding=input_data_enc, newline='') as input_f:
        # Create a CSV reader, with auto-detection of header names (first row).
        csv_reader = csv_utils.create_csv_dict_reader(
            input_f,
            csv_dialect=_RcvCsvDialect,
            row_dict_extra_fields_key=_CSV_ROW_DICT_EXTRA_FIELDS_KEY,
            expected_fields_strict=True,
            expected_field_names=expected_input_field_names,
        )
        g = rows_processing.csv_rows_mm_deserialization_iterator(
            csv_reader,
            row_schema=input_csv_row_schema,
            n_rows_offset=n_rows_offset,
            max_n_rows=max_n_rows,
            fields_to_remove_names=fields_to_remove_names,
        )

        for row_ix, row_data, deserialized_row_data, validation_errors in g:
            entry: Optional[RcvDetalleEntry] = None
            row_errors: Dict[str, object] = {}
            conversion_error = None
            if not validation_errors:
                try:
                    entry = input_csv_row_schema.to_detalle_entry(deserialized_row_data)
                except Exception as exc:
                    conversion_error = str(exc)
                    logger.exception(
                        "Deserialized data to data model instance conversion failed "
                        "(probably a programming error).")

            # Instead of empty dicts, lists, str, etc, we want to have None.
            if validation_errors:
                row_errors['validation'] = validation_errors
            if conversion_error:
                row_errors['other'] = conversion_error

            yield entry, row_ix, row_data, row_errors
| 35.386039
| 99
| 0.586063
| 4,755
| 42,074
| 4.948475
| 0.08938
| 0.039269
| 0.038759
| 0.023799
| 0.831662
| 0.821589
| 0.79422
| 0.787973
| 0.7813
| 0.779516
| 0
| 0.012137
| 0.271498
| 42,074
| 1,188
| 100
| 35.415825
| 0.755538
| 0.182512
| 0
| 0.739232
| 0
| 0
| 0.213229
| 0.002665
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025611
| false
| 0.001164
| 0.016298
| 0
| 0.131548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
368e0aba44e309d7a6dcbdbbcfd093b2262d2ba7
| 58
|
py
|
Python
|
OPCDataTransfer/Loader/__init__.py
|
Shanginre/OPCDataTransfer
|
f72d1afc6909f371ce17123828c354efa369a5be
|
[
"MIT"
] | 1
|
2021-01-08T01:45:30.000Z
|
2021-01-08T01:45:30.000Z
|
OPCDataTransfer/Loader/__init__.py
|
Shanginre/OPCDataTransfer
|
f72d1afc6909f371ce17123828c354efa369a5be
|
[
"MIT"
] | null | null | null |
OPCDataTransfer/Loader/__init__.py
|
Shanginre/OPCDataTransfer
|
f72d1afc6909f371ce17123828c354efa369a5be
|
[
"MIT"
] | null | null | null |
from .Loader import LoaderType
from .Loader import Loader
| 19.333333
| 30
| 0.827586
| 8
| 58
| 6
| 0.5
| 0.416667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 58
| 2
| 31
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7fc3c40748a7c340e33064815c26258ffc95eabe
| 6,675
|
py
|
Python
|
tests/tests_test_workflow/test_test_args.py
|
abhinavGupta16/opensearch-build
|
28d5c7b683276b2bf78a8d28a06aa4090e0edbe2
|
[
"Apache-2.0"
] | null | null | null |
tests/tests_test_workflow/test_test_args.py
|
abhinavGupta16/opensearch-build
|
28d5c7b683276b2bf78a8d28a06aa4090e0edbe2
|
[
"Apache-2.0"
] | null | null | null |
tests/tests_test_workflow/test_test_args.py
|
abhinavGupta16/opensearch-build
|
28d5c7b683276b2bf78a8d28a06aa4090e0edbe2
|
[
"Apache-2.0"
] | null | null | null |
import logging
import os
import unittest
from unittest.mock import patch
from test_workflow.test_args import TestArgs
class TestTestArgs(unittest.TestCase):
    """Unit tests for ``TestArgs`` command-line argument parsing."""

    # Script path used as argv[0] so argparse reports a realistic program name.
    ARGS_PY = os.path.realpath(
        os.path.join(
            os.path.dirname(__file__), "..", "..", "src", "run_bwc_test.py"
        )
    )

    # Fixture locations under the local ``data`` directory.
    PATH = os.path.join(
        os.path.dirname(__file__), "data"
    )
    TEST_MANIFEST_PATH = os.path.join(
        os.path.dirname(__file__), "data", "test_manifest.yml"
    )
    TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH = os.path.join(
        os.path.dirname(__file__), "data", "test-manifest-opensearch-dashboards.yml"
    )

    @patch("argparse._sys.argv", [ARGS_PY, TEST_MANIFEST_PATH])
    def test_opensearch_default_with_opensearch_test_manifest(self):
        """Defaults when only an OpenSearch test manifest is given."""
        test_args = TestArgs()
        self.assertFalse(hasattr(test_args, "opensearch"))
        # NOTE(review): a hyphenated attribute name can never exist on an
        # object, so this assertion is vacuous — confirm intent.
        self.assertFalse(hasattr(test_args, "opensearch-dashboards"))
        self.assertIsNotNone(test_args.test_run_id)
        self.assertIsNone(test_args.components)
        self.assertFalse(test_args.keep)
        self.assertEqual(test_args.logging_level, logging.INFO)
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_PATH)

    @patch("argparse._sys.argv", [ARGS_PY, TEST_MANIFEST_PATH, "--component", "foo", "bar"])
    def test_components(self):
        """``--component`` collects multiple component names into a list."""
        test_args = TestArgs()
        self.assertEqual(test_args.components, ["foo", "bar"])

    @patch("argparse._sys.argv", [ARGS_PY, TEST_MANIFEST_PATH, "--paths", "opensearch=" + TEST_MANIFEST_PATH])
    def test_opensearch_file_with_opensearch_test_manifest(self):
        """A local ``--paths opensearch=<file>`` value is realpath-normalized."""
        test_args = TestArgs()
        self.assertEqual(test_args.paths.get("opensearch"), os.path.realpath(self.TEST_MANIFEST_PATH))
        self.assertFalse(hasattr(test_args.paths, "opensearch-dashboards"))
        self.assertIsNotNone(test_args.test_run_id)
        self.assertIsNone(test_args.components)
        self.assertFalse(test_args.keep)
        self.assertEqual(test_args.logging_level, logging.INFO)
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_PATH)

    @patch("argparse._sys.argv", [ARGS_PY, TEST_MANIFEST_PATH, "--paths", "opensearch=https://ci.opensearch.org/x/y", "--verbose"])
    def test_opensearch_url_with_opensearch_test_manifest(self):
        """A URL ``--paths`` value is kept verbatim; ``--verbose`` sets DEBUG."""
        test_args = TestArgs()
        self.assertEqual(test_args.paths.get("opensearch"), "https://ci.opensearch.org/x/y")
        self.assertFalse(hasattr(test_args.paths, "opensearch-dashboards"))
        self.assertIsNotNone(test_args.test_run_id)
        self.assertIsNone(test_args.components)
        self.assertFalse(test_args.keep)
        self.assertEqual(test_args.logging_level, logging.DEBUG)
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_PATH)

    @patch("argparse._sys.argv", [ARGS_PY, TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH, "--paths", "opensearch=" + TEST_MANIFEST_PATH])
    def test_opensearch_dashboards_default_with_opensearch_dashboards_test_manifest(self):
        """Dashboards manifest with only an ``opensearch`` path supplied."""
        test_args = TestArgs()
        self.assertFalse(hasattr(test_args.paths, "opensearch-dashboards"))
        self.assertEqual(test_args.paths.get("opensearch"), self.TEST_MANIFEST_PATH)
        self.assertIsNotNone(test_args.test_run_id)
        self.assertIsNone(test_args.components)
        self.assertFalse(test_args.keep)
        self.assertEqual(test_args.logging_level, logging.INFO)
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH)

    @patch(
        "argparse._sys.argv",
        [
            ARGS_PY,
            TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH,
            "--paths",
            "opensearch-dashboards=" + TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH,
            "opensearch=" + TEST_MANIFEST_PATH
        ]
    )
    def test_opensearch_dashboards_file_with_opensearch_dashboards_test_manifest(self):
        """Both ``opensearch`` and ``opensearch-dashboards`` local paths parse."""
        test_args = TestArgs()
        self.assertEqual(test_args.paths.get("opensearch-dashboards"), self.TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH)
        self.assertEqual(test_args.paths.get("opensearch"), self.TEST_MANIFEST_PATH)
        self.assertIsNotNone(test_args.test_run_id)
        self.assertIsNone(test_args.components)
        self.assertFalse(test_args.keep)
        self.assertEqual(test_args.logging_level, logging.INFO)
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH)

    @patch("argparse._sys.argv", [ARGS_PY, TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH, "--paths", "opensearch-dashboards=https://ci.opensearch.org/x/y", "opensearch=" + TEST_MANIFEST_PATH])
    def test_opensearch_dashboards_url_with_opensearch_dashboards_test_manifest(self):
        """Dashboards URL plus OpenSearch local path parse together."""
        test_args = TestArgs()
        self.assertEqual(test_args.paths.get("opensearch-dashboards"), "https://ci.opensearch.org/x/y")
        self.assertEqual(test_args.paths.get("opensearch"), self.TEST_MANIFEST_PATH)
        self.assertIsNotNone(test_args.test_run_id)
        self.assertIsNone(test_args.components)
        self.assertFalse(test_args.keep)
        self.assertEqual(test_args.logging_level, logging.INFO)
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH)

    @patch(
        "argparse._sys.argv",
        [
            ARGS_PY,
            TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH,
            "--paths",
            "opensearch=https://ci.opensearch.org/x/y",
            "opensearch-dashboards=https://ci.opensearch.org/x/y/dashboards",
            "--verbose"
        ]
    )
    def test_opensearch_url_opensearch_dashboards_url_with_opensearch_dashboards_test_manifest(self):
        """Both paths as URLs are kept verbatim."""
        test_args = TestArgs()
        self.assertEqual(test_args.paths.get("opensearch"), "https://ci.opensearch.org/x/y")
        self.assertEqual(test_args.paths.get("opensearch-dashboards"), "https://ci.opensearch.org/x/y/dashboards")
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_OPENSEARCH_DASHBOARDS_PATH)

    @patch("argparse._sys.argv", [ARGS_PY, TEST_MANIFEST_PATH, "--paths", "opensearch=" + TEST_MANIFEST_PATH, "--test-run-id", "6"])
    def test_run_id(self):
        """``--test-run-id`` is parsed into an integer."""
        test_args = TestArgs()
        self.assertEqual(test_args.test_run_id, 6)
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_PATH)

    @patch("argparse._sys.argv", [ARGS_PY, TEST_MANIFEST_PATH, "--paths", "opensearch=" + TEST_MANIFEST_PATH, "--verbose"])
    def test_verbose(self):
        """``--verbose`` lowers the logging level to DEBUG."""
        test_args = TestArgs()
        self.assertEqual(test_args.logging_level, logging.DEBUG)
        self.assertEqual(test_args.test_manifest_path, self.TEST_MANIFEST_PATH)
| 46.678322
| 187
| 0.715805
| 807
| 6,675
| 5.567534
| 0.07311
| 0.108613
| 0.110394
| 0.138215
| 0.928333
| 0.894725
| 0.873136
| 0.867127
| 0.817271
| 0.764523
| 0
| 0.00036
| 0.166891
| 6,675
| 142
| 188
| 47.007042
| 0.807589
| 0
| 0
| 0.521008
| 0
| 0
| 0.15206
| 0.031161
| 0
| 0
| 0
| 0
| 0.420168
| 1
| 0.084034
| false
| 0
| 0.042017
| 0
| 0.168067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ffad01c5744b094aa85ed511acb6cd8a58e8435
| 108
|
py
|
Python
|
descwl_shear_sims/lsst_bits.py
|
LSSTDESC/descwl_shear_sims
|
1c696518104b7f301dd6c69571239431c6232110
|
[
"BSD-3-Clause"
] | null | null | null |
descwl_shear_sims/lsst_bits.py
|
LSSTDESC/descwl_shear_sims
|
1c696518104b7f301dd6c69571239431c6232110
|
[
"BSD-3-Clause"
] | 11
|
2019-12-10T23:30:27.000Z
|
2019-12-24T13:59:32.000Z
|
descwl_shear_sims/lsst_bits.py
|
LSSTDESC/wl-shear-testing-sims
|
6e4a0baa6f664b5bc52b08b55614eaa58c8b0748
|
[
"BSD-3-Clause"
] | null | null | null |
import lsst.afw.image as afw_image
def get_flagval(name: str) -> int:
    """Return the bit-mask value of the named LSST afw mask plane."""
    return afw_image.Mask.getPlaneBitMask(name)
| 18
| 47
| 0.787037
| 17
| 108
| 4.823529
| 0.705882
| 0.292683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 108
| 5
| 48
| 21.6
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
3d4e671f35169a50ae3f10b3ed93e359d9464b2d
| 5,071
|
py
|
Python
|
src/models/inference_model.py
|
lightd22/draftisim
|
466de8c87d9782735e865fdd83fa5fd87f8a7719
|
[
"Apache-2.0"
] | 22
|
2017-12-26T12:10:12.000Z
|
2022-02-23T08:45:54.000Z
|
src/models/inference_model.py
|
lightd22/draftisim
|
466de8c87d9782735e865fdd83fa5fd87f8a7719
|
[
"Apache-2.0"
] | 7
|
2018-02-22T18:43:11.000Z
|
2021-08-25T14:46:43.000Z
|
src/models/inference_model.py
|
lightd22/draftisim
|
466de8c87d9782735e865fdd83fa5fd87f8a7719
|
[
"Apache-2.0"
] | 4
|
2018-01-14T15:57:38.000Z
|
2021-01-21T01:05:11.000Z
|
import tensorflow as tf
from . import base_model
class QNetInferenceModel(base_model.BaseModel):
    """
    Inference-only wrapper around a saved Q-network TensorFlow graph.

    Restores the graph from ``<path>.ckpt.meta`` / ``<path>.ckpt`` and exposes
    Q-value and greedy-action prediction over batches of draft states.
    """

    def __init__(self, name, path):
        super().__init__(name=name, path=path)
        self.init_saver()
        self.ops_dict = self.build_model()

    def init_saver(self):
        """Import the saved meta-graph and restore weights into ``self.sess``."""
        with self._graph.as_default():
            self.saver = tf.train.import_meta_graph("{path}.ckpt.meta".format(path=self._path_to_model))
            self.saver.restore(self.sess, "{path}.ckpt".format(path=self._path_to_model))

    def build_model(self):
        """Return the graph tensors used for inference, keyed by role."""
        ops_dict = {}
        with self._graph.as_default():
            graph = tf.get_default_graph()
            ops_dict["predict_q"] = graph.get_tensor_by_name("online/valid_q_vals:0")
            ops_dict["prediction"] = graph.get_tensor_by_name("online/prediction:0")
            ops_dict["input"] = graph.get_tensor_by_name("online/inputs:0")
            ops_dict["valid_actions"] = graph.get_tensor_by_name("online/valid_actions:0")
        return ops_dict

    def _build_feed_dict(self, states):
        """Build the feed dict of formatted state inputs and valid-action masks.

        Shared by :meth:`predict` and :meth:`predict_action`, which previously
        duplicated this construction.
        """
        inputs = [state.format_state() for state in states]
        valid_actions = [state.get_valid_actions() for state in states]
        return {
            self.ops_dict["input"]: inputs,
            self.ops_dict["valid_actions"]: valid_actions,
        }

    def predict(self, states):
        """
        Feeds state into model and returns current predicted Q-values.

        Args:
            states (list of DraftStates): states to predict from
        Returns:
            predicted_Q (numpy array): model estimates of Q-values for actions from input states.
            predicted_Q[k,:] holds Q-values for state states[k]
        """
        return self.sess.run(self.ops_dict["predict_q"], feed_dict=self._build_feed_dict(states))

    def predict_action(self, states):
        """
        Feeds state into model and return recommended action to take from input state based on estimated Q-values.

        Args:
            state (list of DraftStates): states to predict from
        Returns:
            predicted_action (numpy array): array of integer representations of actions recommended by model.
        """
        return self.sess.run(self.ops_dict["prediction"], feed_dict=self._build_feed_dict(states))
class SoftmaxInferenceModel(base_model.BaseModel):
    """
    Inference-only wrapper around a saved softmax-policy TensorFlow graph.

    Restores the graph from ``<path>.ckpt.meta`` / ``<path>.ckpt`` and exposes
    action-probability and action prediction over batches of draft states.
    """

    def __init__(self, name, path):
        super().__init__(name=name, path=path)
        self.init_saver()
        self.ops_dict = self.build_model()

    def init_saver(self):
        """Import the saved meta-graph and restore weights into ``self.sess``."""
        with self._graph.as_default():
            self.saver = tf.train.import_meta_graph("{path}.ckpt.meta".format(path=self._path_to_model))
            self.saver.restore(self.sess, "{path}.ckpt".format(path=self._path_to_model))

    def build_model(self):
        """Return the graph tensors used for inference, keyed by role."""
        ops_dict = {}
        with self._graph.as_default():
            graph = tf.get_default_graph()
            ops_dict["probabilities"] = graph.get_tensor_by_name("softmax/action_probabilites:0")
            ops_dict["prediction"] = graph.get_tensor_by_name("softmax/predictions:0")
            ops_dict["input"] = graph.get_tensor_by_name("softmax/inputs:0")
            ops_dict["valid_actions"] = graph.get_tensor_by_name("softmax/valid_actions:0")
        return ops_dict

    def _build_feed_dict(self, states):
        """Build the feed dict of formatted state inputs and valid-action masks.

        Shared by :meth:`predict` and :meth:`predict_action`, which previously
        duplicated this construction.
        """
        inputs = [state.format_state() for state in states]
        valid_actions = [state.get_valid_actions() for state in states]
        return {
            self.ops_dict["input"]: inputs,
            self.ops_dict["valid_actions"]: valid_actions,
        }

    def predict(self, states):
        """
        Feeds state into model and returns current predicted probabilities.

        Args:
            states (list of DraftStates): states to predict from
        Returns:
            probabilities (numpy array): model estimates of probabilities for actions from input states.
            probabilities[k,:] holds Q-values for state states[k]
        """
        return self.sess.run(self.ops_dict["probabilities"], feed_dict=self._build_feed_dict(states))

    def predict_action(self, states):
        """
        Feeds state into model and return recommended action to take from input state based on estimated Q-values.

        Args:
            state (list of DraftStates): states to predict from
        Returns:
            predicted_action (numpy array): array of integer representations of actions recommended by model.
        """
        return self.sess.run(self.ops_dict["prediction"], feed_dict=self._build_feed_dict(states))
| 46.953704
| 114
| 0.659239
| 657
| 5,071
| 4.82344
| 0.120244
| 0.057431
| 0.055538
| 0.042916
| 0.91196
| 0.872831
| 0.849164
| 0.849164
| 0.849164
| 0.823604
| 0
| 0.002064
| 0.235654
| 5,071
| 107
| 115
| 47.392523
| 0.815531
| 0.224611
| 0
| 0.75
| 0
| 0
| 0.111502
| 0.031394
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15625
| false
| 0
| 0.0625
| 0
| 0.34375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
180c7d7216fe6868d9bb1796dc58d2d6f12b4d53
| 1,564
|
py
|
Python
|
trisicell/datasets/__init__.py
|
faridrashidi/trisicell
|
4db89edd44c03ccb6c7d3477beff0079c3ff8035
|
[
"BSD-3-Clause"
] | 2
|
2021-07-02T13:53:15.000Z
|
2021-11-16T03:14:36.000Z
|
trisicell/datasets/__init__.py
|
faridrashidi/trisicell
|
4db89edd44c03ccb6c7d3477beff0079c3ff8035
|
[
"BSD-3-Clause"
] | 58
|
2021-06-14T17:14:39.000Z
|
2022-03-11T19:32:54.000Z
|
trisicell/datasets/__init__.py
|
faridrashidi/trisicell
|
4db89edd44c03ccb6c7d3477beff0079c3ff8035
|
[
"BSD-3-Clause"
] | null | null | null |
"""Datasets Module."""
from trisicell.datasets._datasets import (
acute_lymphocytic_leukemia1,
acute_lymphocytic_leukemia2,
acute_lymphocytic_leukemia3,
acute_lymphocytic_leukemia4,
acute_lymphocytic_leukemia5,
acute_lymphocytic_leukemia6,
colorectal1,
colorectal2,
colorectal3,
erbc,
example,
high_grade_serous_ovarian_cancer1,
high_grade_serous_ovarian_cancer2,
high_grade_serous_ovarian_cancer3,
high_grade_serous_ovarian_cancer_3celllines,
melanoma20,
muscle_invasive_bladder,
myeloproliferative_neoplasms18,
myeloproliferative_neoplasms78,
myeloproliferative_neoplasms712,
oligodendroglioma_idh_mutated_tumor,
renal_cell_carcinoma,
test,
tnbc,
)
from trisicell.datasets._simulate import add_doublets, add_noise, simulate
__all__ = (
acute_lymphocytic_leukemia1,
acute_lymphocytic_leukemia2,
acute_lymphocytic_leukemia3,
acute_lymphocytic_leukemia4,
acute_lymphocytic_leukemia5,
acute_lymphocytic_leukemia6,
colorectal1,
colorectal2,
colorectal3,
erbc,
example,
high_grade_serous_ovarian_cancer1,
high_grade_serous_ovarian_cancer2,
high_grade_serous_ovarian_cancer3,
high_grade_serous_ovarian_cancer_3celllines,
melanoma20,
muscle_invasive_bladder,
myeloproliferative_neoplasms18,
myeloproliferative_neoplasms78,
myeloproliferative_neoplasms712,
oligodendroglioma_idh_mutated_tumor,
renal_cell_carcinoma,
test,
tnbc,
simulate,
add_noise,
add_doublets,
)
| 26.066667
| 74
| 0.780691
| 149
| 1,564
| 7.604027
| 0.328859
| 0.169462
| 0.105914
| 0.15534
| 0.875552
| 0.875552
| 0.875552
| 0.875552
| 0.875552
| 0.875552
| 0
| 0.034109
| 0.175192
| 1,564
| 59
| 75
| 26.508475
| 0.844186
| 0.01023
| 0
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035714
| 0
| 0.035714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
182d1b84d428a5fd74bc875dc87987c3a17a73cf
| 31,936
|
py
|
Python
|
tests/unit/controllers/platform/test_initialize.py
|
senstb/aws-elastic-beanstalk-cli
|
ef27ae50e8be34ccbe29bc6dc421323bddc3f485
|
[
"Apache-2.0"
] | 110
|
2020-01-15T22:58:46.000Z
|
2022-03-27T20:47:33.000Z
|
tests/unit/controllers/platform/test_initialize.py
|
senstb/aws-elastic-beanstalk-cli
|
ef27ae50e8be34ccbe29bc6dc421323bddc3f485
|
[
"Apache-2.0"
] | 89
|
2020-01-15T23:18:34.000Z
|
2022-03-31T21:56:05.000Z
|
tests/unit/controllers/platform/test_initialize.py
|
senstb/aws-elastic-beanstalk-cli
|
ef27ae50e8be34ccbe29bc6dc421323bddc3f485
|
[
"Apache-2.0"
] | 50
|
2020-01-15T22:58:53.000Z
|
2022-02-11T17:39:28.000Z
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import shutil
import mock
import unittest
from ebcli.core import fileoperations
from ebcli.core.ebcore import EB
from ebcli.core.ebpcore import EBP
from ebcli.controllers.platform import initialize
from ebcli.lib import aws
from ebcli.objects.platform import PlatformVersion
class TestInitialize(unittest.TestCase):
    """Base fixture: each test runs inside a throw-away ``testDir`` directory."""

    platform = PlatformVersion(
        'arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5'
    )

    def setUp(self):
        """Remember the original CWD, clear the AWS region, enter a scratch dir."""
        self.root_dir = os.getcwd()
        aws.set_region(None)
        if not os.path.exists('testDir'):
            os.mkdir('testDir')
        os.chdir('testDir')

    def tearDown(self):
        """Return to the original CWD, then remove the scratch directory."""
        os.chdir(self.root_dir)
        shutil.rmtree('testDir', ignore_errors=True)
class TestEBPlatform(TestInitialize):
def test_init__attempt_to_init_inside_application_workspace(self):
    """``eb platform init`` must refuse to run in an application workspace."""
    # Simulate an already-initialized application workspace.
    fileoperations.create_config_file(
        'my-application',
        'us-west-2',
        'php',
    )

    app = EB(argv=['platform', 'init'])
    app.setup()
    with self.assertRaises(EnvironmentError) as context_manager:
        app.run()

    self.assertEqual(
        'This directory is already initialized with an application workspace.',
        str(context_manager.exception)
    )
@mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
@mock.patch('ebcli.controllers.platform.initialize.commonops.get_region_from_inputs')
@mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
@mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
@mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
@mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
def test_init__non_interactive_mode(
        self,
        # Mocks are injected bottom-up: the innermost (last) decorator maps to
        # the first mock parameter.
        get_platform_name_and_version_mock,
        setup_mock,
        set_region_mock,
        set_up_credentials_mock,
        get_region_from_inputs_mock,
        touch_config_folder_mock,
        write_keyname_mock,
        set_workspace_to_latest_mock
):
    """Non-interactive init with a platform name and no version/keyname."""
    get_region_from_inputs_mock.return_value = 'us-west-2'
    get_platform_name_and_version_mock.return_value = ('my-custom-platform', None)
    set_up_credentials_mock.return_value = 'us-west-2'

    app = EB(argv=['platform', 'init', 'my-custom-platform'])
    app.setup()
    app.run()

    set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
    set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', False)
    setup_mock.assert_called_once_with(
        'Custom Platform Builder',
        'us-west-2',
        None,
        platform_name='my-custom-platform',
        platform_version=None,
        workspace_type='Platform'
    )
    touch_config_folder_mock.assert_called_once_with()
    # No keyname was given, so None is persisted and the workspace is
    # pointed at the latest platform version.
    write_keyname_mock.assert_called_once_with(None)
    set_workspace_to_latest_mock.assert_called_once_with()
@mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
@mock.patch('ebcli.controllers.platform.initialize.commonops.get_region_from_inputs')
@mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
@mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
@mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
@mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
def test_init__non_interactive_mode__keyname_specified(
        self,
        # Mocks are injected bottom-up: the innermost (last) decorator maps to
        # the first mock parameter.
        get_platform_name_and_version_mock,
        setup_mock,
        set_region_mock,
        set_up_credentials_mock,
        get_region_from_inputs_mock,
        touch_config_folder_mock,
        write_keyname_mock,
        set_workspace_to_latest_mock
):
    """Non-interactive init with a resolved version and ``-k keyname``."""
    get_region_from_inputs_mock.return_value = 'us-west-2'
    get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
    set_up_credentials_mock.return_value = 'us-west-2'

    app = EB(argv=['platform', 'init', 'my-custom-platform', '-k', 'keyname'])
    app.setup()
    app.run()

    set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
    set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', False)
    setup_mock.assert_called_once_with(
        'Custom Platform Builder',
        'us-west-2',
        None,
        platform_name='my-custom-platform',
        platform_version='1.0.3',
        workspace_type='Platform'
    )
    touch_config_folder_mock.assert_called_once_with()
    write_keyname_mock.assert_called_once_with('keyname')
    # A concrete platform version was resolved, so the workspace is not
    # pointed at the latest version.
    set_workspace_to_latest_mock.assert_not_called()
@mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
@mock.patch('ebcli.controllers.platform.initialize.commonops.get_region')
@mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
@mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
@mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
@mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
@mock.patch('ebcli.controllers.platform.initialize.get_keyname')
def test_init__force_interactive_mode_by_not_specifying_the_platform(
        self,
        # mock parameters are in reverse order of the decorators above
        get_keyname_mock,
        get_platform_name_and_version_mock,
        setup_mock,
        set_region_mock,
        set_up_credentials_mock,
        get_region_mock,
        touch_config_folder_mock,
        write_keyname_mock,
        set_workspace_to_latest_mock
):
    """`eb platform init` without a platform argument forces interactive mode.

    Region and keyname are prompted for (mocked here); the interactive
    flag (True) must be propagated to region/credential helpers.
    """
    get_region_mock.return_value = 'us-west-2'
    get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
    set_up_credentials_mock.return_value = 'us-west-2'
    get_keyname_mock.return_value = 'keyname'

    app = EB(argv=['platform', 'init'])
    app.setup()
    app.run()

    set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
    # third argument True => interactive credential setup
    set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', True)
    setup_mock.assert_called_once_with(
        'Custom Platform Builder',
        'us-west-2',
        None,
        platform_name='my-custom-platform',
        platform_version='1.0.3',
        workspace_type='Platform'
    )
    get_region_mock.assert_called_once_with(None, True)
    touch_config_folder_mock.assert_called_once_with()
    write_keyname_mock.assert_called_once_with('keyname')
    set_workspace_to_latest_mock.assert_not_called()
@mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
@mock.patch('ebcli.controllers.platform.initialize.commonops.get_region')
@mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
@mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
@mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
@mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
@mock.patch('ebcli.controllers.platform.initialize.get_keyname')
def test_init__force_interactive_mode_by_passing_interactive_argument(
        self,
        # mock parameters are in reverse order of the decorators above
        get_keyname_mock,
        get_platform_name_and_version_mock,
        setup_mock,
        set_region_mock,
        set_up_credentials_mock,
        get_region_mock,
        touch_config_folder_mock,
        write_keyname_mock,
        set_workspace_to_latest_mock
):
    """`eb platform init <name> -i`: `-i` forces interactive mode even
    though the platform name was given on the command line."""
    get_region_mock.return_value = 'us-west-2'
    get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
    set_up_credentials_mock.return_value = 'us-west-2'
    get_keyname_mock.return_value = 'keyname'

    app = EB(argv=['platform', 'init', 'my-custom-platform', '-i'])
    app.setup()
    app.run()

    set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
    # third argument True => interactive credential setup
    set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', True)
    setup_mock.assert_called_once_with(
        'Custom Platform Builder',
        'us-west-2',
        None,
        platform_name='my-custom-platform',
        platform_version='1.0.3',
        workspace_type='Platform'
    )
    get_region_mock.assert_called_once_with(None, True)
    touch_config_folder_mock.assert_called_once_with()
    write_keyname_mock.assert_called_once_with('keyname')
    set_workspace_to_latest_mock.assert_not_called()
@mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
@mock.patch('ebcli.controllers.platform.initialize.commonops.get_region')
@mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
@mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
@mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
@mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
@mock.patch('ebcli.controllers.platform.initialize.get_keyname')
def test_init__force_interactive_mode_by_passing_interactive_argument_and_omitting_platform_argument(
        self,
        # mock parameters are in reverse order of the decorators above
        get_keyname_mock,
        get_platform_name_and_version_mock,
        setup_mock,
        set_region_mock,
        set_up_credentials_mock,
        get_region_mock,
        touch_config_folder_mock,
        write_keyname_mock,
        set_workspace_to_latest_mock
):
    """`eb platform init -i` with no platform argument: fully interactive;
    platform name/version come from the (mocked) interactive resolver."""
    get_region_mock.return_value = 'us-west-2'
    get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
    set_up_credentials_mock.return_value = 'us-west-2'
    get_keyname_mock.return_value = 'keyname'

    app = EB(argv=['platform', 'init', '-i'])
    app.setup()
    app.run()

    set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
    # third argument True => interactive credential setup
    set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', True)
    setup_mock.assert_called_once_with(
        'Custom Platform Builder',
        'us-west-2',
        None,
        platform_name='my-custom-platform',
        platform_version='1.0.3',
        workspace_type='Platform'
    )
    get_region_mock.assert_called_once_with(None, True)
    touch_config_folder_mock.assert_called_once_with()
    write_keyname_mock.assert_called_once_with('keyname')
    set_workspace_to_latest_mock.assert_not_called()
@mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
@mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
@mock.patch('ebcli.controllers.platform.initialize.commonops.get_region')
@mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
@mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
@mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
@mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
@mock.patch('ebcli.controllers.platform.initialize.get_keyname')
def test_init__interactive_mode__pass_keyname_in_interactive(
        self,
        # mock parameters are in reverse order of the decorators above
        get_keyname_mock,
        get_platform_name_and_version_mock,
        setup_mock,
        set_region_mock,
        set_up_credentials_mock,
        get_region_mock,
        touch_config_folder_mock,
        write_keyname_mock,
        set_workspace_to_latest_mock
):
    """`eb platform init -k <keyname>`: an explicit `-k` suppresses the
    keyname prompt (`get_keyname` must not be called) even though the
    missing platform argument makes the rest of the flow interactive."""
    get_region_mock.return_value = 'us-west-2'
    get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
    set_up_credentials_mock.return_value = 'us-west-2'

    app = EB(argv=['platform', 'init', '-k', 'keyname'])
    app.setup()
    app.run()

    set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
    set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', True)
    setup_mock.assert_called_once_with(
        'Custom Platform Builder',
        'us-west-2',
        None,
        platform_name='my-custom-platform',
        platform_version='1.0.3',
        workspace_type='Platform'
    )
    get_region_mock.assert_called_once_with(None, True)
    touch_config_folder_mock.assert_called_once_with()
    write_keyname_mock.assert_called_once_with('keyname')
    set_workspace_to_latest_mock.assert_not_called()
    # keyname came from the CLI, so no prompt
    get_keyname_mock.assert_not_called()
class TestEBP(TestInitialize):
    """Same `platform init` scenarios as the `eb` alias, exercised through
    the standalone `ebp` entry point (`EBP(argv=['init', ...])`)."""

    def test_init__attempt_to_init_inside_application_workspace(self):
        """Initializing a platform workspace inside an existing application
        workspace must raise EnvironmentError with a clear message."""
        fileoperations.create_config_file(
            'my-application',
            'us-west-2',
            'php',
        )

        app = EB(argv=['platform', 'init'])
        app.setup()
        with self.assertRaises(EnvironmentError) as context_manager:
            app.run()

        self.assertEqual(
            'This directory is already initialized with an application workspace.',
            str(context_manager.exception)
        )

    @mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.get_region_from_inputs')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
    @mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
    @mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
    @mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
    def test_init__non_interactive_mode(
            self,
            # mock parameters are in reverse order of the decorators above
            get_platform_name_and_version_mock,
            setup_mock,
            set_region_mock,
            set_up_credentials_mock,
            get_region_from_inputs_mock,
            touch_config_folder_mock,
            write_keyname_mock,
            set_workspace_to_latest_mock
    ):
        """`ebp init <name>`: no version resolved (None), so the workspace
        is set to the latest platform version and no keyname is written."""
        get_region_from_inputs_mock.return_value = 'us-west-2'
        get_platform_name_and_version_mock.return_value = ('my-custom-platform', None)
        set_up_credentials_mock.return_value = 'us-west-2'

        app = EBP(argv=['init', 'my-custom-platform'])
        app.setup()
        app.run()

        set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
        set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', False)
        setup_mock.assert_called_once_with(
            'Custom Platform Builder',
            'us-west-2',
            None,
            platform_name='my-custom-platform',
            platform_version=None,
            workspace_type='Platform'
        )
        touch_config_folder_mock.assert_called_once_with()
        write_keyname_mock.assert_called_once_with(None)
        set_workspace_to_latest_mock.assert_called_once_with()

    @mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.get_region_from_inputs')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
    @mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
    @mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
    @mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
    def test_init__non_interactive_mode__keyname_specified(
            self,
            get_platform_name_and_version_mock,
            setup_mock,
            set_region_mock,
            set_up_credentials_mock,
            get_region_from_inputs_mock,
            touch_config_folder_mock,
            write_keyname_mock,
            set_workspace_to_latest_mock
    ):
        """`ebp init <name> -k <keyname>`: explicit keyname is persisted;
        a resolved version means no reset to latest."""
        get_region_from_inputs_mock.return_value = 'us-west-2'
        get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
        set_up_credentials_mock.return_value = 'us-west-2'

        app = EBP(argv=['init', 'my-custom-platform', '-k', 'keyname'])
        app.setup()
        app.run()

        set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
        set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', False)
        setup_mock.assert_called_once_with(
            'Custom Platform Builder',
            'us-west-2',
            None,
            platform_name='my-custom-platform',
            platform_version='1.0.3',
            workspace_type='Platform'
        )
        touch_config_folder_mock.assert_called_once_with()
        write_keyname_mock.assert_called_once_with('keyname')
        set_workspace_to_latest_mock.assert_not_called()

    @mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.get_region')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
    @mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
    @mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
    @mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
    @mock.patch('ebcli.controllers.platform.initialize.get_keyname')
    def test_init__force_interactive_mode_by_not_specifying_the_platform(
            self,
            get_keyname_mock,
            get_platform_name_and_version_mock,
            setup_mock,
            set_region_mock,
            set_up_credentials_mock,
            get_region_mock,
            touch_config_folder_mock,
            write_keyname_mock,
            set_workspace_to_latest_mock
    ):
        """`ebp init` with no platform argument forces interactive mode."""
        get_region_mock.return_value = 'us-west-2'
        get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
        set_up_credentials_mock.return_value = 'us-west-2'
        get_keyname_mock.return_value = 'keyname'

        app = EBP(argv=['init'])
        app.setup()
        app.run()

        set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
        set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', True)
        setup_mock.assert_called_once_with(
            'Custom Platform Builder',
            'us-west-2',
            None,
            platform_name='my-custom-platform',
            platform_version='1.0.3',
            workspace_type='Platform'
        )
        get_region_mock.assert_called_once_with(None, True)
        touch_config_folder_mock.assert_called_once_with()
        write_keyname_mock.assert_called_once_with('keyname')
        set_workspace_to_latest_mock.assert_not_called()

    @mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.get_region')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
    @mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
    @mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
    @mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
    @mock.patch('ebcli.controllers.platform.initialize.get_keyname')
    def test_init__force_interactive_mode_by_passing_interactive_argument(
            self,
            get_keyname_mock,
            get_platform_name_and_version_mock,
            setup_mock,
            set_region_mock,
            set_up_credentials_mock,
            get_region_mock,
            touch_config_folder_mock,
            write_keyname_mock,
            set_workspace_to_latest_mock
    ):
        """`ebp init <name> -i`: `-i` forces interactive mode despite the
        explicit platform argument."""
        get_region_mock.return_value = 'us-west-2'
        get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
        set_up_credentials_mock.return_value = 'us-west-2'
        get_keyname_mock.return_value = 'keyname'

        app = EBP(argv=['init', 'my-custom-platform', '-i'])
        app.setup()
        app.run()

        set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
        set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', True)
        setup_mock.assert_called_once_with(
            'Custom Platform Builder',
            'us-west-2',
            None,
            platform_name='my-custom-platform',
            platform_version='1.0.3',
            workspace_type='Platform'
        )
        get_region_mock.assert_called_once_with(None, True)
        touch_config_folder_mock.assert_called_once_with()
        write_keyname_mock.assert_called_once_with('keyname')
        set_workspace_to_latest_mock.assert_not_called()

    @mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.get_region')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
    @mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
    @mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
    @mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
    @mock.patch('ebcli.controllers.platform.initialize.get_keyname')
    def test_init__force_interactive_mode_by_passing_interactive_argument_and_omitting_platform_argument(
            self,
            get_keyname_mock,
            get_platform_name_and_version_mock,
            setup_mock,
            set_region_mock,
            set_up_credentials_mock,
            get_region_mock,
            touch_config_folder_mock,
            write_keyname_mock,
            set_workspace_to_latest_mock
    ):
        """`ebp init -i`: fully interactive flow; everything prompted."""
        get_region_mock.return_value = 'us-west-2'
        get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
        set_up_credentials_mock.return_value = 'us-west-2'
        get_keyname_mock.return_value = 'keyname'

        app = EBP(argv=['init', '-i'])
        app.setup()
        app.run()

        set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
        set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', True)
        setup_mock.assert_called_once_with(
            'Custom Platform Builder',
            'us-west-2',
            None,
            platform_name='my-custom-platform',
            platform_version='1.0.3',
            workspace_type='Platform'
        )
        get_region_mock.assert_called_once_with(None, True)
        touch_config_folder_mock.assert_called_once_with()
        write_keyname_mock.assert_called_once_with('keyname')
        set_workspace_to_latest_mock.assert_not_called()

    @mock.patch('ebcli.controllers.platform.initialize.platformops.set_workspace_to_latest')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.write_keyname')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.touch_config_folder')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.get_region')
    @mock.patch('ebcli.controllers.platform.initialize.commonops.set_up_credentials')
    @mock.patch('ebcli.controllers.platform.initialize.aws.set_region')
    @mock.patch('ebcli.controllers.platform.initialize.initializeops.setup')
    @mock.patch('ebcli.controllers.platform.initialize.get_platform_name_and_version')
    @mock.patch('ebcli.controllers.platform.initialize.get_keyname')
    def test_init__interactive_mode__pass_keyname_in_interactive(
            self,
            get_keyname_mock,
            get_platform_name_and_version_mock,
            setup_mock,
            set_region_mock,
            set_up_credentials_mock,
            get_region_mock,
            touch_config_folder_mock,
            write_keyname_mock,
            set_workspace_to_latest_mock
    ):
        """`ebp init -k <keyname>`: explicit `-k` suppresses the keyname
        prompt (`get_keyname` must not be called)."""
        get_region_mock.return_value = 'us-west-2'
        get_platform_name_and_version_mock.return_value = ('my-custom-platform', '1.0.3')
        set_up_credentials_mock.return_value = 'us-west-2'

        app = EBP(argv=['init', '-k', 'keyname'])
        app.setup()
        app.run()

        set_region_mock.assert_has_calls([mock.call(None), mock.call('us-west-2')])
        set_up_credentials_mock.assert_called_once_with(None, 'us-west-2', True)
        setup_mock.assert_called_once_with(
            'Custom Platform Builder',
            'us-west-2',
            None,
            platform_name='my-custom-platform',
            platform_version='1.0.3',
            workspace_type='Platform'
        )
        get_region_mock.assert_called_once_with(None, True)
        touch_config_folder_mock.assert_called_once_with()
        write_keyname_mock.assert_called_once_with('keyname')
        set_workspace_to_latest_mock.assert_not_called()
        # keyname came from the CLI, so no prompt
        get_keyname_mock.assert_not_called()
class TestGenericPlatformInitController(unittest.TestCase):
    """Unit tests for the module-level helpers of
    `ebcli.controllers.platform.initialize`: `get_keyname` and
    `get_platform_name_and_version`."""

    @mock.patch('ebcli.controllers.platform.initialize.commonops.get_default_keyname')
    @mock.patch('ebcli.controllers.platform.initialize.sshops.prompt_for_ec2_keyname')
    def test_get_keyname__found_default_keyname(
            self,
            prompt_for_ec2_keyname_mock,
            get_default_keyname_mock
    ):
        """A configured default keyname is returned without prompting."""
        get_default_keyname_mock.return_value = 'keyname'

        self.assertEqual(
            'keyname',
            initialize.get_keyname()
        )

        prompt_for_ec2_keyname_mock.assert_not_called()

    @mock.patch('ebcli.controllers.platform.initialize.commonops.get_default_keyname')
    @mock.patch('ebcli.controllers.platform.initialize.sshops.prompt_for_ec2_keyname')
    def test_get_keyname__could_not_find_default_keyname(
            self,
            prompt_for_ec2_keyname_mock,
            get_default_keyname_mock
    ):
        """With no default keyname, the user is prompted with the
        platform-packer-specific message."""
        get_default_keyname_mock.return_value = None
        prompt_for_ec2_keyname_mock.return_value = 'keyname'

        self.assertEqual(
            'keyname',
            initialize.get_keyname()
        )

        prompt_for_ec2_keyname_mock.assert_called_once_with(
            message='Would you like to be able to log into your platform packer environment?'
        )

    @mock.patch('ebcli.controllers.platform.initialize.platformops.get_platform_name_and_version_interactive')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.get_platform_name')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.get_platform_version')
    def test_get_platform_name_and_version__platform_name_specified__non_interactive_flow(
            self,
            get_platform_version_mock,
            get_platform_name_mock,
            get_platform_name_and_version_interactive_mock
    ):
        """An explicitly supplied name short-circuits all lookups; the
        version comes back as None."""
        self.assertEqual(
            ('my-custom-platform', None),
            initialize.get_platform_name_and_version('my-custom-platform')
        )

        get_platform_version_mock.assert_not_called()
        get_platform_name_mock.assert_not_called()
        get_platform_name_and_version_interactive_mock.assert_not_called()

    @mock.patch('ebcli.controllers.platform.initialize.platformops.get_platform_name_and_version_interactive')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.get_platform_name')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.get_platform_version')
    def test_get_platform_name_and_version__platform_name_not_specified__force_interactive_flow__default_platform_found(
            self,
            get_platform_version_mock,
            get_platform_name_mock,
            get_platform_name_and_version_interactive_mock
    ):
        """No name given, but the workspace config already holds one:
        name and version are read from file operations, no prompting."""
        get_platform_name_mock.return_value = 'my-custom-platform'
        get_platform_version_mock.return_value = '1.0.3'

        self.assertEqual(
            ('my-custom-platform', '1.0.3'),
            initialize.get_platform_name_and_version(None)
        )

        get_platform_name_mock.assert_called_once_with(default=None)
        get_platform_version_mock.assert_called_once_with()
        get_platform_name_and_version_interactive_mock.assert_not_called()

    @mock.patch('ebcli.controllers.platform.initialize.platformops.get_platform_name_and_version_interactive')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.get_platform_name')
    @mock.patch('ebcli.controllers.platform.initialize.fileoperations.get_platform_version')
    def test_get_platform_name_and_version__platform_name_not_specified__default_platform_not_found__customer_prompted_for_paltform_name(
            self,
            get_platform_version_mock,
            get_platform_name_mock,
            get_platform_name_and_version_interactive_mock
    ):
        """Workspace not initialized (NotInitializedError): fall back to
        the interactive name/version prompt."""
        # NOTE(review): 'paltform' typo in the test name is preserved as-is
        get_platform_name_mock.side_effect = initialize.NotInitializedError
        get_platform_name_and_version_interactive_mock.return_value = ('my-custom-platform', '1.0.3')

        self.assertEqual(
            ('my-custom-platform', '1.0.3'),
            initialize.get_platform_name_and_version(None)
        )

        get_platform_name_mock.assert_called_once_with(default=None)
        get_platform_version_mock.assert_not_called()
        get_platform_name_and_version_interactive_mock.assert_called_once_with()
| 45.492877
| 137
| 0.704346
| 3,877
| 31,936
| 5.410369
| 0.052618
| 0.090008
| 0.135011
| 0.139445
| 0.938072
| 0.934687
| 0.931827
| 0.927536
| 0.927536
| 0.926058
| 0
| 0.006119
| 0.196549
| 31,936
| 701
| 138
| 45.557775
| 0.811372
| 0.016784
| 0
| 0.873194
| 0
| 0.001605
| 0.315449
| 0.24182
| 0
| 0
| 0
| 0
| 0.163724
| 1
| 0.033708
| false
| 0.009631
| 0.016051
| 0
| 0.057785
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1833e464f0320b1f3a8a9a05eebe3f1330f2d5bf
| 1,864
|
py
|
Python
|
splitting.py
|
maxime-tournier/lcpy
|
1419901a089cd088edd078397958b47c25cc2f4f
|
[
"MIT"
] | null | null | null |
splitting.py
|
maxime-tournier/lcpy
|
1419901a089cd088edd078397958b47c25cc2f4f
|
[
"MIT"
] | null | null | null |
splitting.py
|
maxime-tournier/lcpy
|
1419901a089cd088edd078397958b47c25cc2f4f
|
[
"MIT"
] | null | null | null |
"""matrix-splitting iterations"""
import numpy as np
import math
def gs(x, A, b, **kwargs):
    """Gauss-Seidel iteration for the linear system ``A x = b``.

    ``x`` is updated in place. After every sweep the generator yields the
    Euclidean norm of the change; it stops once a sweep no longer changes
    ``x`` (error == 0).

    Keyword arguments:
        omega: relaxation factor (default 1).
    """
    d = np.diag(A)
    n = b.size

    omega = kwargs.get('omega', 1)

    error = 1
    old = np.copy(x)
    delta = np.zeros(n)

    while error > 0:
        # sweep components in order; each update sees the freshest x
        for i in range(n):  # `range`: the original `xrange` is Python 2 only
            x[i] += omega * (b[i] - A[i, :].dot(x)) / d[i]

        delta[:] = x - old
        error = math.sqrt(delta.dot(delta))
        yield error
        old[:] = x
def jacobi(x, A, b, **kwargs):
    """Jacobi iteration for the linear system ``A x = b``.

    ``x`` is updated in place. After every step the generator yields the
    Euclidean norm of the change; it stops once a step no longer changes
    ``x``.

    Keyword arguments:
        diag: diagonal of A (default ``np.diag(A)``).
        omega: relaxation factor (default ``2.0 / n``).
    """
    d = kwargs.get('diag', np.diag(A))
    n = b.size

    # 2.0 / n forces float division, matching `pjacobi`; the original
    # `2 / n` truncates to 0 under Python 2 for n > 2, freezing x
    omega = kwargs.get('omega', 2.0 / n)

    error = 1
    old = np.copy(x)
    delta = np.zeros(n)

    while error > 0:
        x += omega * (b - A.dot(x)) / d

        delta[:] = x - old
        error = math.sqrt(delta.dot(delta))
        yield error
        old[:] = x
def pgs(x, A, b, **kwargs):
    """Projected Gauss-Seidel iteration (LCP: ``A x = b`` with ``x >= 0``).

    Like `gs`, but each component is clamped to be non-negative right
    after its update. ``x`` is updated in place; the generator yields the
    Euclidean norm of the change per sweep and stops at zero change.

    Keyword arguments:
        diag: diagonal of A (default ``np.diag(A)``).
        omega: relaxation factor (default 1).
    """
    d = kwargs.get('diag', np.diag(A))
    n = b.size

    omega = kwargs.get('omega', 1)

    error = 1
    old = np.copy(x)
    delta = np.zeros(n)

    while error > 0:
        for i in range(n):  # `range`: the original `xrange` is Python 2 only
            x[i] += omega * (b[i] - A[i, :].dot(x)) / d[i]
            # project onto the non-negative orthant component-wise
            if x[i] < 0: x[i] = 0

        delta[:] = x - old
        error = math.sqrt(delta.dot(delta))
        yield error
        old[:] = x
def pjacobi(x, A, b, **kwargs):
    """Projected Jacobi iteration (LCP: ``A x = b`` with ``x >= 0``).

    Like `jacobi`, with ``x`` clamped to the non-negative orthant after
    each step. ``x`` is updated in place; the generator yields the
    Euclidean norm of the change per step and stops at zero change.

    Keyword arguments:
        diag: diagonal of A (default ``np.diag(A)``).
        omega: relaxation factor (default ``2 / float(n)``).
    """
    d = kwargs.get('diag', np.diag(A))
    n = b.size

    omega = kwargs.get('omega', 2 / float(n))

    error = 1
    old = np.copy(x)
    delta = np.zeros(n)
    zero = np.zeros(n)

    while error > 0:
        x += omega * (b - A.dot(x)) / d
        # projection onto x >= 0
        x[:] = np.maximum(x, zero)

        delta[:] = x - old
        error = math.sqrt(delta.dot(delta))
        # yield the error value; the original bare `yield` produced None,
        # breaking the error-sequence contract shared by gs/jacobi/pgs
        yield error
        old[:] = x
| 16.945455
| 58
| 0.440987
| 266
| 1,864
| 3.090226
| 0.172932
| 0.076642
| 0.048662
| 0.043796
| 0.812652
| 0.744526
| 0.72871
| 0.72871
| 0.72871
| 0.72871
| 0
| 0.012132
| 0.380901
| 1,864
| 109
| 59
| 17.100917
| 0.700173
| 0.046674
| 0
| 0.779661
| 0
| 0
| 0.018317
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067797
| false
| 0
| 0.033898
| 0
| 0.101695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
183bd1e037cddcb073013a1b08e8965212ca7dda
| 7,077
|
py
|
Python
|
test/test_vsql_binop_add.py
|
LivingLogic/LivingApps.Python.LivingAPI
|
70bb71d7f582535a4c52e1f00d9ed070f3f2cc4f
|
[
"MIT"
] | 2
|
2017-09-15T15:28:23.000Z
|
2019-01-25T09:23:53.000Z
|
test/test_vsql_binop_add.py
|
LivingLogic/LivingApps.Python.LivingAPI
|
70bb71d7f582535a4c52e1f00d9ed070f3f2cc4f
|
[
"MIT"
] | 1
|
2019-01-28T08:06:23.000Z
|
2019-01-28T14:45:52.000Z
|
test/test_vsql_binop_add.py
|
LivingLogic/LivingApps.Python.LivingAPI
|
70bb71d7f582535a4c52e1f00d9ed070f3f2cc4f
|
[
"MIT"
] | 1
|
2019-01-25T21:20:55.000Z
|
2019-01-25T21:20:55.000Z
|
"""
Tests for the vSQL addition operator ``+``.
The test are done via the Python DB interface.
To run the tests, :mod:`pytest` is required.
"""
from conftest import *
###
### Tests
###
# Reusable vSQL date/datetime literals (Feb 29 / Mar 1 of the leap year 2000),
# interpolated into the f-string test expressions below.
d1 = "@(2000-02-29)"
d2 = "@(2000-03-01)"
dt1 = "@(2000-02-29T12:34:56)"
dt2 = "@(2000-03-01T12:34:56)"
# --- bool/int/number/str operands ---

def test_bool_bool1(config_persons):
    check_vsql(config_persons, "app.p_bool_none.value + True is None")

def test_bool_bool2(config_persons):
    check_vsql(config_persons, "app.p_bool_false.value + True == 1")

def test_bool_bool3(config_persons):
    check_vsql(config_persons, "app.p_bool_true.value + True == 2")

def test_bool_int(config_persons):
    check_vsql(config_persons, "app.p_bool_true.value + 1 == 2")

def test_bool_number(config_persons):
    check_vsql(config_persons, "app.p_bool_true.value + 1.5 == 2.5")

def test_int_bool(config_persons):
    check_vsql(config_persons, "1 + app.p_bool_true.value == 2")

def test_int_int(config_persons):
    check_vsql(config_persons, "1 + app.p_int_value.value == 1778")

def test_int_number(config_persons):
    check_vsql(config_persons, "1 + app.p_number_value.value == 43.5")

def test_str_str1(config_persons):
    check_vsql(config_persons, "'gurk' + app.p_str_none.value == 'gurk'")

def test_str_str2(config_persons):
    check_vsql(config_persons, "'gurk' + app.p_str_value.value == 'gurkgurk'")

# --- list concatenation ---

def test_intlist_intlist(config_persons):
    check_vsql(config_persons, "[1, 2] + [3, 4] == [1, 2, 3, 4]")

def test_intlist_numberlist(config_persons):
    check_vsql(config_persons, "[1, 2] + [3.5, 4.5] == [1.0, 2.0, 3.5, 4.5]")

def test_numberlist_intlist(config_persons):
    check_vsql(config_persons, "[1.5, 2.5] + [3, 4] == [1.5, 2.5, 3.0, 4.0]")

def test_numberlist_numberlist(config_persons):
    check_vsql(config_persons, "[1.5, 2.5] + [3.5, 4.5] == [1.5, 2.5, 3.5, 4.5]")

def test_strlist_strlist(config_persons):
    check_vsql(config_persons, "['gurk', 'hurz'] + ['hinz', 'kunz'] == ['gurk', 'hurz', 'hinz', 'kunz']")

def test_datelist_datelist(config_persons):
    check_vsql(config_persons, "[@(2000-02-29), @(2000-03-01)] + [@(2000-03-02), @(2000-03-03)] == [@(2000-02-29), @(2000-03-01), @(2000-03-02), @(2000-03-03)]")

def test_datetimelist_datetimelist(config_persons):
    check_vsql(config_persons, "[@(2000-02-29T12:34:56), @(2000-03-01T12:34:56)] + [@(2000-03-02T12:34:56), @(2000-03-03T12:34:56)] == [@(2000-02-29T12:34:56), @(2000-03-01T12:34:56), @(2000-03-02T12:34:56), @(2000-03-03T12:34:56)]")

# --- date/datetime plus delta types (and the commutated forms) ---

def test_date_datedelta(config_persons):
    check_vsql(config_persons, "app.p_date_value.value + days(1) == @(2000-03-01)")

def test_date_monthdelta(config_persons):
    check_vsql(config_persons, "@(2000-01-31) + months(1) == app.p_date_value.value")

def test_datetime_datedelta(config_persons):
    check_vsql(config_persons, "app.p_datetime_value.value + days(1) == @(2000-03-01T12:34:56)")

def test_datetime_datetimedelta(config_persons):
    check_vsql(config_persons, "app.p_datetime_value.value + timedelta(1, 1) == @(2000-03-01T12:34:57)")

def test_datetime_monthdelta(config_persons):
    check_vsql(config_persons, "@(2000-01-31T12:34:56) + months(1) == app.p_datetime_value.value")

def test_monthdelta_date(config_persons):
    check_vsql(config_persons, "months(1) + @(2000-01-31) == app.p_date_value.value")

def test_monthdelta_datetime(config_persons):
    check_vsql(config_persons, "months(1) + @(2000-01-31T12:34:56) == app.p_datetime_value.value")

def test_datedelta_datedelta(config_persons):
    check_vsql(config_persons, "app.p_datedelta_value.value + days(12) == days(24)")

def test_datedelta_datetimedelta(config_persons):
    check_vsql(config_persons, "app.p_datedelta_value.value + timedelta(1, 1) == timedelta(13, 1)")

def test_datetimedelta_datedelta(config_persons):
    check_vsql(config_persons, "app.p_datetimedelta_value.value + days(12) == timedelta(13, (12 * 60 + 34) * 60 + 56)")

def test_datetimedelta_datetimedelta(config_persons):
    check_vsql(config_persons, "app.p_datetimedelta_value.value + timedelta(2, (12 * 60 + 34) * 60 + 56) == timedelta(4, (1 * 60 + 9) * 60 + 52)")

def test_monthdelta_monthdelta(config_persons):
    check_vsql(config_persons, "app.p_monthdelta_value.value + months(9) == months(12)")

# --- null-typed lists (empty / all-None) on the left ---

def test_nulllist_nulllist1(config_persons):
    check_vsql(config_persons, "[] + [] == []")

def test_nulllist_nulllist2(config_persons):
    check_vsql(config_persons, "[None, None] + [None] == [None, None, None]")

def test_nulllist_intlist1(config_persons):
    check_vsql(config_persons, "[] + [1, None, 2] == [1, None, 2]")

def test_nulllist_intlist2(config_persons):
    check_vsql(config_persons, "[None, None] + [1, None, 2] == [None, None, 1, None, 2]")

def test_nulllist_numberlist1(config_persons):
    check_vsql(config_persons, "[] + [1.1, None, 2.2] == [1.1, None, 2.2]")

def test_nulllist_numberlist2(config_persons):
    check_vsql(config_persons, "[None, None] + [1.1, None, 2.2] == [None, None, 1.1, None, 2.2]")

def test_nulllist_strlist1(config_persons):
    check_vsql(config_persons, "[] + ['gurk', None, 'hurz'] == ['gurk', None, 'hurz']")

def test_nulllist_strlist2(config_persons):
    check_vsql(config_persons, "[None, None] + ['gurk', None, 'hurz'] == [None, None, 'gurk', None, 'hurz']")

def test_nulllist_datelist1(config_persons):
    check_vsql(config_persons, f"[] + [{d1}, None, {d2}] == [{d1}, None, {d2}]")

def test_nulllist_datelist2(config_persons):
    check_vsql(config_persons, f"[None, None] + [{d1}, None, {d2}] == [None, None, {d1}, None, {d2}]")

def test_nulllist_datetimelist1(config_persons):
    check_vsql(config_persons, f"[] + [{dt1}, None, {dt2}] == [{dt1}, None, {dt2}]")

def test_nulllist_datetimelist2(config_persons):
    check_vsql(config_persons, f"[None, None] + [{dt1}, None, {dt2}] == [None, None, {dt1}, None, {dt2}]")

# --- null-typed lists on the right ---

def test_intlist_nulllist1(config_persons):
    check_vsql(config_persons, "[1, None, 2] + [] == [1, None, 2]")

def test_intlist_nulllist2(config_persons):
    check_vsql(config_persons, "[1, None, 2] + [None, None] == [1, None, 2, None, None]")

def test_numberlist_nulllist1(config_persons):
    check_vsql(config_persons, "[1.1, None, 2.2] + [] == [1.1, None, 2.2]")

def test_numberlist_nulllist2(config_persons):
    check_vsql(config_persons, "[1.1, None, 2.2] + [None, None] == [1.1, None, 2.2, None, None]")

def test_strlist_nulllist1(config_persons):
    check_vsql(config_persons, "['gurk', None, 'hurz'] + [] == ['gurk', None, 'hurz']")

def test_strlist_nulllist2(config_persons):
    check_vsql(config_persons, "['gurk', None, 'hurz'] + [None, None] == ['gurk', None, 'hurz', None, None]")

def test_datelist_nulllist1(config_persons):
    check_vsql(config_persons, f"[{d1}, None, {d2}] + [] == [{d1}, None, {d2}]")

def test_datelist_nulllist2(config_persons):
    check_vsql(config_persons, f"[{d1}, None, {d2}] + [None, None] == [{d1}, None, {d2}, None, None]")

def test_datetimelist_nulllist1(config_persons):
    check_vsql(config_persons, f"[{dt1}, None, {dt2}] + [] == [{dt1}, None, {dt2}]")

def test_datetimelist_nulllist2(config_persons):
    check_vsql(config_persons, f"[{dt1}, None, {dt2}] + [None, None] == [{dt1}, None, {dt2}, None, None]")
| 40.672414
| 230
| 0.705242
| 1,086
| 7,077
| 4.320442
| 0.096685
| 0.282609
| 0.195652
| 0.23913
| 0.7711
| 0.734655
| 0.710571
| 0.641944
| 0.534101
| 0.370205
| 0
| 0.084583
| 0.104564
| 7,077
| 173
| 231
| 40.907514
| 0.655831
| 0.020348
| 0
| 0
| 0
| 0.233645
| 0.421715
| 0.119416
| 0
| 0
| 0
| 0
| 0
| 1
| 0.476636
| false
| 0
| 0.009346
| 0
| 0.485981
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
184a1986e084f23159745bb5ae2204c9cbaa6595
| 173
|
py
|
Python
|
Codewars/7kyu/digital-cypher/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/7kyu/digital-cypher/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/7kyu/digital-cypher/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
# Codewars kata test cases ("Digital cypher").  ``Test`` and ``encode`` are
# injected by the Codewars test runner, not defined in this file.
# NOTE(review): the exact contract of ``encode`` is inferred only from these
# expected values (letter positions shifted by successive key digits) --
# confirm against the kata description.
Test.assert_equals(encode('scout', 1939), [20, 12, 18, 30, 21])
Test.assert_equals(encode('masterpiece', 1939), [14, 10, 22, 29, 6, 27, 19, 18, 6, 12, 8])
| 34.6
| 90
| 0.624277
| 32
| 173
| 3.3125
| 0.71875
| 0.188679
| 0.301887
| 0.415094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.27027
| 0.144509
| 173
| 4
| 91
| 43.25
| 0.445946
| 0.080925
| 0
| 0
| 0
| 0
| 0.101911
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18681bef008d4145eba8c64a02230cedbdcbd88f
| 2,250
|
py
|
Python
|
parsers/tests/test_iupredparser.py
|
rigdenlab/conkit-web
|
bf50d28a73f43b9eb0e0c397ec1d0fd32547fdf1
|
[
"BSD-3-Clause"
] | 1
|
2020-04-16T16:52:53.000Z
|
2020-04-16T16:52:53.000Z
|
parsers/tests/test_iupredparser.py
|
rigdenlab/conplot
|
9b3129d9e1b7ed93da63c6fd31f9b50e63f2d4d9
|
[
"BSD-3-Clause"
] | 47
|
2020-05-11T13:59:11.000Z
|
2022-01-21T09:37:18.000Z
|
parsers/tests/test_iupredparser.py
|
rigdenlab/conkit-web
|
bf50d28a73f43b9eb0e0c397ec1d0fd32547fdf1
|
[
"BSD-3-Clause"
] | 5
|
2020-04-24T11:19:21.000Z
|
2020-05-06T08:01:36.000Z
|
import unittest
from parsers import IupredParser, DisorderStates
from utils.exceptions import InvalidFormat
class IupredParserTestCase(unittest.TestCase):
    """Unit tests for the IUPred2A disorder-prediction parser."""

    def test_1(self):
        # Well-formed IUPred2A output: '#' comment/header lines followed by
        # whitespace-separated records (POS, AMINO ACID, IUPRED SCORE, ANCHOR SCORE).
        dummy_prediction = """# IUPred2A: context-dependent prediction of protein disorder as a function of redox state and protein binding
# Balint Meszaros, Gabor Erdos, Zsuzsanna Dosztanyi
# Nucleic Acids Research 2018, Submitted
# POS AMINO ACID IUPRED SCORE ANCHOR SCORE
1 M 0.5000 0.0076
2 S 0.0083 0.0074
3 L 0.6108 0.0073
4 E 0.5040 0.0072
5 A 0.4093 0.0068
6 T 0.0113 0.0067
7 V 0.7000 0.0061
8 L 0.9800 0.0057
9 D 0.1065 0.0056
10 L 0.3482 0.0050
"""
        # Expected per-residue states.  Matching the scores above, residues
        # with an IUPRED score >= 0.5 map to DISORDER and the rest to ORDER
        # (threshold inferred from this fixture -- confirm against the
        # IupredParser implementation).
        expected = [
            DisorderStates.DISORDER.value,
            DisorderStates.ORDER.value,
            DisorderStates.DISORDER.value,
            DisorderStates.DISORDER.value,
            DisorderStates.ORDER.value,
            DisorderStates.ORDER.value,
            DisorderStates.DISORDER.value,
            DisorderStates.DISORDER.value,
            DisorderStates.ORDER.value,
            DisorderStates.ORDER.value,
        ]
        # NOTE(review): the parser result is used with len() and
        # assertListEqual, so it apparently behaves like a list of states.
        output = IupredParser(dummy_prediction)
        self.assertEqual(10, len(output))
        self.assertListEqual(expected, output)

    def test_2(self):
        # Same fixture but the first record carries a non-numeric IUPRED
        # score ("0.X000"): parsing must raise InvalidFormat.
        dummy_prediction = """# IUPred2A: context-dependent prediction of protein disorder as a function of redox state and protein binding
# Balint Meszaros, Gabor Erdos, Zsuzsanna Dosztanyi
# Nucleic Acids Research 2018, Submitted
# POS AMINO ACID IUPRED SCORE ANCHOR SCORE
1 M 0.X000 0.0076
2 S 0.0083 0.0074
3 L 0.6108 0.0073
4 E 0.5040 0.0072
5 A 0.4093 0.0068
6 T 0.0113 0.0067
7 V 0.7000 0.0061
8 L 0.9800 0.0057
9 D 0.1065 0.0056
10 L 0.3482 0.0050
"""
        with self.assertRaises(InvalidFormat):
            output = IupredParser(dummy_prediction)

    def test_3(self):
        # Header lines only, no data records: must also raise InvalidFormat.
        dummy_prediction = """# IUPred2A: context-dependent prediction of protein disorder as a function of redox state and protein binding
# Balint Meszaros, Gabor Erdos, Zsuzsanna Dosztanyi
# Nucleic Acids Research 2018, Submitted
# POS AMINO ACID IUPRED SCORE ANCHOR SCORE
"""
        with self.assertRaises(InvalidFormat):
            output = IupredParser(dummy_prediction)
| 28.125
| 139
| 0.691111
| 316
| 2,250
| 4.892405
| 0.300633
| 0.110608
| 0.087322
| 0.1326
| 0.826649
| 0.826649
| 0.826649
| 0.812419
| 0.727038
| 0.727038
| 0
| 0.140279
| 0.236444
| 2,250
| 79
| 140
| 28.481013
| 0.759604
| 0
| 0
| 0.786885
| 0
| 0
| 0.505333
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 1
| 0.04918
| false
| 0
| 0.04918
| 0
| 0.114754
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18819ffe6accdab20e6590a24309d1cdb1991017
| 150
|
py
|
Python
|
bash_scripts/try_docker.py
|
koukoulala/fairseq
|
74261e0b6f455a01e2e26e8c0e7fe20524b99441
|
[
"MIT"
] | null | null | null |
bash_scripts/try_docker.py
|
koukoulala/fairseq
|
74261e0b6f455a01e2e26e8c0e7fe20524b99441
|
[
"MIT"
] | null | null | null |
bash_scripts/try_docker.py
|
koukoulala/fairseq
|
74261e0b6f455a01e2e26e8c0e7fe20524b99441
|
[
"MIT"
] | null | null | null |
# Quick sanity check that a (Docker) environment has a working PyTorch install.
import torch
import torch.utils
# Importing cpp_extension verifies the C++/CUDA extension machinery loads.
import torch.utils.cpp_extension
print(torch.__version__)          # installed torch version
print(torch.version.cuda)         # CUDA version torch was built against (None on CPU-only builds)
print(torch.cuda.is_available())  # whether a usable GPU is visible to torch
| 18.75
| 32
| 0.826667
| 22
| 150
| 5.363636
| 0.454545
| 0.279661
| 0.271186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 150
| 8
| 33
| 18.75
| 0.842857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
43fa857e205d32e865c5ce51b769d895679eba84
| 18,552
|
py
|
Python
|
src/plantenergy/OptimizationGroups.py
|
BYUFLOWLab/WakeExchange
|
5fff32e38723039fe38b0cce0b4560df813de63e
|
[
"Apache-2.0"
] | 4
|
2017-01-24T17:56:29.000Z
|
2019-02-26T02:01:51.000Z
|
src/plantenergy/OptimizationGroups.py
|
BYUFLOWLab/WakeExchange
|
5fff32e38723039fe38b0cce0b4560df813de63e
|
[
"Apache-2.0"
] | 4
|
2018-09-24T14:11:01.000Z
|
2019-03-07T14:51:31.000Z
|
src/plantenergy/OptimizationGroups.py
|
BYUFLOWLab/WakeExchange
|
5fff32e38723039fe38b0cce0b4560df813de63e
|
[
"Apache-2.0"
] | 3
|
2019-06-25T16:46:28.000Z
|
2020-02-23T10:04:50.000Z
|
#!/usr/bin/env python
# encoding: utf-8
"""
OptimizationGroups.py
Created by Jared J. Thomas, Nov. 2015.
Brigham Young University
"""
import numpy as np
from openmdao.api import Group, IndepVarComp, ExecComp
from plantenergy.GeneralWindFarmGroups import DirectionGroup, AEPGroup
from plantenergy.GeneralCOEGroups import COEGroup
from plantenergy.GeneralWindFarmComponents import SpacingComp, BoundaryComp # cost model functions are defunct: calcICC, calcFCR, calcLLC, calcLRC, calcOandM
from plantenergy.floris import floris_wrapper, add_floris_params_IndepVarComps
import warnings
class OptAEP(Group):
    """
    Group adding optimization parameters to an AEPGroup

    ----------------
    Design Variables
    ----------------
    turbineX: 1D numpy array containing the x coordinates of each turbine in the global reference frame
    turbineY: 1D numpy array containing the x coordinates of each turbine in the global reference frame
    yaw_i: 1D numpy array containing the yaw angle of each turbine in the wind direction reference frame for
           direction i

    ---------------
    Constant Inputs
    ---------------
    rotorDiameter: 1D numpy array containing the rotor diameter of each turbine
    axialInduction: 1D numpy array containing the axial induction of each turbine. These
                    values are not actually used unless the appropriate floris_param is set.
    generator_efficiency: 1D numpy array containing the efficiency of each turbine generator
    wind_speed: scalar containing a generally applied inflow wind speed
    air_density: scalar containing the inflow air density
    windDirections: 1D numpy array containing the angle from N CW to the inflow direction
    windrose_frequencies: 1D numpy array containing the probability of each wind direction
    Ct: 1D numpy array containing the thrust coefficient of each turbine
    Cp: 1D numpy array containing the power coefficient of each turbine
    floris_params:FLORISoriginal(False): boolean specifying which formulation of the FLORIS model to use. (True
                                         specfies to use the model as originally formulated and published).
    floris_params:CPcorrected(True): boolean specifying whether the Cp values provided have been adjusted
                                     for yaw
    floris_params:CTcorrected(True): boolean specifying whether the Ct values provided have been adjusted
                                     for yaw

    -------
    Returns
    -------
    AEP: scalar containing the final AEP for the wind farm
    power_directions: 1D numpy array containing the power production for each wind direction (unweighted)
    velocitiesTurbines: 1D numpy array of velocity at each turbine in each direction. Currently only accessible by
                        *.AEPgroup.dir%i.unknowns['velocitiesTurbines']
    wt_powers: 1D numpy array of power production at each turbine in each direction. Currently only accessible by
               *.AEPgroup.dir%i.unknowns['velocitiesTurbines']
    """

    def __init__(self, nTurbines, nDirections=1, minSpacing=2., use_rotor_components=True,
                 datasize=0, differentiable=True, force_fd=False, nVertices=0, wake_model=floris_wrapper,
                 wake_model_options=None, params_IdepVar_func=add_floris_params_IndepVarComps,
                 params_IndepVar_args=None, cp_points=1, cp_curve_spline=None,
                 rec_func_calls=False):
        super(OptAEP, self).__init__()

        # BUGFIX: params_IndepVar_args previously defaulted to the mutable
        # dict {'use_rotor_components': False}.  Mutable default arguments
        # are shared across all calls, so any mutation would leak into every
        # later instantiation; the None-sentinel idiom is backward-compatible.
        if params_IndepVar_args is None:
            params_IndepVar_args = {'use_rotor_components': False}

        if wake_model_options is None:
            wake_model_options = {'differentiable': differentiable, 'use_rotor_components': use_rotor_components,
                                  'nSamples': 0, 'verbose': False}

        # BUGFIX: only a missing 'nSamples' key should fall back to 0; the
        # original bare "except:" would also have hidden unrelated errors.
        try:
            nSamples = wake_model_options['nSamples']
        except KeyError:
            nSamples = 0

        if force_fd:
            # Force finite-difference derivatives instead of analytic ones.
            self.deriv_options['type'] = 'fd'
            self.deriv_options['form'] = 'forward'

        # ##### add major components and groups
        # add group that calculates AEP
        self.add('AEPgroup', AEPGroup(nTurbines=nTurbines, nDirections=nDirections,
                                      use_rotor_components=use_rotor_components,
                                      datasize=datasize, differentiable=differentiable, wake_model=wake_model,
                                      wake_model_options=wake_model_options,
                                      params_IdepVar_func=params_IdepVar_func,
                                      params_IndepVar_args=params_IndepVar_args, nSamples=nSamples,
                                      cp_points=cp_points, cp_curve_spline=cp_curve_spline,
                                      rec_func_calls=rec_func_calls),
                 promotes=['*'])

        # add component that calculates spacing between each pair of turbines
        self.add('spacing_comp', SpacingComp(nTurbines=nTurbines), promotes=['*'])

        if nVertices > 0:
            # add component that enforces a convex hull wind farm boundary
            self.add('boundary_con', BoundaryComp(nVertices=nVertices, nTurbines=nTurbines), promotes=['*'])
            self.add('bv0', IndepVarComp('boundary_radius', val=1000., units='m',
                                         pass_by_obj=True, desc='radius of wind farm boundary'), promotes=['*'])
            self.add('bv1', IndepVarComp('boundary_center', val=np.array([0., 0.]), units='m', pass_by_obj=True,
                                         desc='x and y positions of circular wind farm boundary center'), promotes=['*'])
        else:
            warnings.warn("nVertices has been set to zero. No boundary constraints can be used unless nVertices > 0",
                          RuntimeWarning)

        # ##### add constraint definitions
        # sc >= 0 means each turbine pair is at least minSpacing rotor
        # diameters apart; there are n*(n-1)/2 unique pairs.
        self.add('spacing_con', ExecComp('sc = wtSeparationSquared-(minSpacing*rotorDiameter[0])**2',
                                         minSpacing=np.array([minSpacing]), rotorDiameter=np.zeros(nTurbines),
                                         sc=np.zeros(int(((nTurbines-1.)*nTurbines/2.))),
                                         wtSeparationSquared=np.zeros(int(((nTurbines-1.)*nTurbines/2.)))),
                 promotes=['*'])

        # add objective component (negated so maximizing AEP becomes a minimization)
        self.add('obj_comp', ExecComp('obj = -1.*AEP', AEP=0.0), promotes=['*'])
class OptCOE(Group):
    """
    Group adding optimization parameters to a COEGroup.

    Structured identically to OptAEP (same design variables, spacing and
    boundary constraints) but wraps a COEGroup and minimizes a scaled
    reciprocal of the cost of energy instead of negated AEP.
    """

    def __init__(self, nTurbines, nDirections=1, minSpacing=2., use_rotor_components=True,
                 datasize=0, differentiable=True, force_fd=False, nVertices=0, wake_model=floris_wrapper,
                 wake_model_options=None, params_IdepVar_func=add_floris_params_IndepVarComps,
                 params_IndepVar_args=None, cp_points=1, cp_curve_spline=None,
                 rec_func_calls=False):
        super(OptCOE, self).__init__()

        # BUGFIX: avoid the shared mutable default dict (state could leak
        # between instantiations); None-sentinel is backward-compatible.
        if params_IndepVar_args is None:
            params_IndepVar_args = {'use_rotor_components': False}

        if wake_model_options is None:
            wake_model_options = {'differentiable': differentiable, 'use_rotor_components': use_rotor_components,
                                  'nSamples': 0, 'verbose': False}

        # BUGFIX: narrow the original bare "except:" to the only expected
        # failure -- a missing 'nSamples' key.
        try:
            nSamples = wake_model_options['nSamples']
        except KeyError:
            nSamples = 0

        if force_fd:
            # Force finite-difference derivatives instead of analytic ones.
            self.deriv_options['type'] = 'fd'
            self.deriv_options['form'] = 'forward'

        # ##### add major components and groups
        # add group that calculates COE
        self.add('COEgroup', COEGroup(nTurbines=nTurbines, nDirections=nDirections,
                                      use_rotor_components=use_rotor_components,
                                      datasize=datasize, differentiable=differentiable, wake_model=wake_model,
                                      wake_model_options=wake_model_options,
                                      params_IdepVar_func=params_IdepVar_func,
                                      params_IndepVar_args=params_IndepVar_args, nSamples=nSamples,
                                      cp_points=cp_points, cp_curve_spline=cp_curve_spline,
                                      rec_func_calls=rec_func_calls),
                 promotes=['*'])

        # add component that calculates spacing between each pair of turbines
        self.add('spacing_comp', SpacingComp(nTurbines=nTurbines), promotes=['*'])

        if nVertices > 0:
            # add component that enforces a convex hull wind farm boundary
            self.add('boundary_con', BoundaryComp(nVertices=nVertices, nTurbines=nTurbines), promotes=['*'])
            self.add('bv0', IndepVarComp('boundary_radius', val=1000., units='m',
                                         pass_by_obj=True, desc='radius of wind farm boundary'), promotes=['*'])
            self.add('bv1', IndepVarComp('boundary_center', val=np.array([0., 0.]), units='m', pass_by_obj=True,
                                         desc='x and y positions of circular wind farm boundary center'), promotes=['*'])
        else:
            warnings.warn("nVertices has been set to zero. No boundary constraints can be used unless nVertices > 0",
                          RuntimeWarning)

        # ##### add constraint definitions
        # sc >= 0 means each turbine pair is at least minSpacing rotor
        # diameters apart; there are n*(n-1)/2 unique pairs.
        self.add('spacing_con', ExecComp('sc = wtSeparationSquared-(minSpacing*rotorDiameter[0])**2',
                                         minSpacing=np.array([minSpacing]), rotorDiameter=np.zeros(nTurbines),
                                         sc=np.zeros(int(((nTurbines-1.)*nTurbines/2.))),
                                         wtSeparationSquared=np.zeros(int(((nTurbines-1.)*nTurbines/2.)))),
                 promotes=['*'])

        # add objective component: obj = -1000000/coe, so driving obj down
        # drives the cost of energy down (the 1e6 factor only rescales).
        self.add('obj_comp', ExecComp('obj = -1000000./coe', coe=0.0), promotes=['*'])
# Currently unused code
'''
class OptPowerOneDir(Group):
""" Group connecting the floris model for optimization with one wind direction"""
def __init__(self, nTurbines, resolution=0, minSpacing=2., differentiable=True, use_rotor_components=True):
super(OptPowerOneDir, self).__init__()
# add major components
self.add('dirComp', AEPGroup(nTurbines, differentiable=differentiable,
use_rotor_components=use_rotor_components), promotes=['*'])
self.add('spacing_comp', SpacingComp(nTurbines=nTurbines), promotes=['*'])
# add constraint definitions
self.add('spacing_con', ExecComp('sc = wtSeparationSquared-(minSpacing*rotorDiameter[0])**2',
minSpacing=minSpacing, rotorDiameter=np.zeros(nTurbines),
sc=np.zeros(((nTurbines-1.)*nTurbines/2.)),
wtSeparationSquared=np.zeros(((nTurbines-1.)*nTurbines/2.))),
promotes=['*'])
# add objective component
self.add('obj_comp', ExecComp('obj = -1.*dir_power0', dir_power0=0.0), promotes=['*'])
# initialize design variables for optimization
# self.add('p1', IndepVarComp('turbineX', np.zeros(nTurbines)), promotes=['*'])
# self.add('p2', IndepVarComp('turbineY', np.zeros(nTurbines)), promotes=['*'])
# self.add('p3', IndepVarComp('yaw', np.zeros(nTurbines)), promotes=['*'])
class OptCOE(Group):
"""
Group adding optimization parameters to an AEPGroup
----------------
Design Variables
----------------
turbineX: 1D numpy array containing the x coordinates of each turbine in the global reference frame
turbineY: 1D numpy array containing the x coordinates of each turbine in the global reference frame
yaw_i: 1D numpy array containing the yaw angle of each turbine in the wind direction reference frame for
direction i
---------------
Constant Inputs
---------------
rotorDiameter: 1D numpy array containing the rotor diameter of each turbine
axialInduction: 1D numpy array containing the axial induction of each turbine. These
values are not actually used unless the appropriate floris_param is set.
generator_efficiency: 1D numpy array containing the efficiency of each turbine generator
wind_speed: scalar containing a generally applied inflow wind speed
air_density: scalar containing the inflow air density
windDirections: 1D numpy array containing the angle from N CW to the inflow direction
windrose_frequencies: 1D numpy array containing the probability of each wind direction
Ct: 1D numpy array containing the thrust coefficient of each turbine
Cp: 1D numpy array containing the power coefficient of each turbine
floris_params:FLORISoriginal(False): boolean specifying which formulation of the FLORIS model to use. (True
specfies to use the model as originally formulated and published).
floris_params:CPcorrected(True): boolean specifying whether the Cp values provided have been adjusted
for yaw
floris_params:CTcorrected(True): boolean specifying whether the Ct values provided have been adjusted
for yaw
-------
Returns
-------
COE: scalar containing the final COE for the wind farm
power_directions: 1D numpy array containing the power production for each wind direction (unweighted)
velocitiesTurbines: 1D numpy array of velocity at each turbine in each direction. Currently only accessible by
*.AEPgroup.dir%i.unknowns['velocitiesTurbines']
wt_powers: 1D numpy array of power production at each turbine in each direction. Currently only accessible by
*.AEPgroup.dir%i.unknowns['velocitiesTurbines']
"""
def __init__(self, nTurbines, nDirections=1, minSpacing=2., use_rotor_components=True,
datasize=0, differentiable=True, force_fd=False, nVertices=0, wake_model=floris_wrapper,
wake_model_options=None, params_IdepVar_func=add_floris_params_IndepVarComps,
params_IndepVar_args={'use_rotor_components': False}, nTopologyPoints=0):
super(OptCOE, self).__init__()
if wake_model_options is None:
wake_model_options = {'differentiable': differentiable, 'use_rotor_components': use_rotor_components,
'nSamples': 0, 'verbose': False}
try:
nSamples = wake_model_options['nSamples']
except:
nSamples = 0
if force_fd:
self.fd_options['force_fd'] = True
self.fd_options['form'] = 'forward'
# ##### add major components and groups
# add group that calculates AEP
self.add('AEPgroup', AEPGroup(nTurbines=nTurbines, nDirections=nDirections,
use_rotor_components=use_rotor_components,
datasize=datasize, differentiable=differentiable, wake_model=wake_model,
wake_model_options=wake_model_options,
params_IdepVar_func=params_IdepVar_func,
params_IndepVar_args=params_IndepVar_args, nSamples=nSamples),
promotes=['*'])
# add component that calculates ICC
self.add('ICCcomp', calcICC(nTurbines=nTurbines, nTopologyPoints=nTopologyPoints), promotes=['*'])
# add component that calculates FCR
self.add('FCRcomp', calcFCR(nTurbines=nTurbines), promotes=['*'])
# add component that calculates LLC
self.add('LLCcomp', calcLLC(nTurbines=nTurbines), promotes=['*'])
# add component that calculates O&M
self.add('OandMcomp', calcOandM(nTurbines=nTurbines), promotes=['*'])
# add component that calculates LRC
self.add('LCRcomp', calcLRC(nTurbines=nTurbines), promotes=['*'])
# add component that calculates spacing between each pair of turbines
self.add('spacing_comp', SpacingComp(nTurbines=nTurbines), promotes=['*'])
if nVertices > 0:
# add component that enforces a convex hull wind farm boundary
self.add('boundary_con', BoundaryComp(nVertices=nVertices, nTurbines=nTurbines), promotes=['*'])
# ##### add constraint definitions
self.add('spacing_con', ExecComp('sc = wtSeparationSquared-(minSpacing*rotorDiameter[0])**2',
minSpacing=minSpacing, rotorDiameter=np.zeros(nTurbines),
sc=np.zeros(int(((nTurbines-1.)*nTurbines/2.))),
wtSeparationSquared=np.zeros(int(((nTurbines-1.)*nTurbines/2.)))),
promotes=['*'])
# add objective component
self.add('obj_comp', ExecComp('obj = (FCR+ICC)/AEP+LLC+(OandM+LRC)/AEP', ICC=0.0, AEP=0.0, FCR=0.0, LLC=0.0, OandM=0.0, LRC=0.0), promotes=['*'])
'''
| 50.967033
| 157
| 0.587753
| 1,882
| 18,552
| 5.650372
| 0.146653
| 0.022381
| 0.02934
| 0.045514
| 0.87192
| 0.865526
| 0.857344
| 0.836656
| 0.82791
| 0.82791
| 0
| 0.010776
| 0.319696
| 18,552
| 364
| 158
| 50.967033
| 0.831788
| 0.206608
| 0
| 0.835165
| 0
| 0
| 0.129417
| 0.015506
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021978
| false
| 0.043956
| 0.076923
| 0
| 0.120879
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43fdfef6d57b530579c9eceb0bb910b9e6e7e9c8
| 60,016
|
py
|
Python
|
rrdb/rrdb.py
|
pegasus-kv/pegasus-python-client
|
cb1a91336cc04f5e2ffdc92a6f3831045ece4b95
|
[
"Apache-2.0"
] | 3
|
2018-01-15T06:58:32.000Z
|
2021-11-15T08:42:33.000Z
|
rrdb/rrdb.py
|
pegasus-kv/pegasus-python-client
|
cb1a91336cc04f5e2ffdc92a6f3831045ece4b95
|
[
"Apache-2.0"
] | null | null | null |
rrdb/rrdb.py
|
pegasus-kv/pegasus-python-client
|
cb1a91336cc04f5e2ffdc92a6f3831045ece4b95
|
[
"Apache-2.0"
] | 1
|
2018-01-22T02:07:40.000Z
|
2018-01-22T02:07:40.000Z
|
#
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import logging
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
    # Thrift-generated service interface for the rrdb key-value service
    # (file header above: autogenerated by Thrift 0.9.3, DO NOT EDIT).
    # Each method is an abstract stub; the generated docstring lists the
    # thrift argument names.  clear_scanner is declared ONEWAY in the IDL
    # (see Client.send_clear_scanner below), so it returns nothing.
    def put(self, update):
        """
        Parameters:
        - update
        """
        pass

    def multi_put(self, request):
        """
        Parameters:
        - request
        """
        pass

    def remove(self, key):
        """
        Parameters:
        - key
        """
        pass

    def multi_remove(self, request):
        """
        Parameters:
        - request
        """
        pass

    def get(self, key):
        """
        Parameters:
        - key
        """
        pass

    def multi_get(self, request):
        """
        Parameters:
        - request
        """
        pass

    def sortkey_count(self, hash_key):
        """
        Parameters:
        - hash_key
        """
        pass

    def ttl(self, key):
        """
        Parameters:
        - key
        """
        pass

    def get_scanner(self, request):
        """
        Parameters:
        - request
        """
        pass

    def scan(self, request):
        """
        Parameters:
        - request
        """
        pass

    def clear_scanner(self, context_id):
        """
        Parameters:
        - context_id
        """
        pass
class Client(Iface):
    # Thrift-generated synchronous RPC client for the rrdb service.
    # Every operation follows the same three-method pattern:
    #   <op>()       -- public call: send_<op>() then recv_<op>()
    #   send_<op>()  -- serialize an <op>_args struct (from ttypes) onto the
    #                   output protocol and flush the transport
    #   recv_<op>()  -- read back either an <op>_result struct or a wire-level
    #                   TApplicationException; raise if no result was set
    # clear_scanner is the one ONEWAY call and therefore has no recv_ method.
    def __init__(self, iprot, oprot=None):
        # If only one protocol is supplied it is used for both directions.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        # Sequence id for request/response matching (never incremented by
        # this generated code).
        self._seqid = 0

    def put(self, update):
        """
        Parameters:
        - update
        """
        self.send_put(update)
        return self.recv_put()

    def send_put(self, update):
        self._oprot.writeMessageBegin('put', TMessageType.CALL, self._seqid)
        args = put_args()
        args.update = update
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_put(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            # Server reported an application-level failure: rethrow locally.
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = put_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "put failed: unknown result")

    def multi_put(self, request):
        """
        Parameters:
        - request
        """
        self.send_multi_put(request)
        return self.recv_multi_put()

    def send_multi_put(self, request):
        self._oprot.writeMessageBegin('multi_put', TMessageType.CALL, self._seqid)
        args = multi_put_args()
        args.request = request
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_multi_put(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = multi_put_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "multi_put failed: unknown result")

    def remove(self, key):
        """
        Parameters:
        - key
        """
        self.send_remove(key)
        return self.recv_remove()

    def send_remove(self, key):
        self._oprot.writeMessageBegin('remove', TMessageType.CALL, self._seqid)
        args = remove_args()
        args.key = key
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_remove(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = remove_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "remove failed: unknown result")

    def multi_remove(self, request):
        """
        Parameters:
        - request
        """
        self.send_multi_remove(request)
        return self.recv_multi_remove()

    def send_multi_remove(self, request):
        self._oprot.writeMessageBegin('multi_remove', TMessageType.CALL, self._seqid)
        args = multi_remove_args()
        args.request = request
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_multi_remove(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = multi_remove_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "multi_remove failed: unknown result")

    def get(self, key):
        """
        Parameters:
        - key
        """
        self.send_get(key)
        return self.recv_get()

    def send_get(self, key):
        self._oprot.writeMessageBegin('get', TMessageType.CALL, self._seqid)
        args = get_args()
        args.key = key
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_get(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = get_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get failed: unknown result")

    def multi_get(self, request):
        """
        Parameters:
        - request
        """
        self.send_multi_get(request)
        return self.recv_multi_get()

    def send_multi_get(self, request):
        self._oprot.writeMessageBegin('multi_get', TMessageType.CALL, self._seqid)
        args = multi_get_args()
        args.request = request
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_multi_get(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = multi_get_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "multi_get failed: unknown result")

    def sortkey_count(self, hash_key):
        """
        Parameters:
        - hash_key
        """
        self.send_sortkey_count(hash_key)
        return self.recv_sortkey_count()

    def send_sortkey_count(self, hash_key):
        self._oprot.writeMessageBegin('sortkey_count', TMessageType.CALL, self._seqid)
        args = sortkey_count_args()
        args.hash_key = hash_key
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_sortkey_count(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = sortkey_count_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "sortkey_count failed: unknown result")

    def ttl(self, key):
        """
        Parameters:
        - key
        """
        self.send_ttl(key)
        return self.recv_ttl()

    def send_ttl(self, key):
        self._oprot.writeMessageBegin('ttl', TMessageType.CALL, self._seqid)
        args = ttl_args()
        args.key = key
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_ttl(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = ttl_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "ttl failed: unknown result")

    def get_scanner(self, request):
        """
        Parameters:
        - request
        """
        self.send_get_scanner(request)
        return self.recv_get_scanner()

    def send_get_scanner(self, request):
        self._oprot.writeMessageBegin('get_scanner', TMessageType.CALL, self._seqid)
        args = get_scanner_args()
        args.request = request
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_get_scanner(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = get_scanner_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_scanner failed: unknown result")

    def scan(self, request):
        """
        Parameters:
        - request
        """
        self.send_scan(request)
        return self.recv_scan()

    def send_scan(self, request):
        self._oprot.writeMessageBegin('scan', TMessageType.CALL, self._seqid)
        args = scan_args()
        args.request = request
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_scan(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = scan_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "scan failed: unknown result")

    def clear_scanner(self, context_id):
        """
        Parameters:
        - context_id
        """
        # ONEWAY call: fire-and-forget, no response is read.
        self.send_clear_scanner(context_id)

    def send_clear_scanner(self, context_id):
        self._oprot.writeMessageBegin('clear_scanner', TMessageType.ONEWAY, self._seqid)
        args = clear_scanner_args()
        args.context_id = context_id
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
class Processor(Iface, TProcessor):
  """Server-side dispatcher: decodes incoming RPC frames and invokes *handler*.

  Each registered method name maps to an unbound ``process_*`` method; all
  two-way methods share one request/reply skeleton (``_handle_call``).
  """

  def __init__(self, handler):
    self._handler = handler
    self._processMap = {}
    self._processMap["put"] = Processor.process_put
    self._processMap["multi_put"] = Processor.process_multi_put
    self._processMap["remove"] = Processor.process_remove
    self._processMap["multi_remove"] = Processor.process_multi_remove
    self._processMap["get"] = Processor.process_get
    self._processMap["multi_get"] = Processor.process_multi_get
    self._processMap["sortkey_count"] = Processor.process_sortkey_count
    self._processMap["ttl"] = Processor.process_ttl
    self._processMap["get_scanner"] = Processor.process_get_scanner
    self._processMap["scan"] = Processor.process_scan
    self._processMap["clear_scanner"] = Processor.process_clear_scanner

  def process(self, iprot, oprot):
    """Read one message from *iprot* and dispatch it.

    Unknown method names are answered with an UNKNOWN_METHOD exception
    frame (and return None, matching the generated-code convention);
    known methods return True after dispatch.
    """
    # 'mtype' (was 'type') avoids shadowing the builtin.
    (name, mtype, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
      iprot.skip(TType.STRUCT)
      iprot.readMessageEnd()
      x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
      oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
      x.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()
      return
    else:
      self._processMap[name](self, seqid, iprot, oprot)
    return True

  def _handle_call(self, seqid, iprot, oprot, fname, args_cls, result_cls, arg_attr):
    """Generic request/reply skeleton shared by every two-way method.

    Reads an *args_cls* struct, calls ``handler.<fname>`` with its
    *arg_attr* field, and writes either a REPLY carrying the result or an
    EXCEPTION frame (INTERNAL_ERROR) if the handler raised.  Transport
    errors and interpreter-exit signals always propagate.
    """
    args = args_cls()
    args.read(iprot)
    iprot.readMessageEnd()
    result = result_cls()
    try:
      result.success = getattr(self._handler, fname)(getattr(args, arg_attr))
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin(fname, msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_put(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "put", put_args, put_result, "update")

  def process_multi_put(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "multi_put", multi_put_args, multi_put_result, "request")

  def process_remove(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "remove", remove_args, remove_result, "key")

  def process_multi_remove(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "multi_remove", multi_remove_args, multi_remove_result, "request")

  def process_get(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "get", get_args, get_result, "key")

  def process_multi_get(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "multi_get", multi_get_args, multi_get_result, "request")

  def process_sortkey_count(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "sortkey_count", sortkey_count_args, sortkey_count_result, "hash_key")

  def process_ttl(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "ttl", ttl_args, ttl_result, "key")

  def process_get_scanner(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "get_scanner", get_scanner_args, get_scanner_result, "request")

  def process_scan(self, seqid, iprot, oprot):
    self._handle_call(seqid, iprot, oprot, "scan", scan_args, scan_result, "request")

  def process_clear_scanner(self, seqid, iprot, oprot):
    """Handle the oneway ``clear_scanner`` call; no reply is ever written."""
    args = clear_scanner_args()
    args.read(iprot)
    iprot.readMessageEnd()
    try:
      self._handler.clear_scanner(args.context_id)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except Exception:
      # Oneway call: there is no response channel, so handler errors are
      # deliberately swallowed.  Was a bare ``except:`` which also caught
      # BaseException subclasses such as GeneratorExit.
      pass
# HELPER FUNCTIONS AND STRUCTURES
class put_args:
  """Wire-level argument struct for the ``put`` RPC.

  Attributes:
   - update
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'update', (update_request, update_request.thrift_spec), None, ), # 1
  )

  def __init__(self, update=None,):
    self.update = update

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.update = update_request()
        self.update.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('put_args')
    if self.update is not None:
      oprot.writeFieldBegin('update', TType.STRUCT, 1)
      self.update.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.update)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class put_result:
  """Wire-level result struct for the ``put`` RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (update_response, update_response.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = update_response()
        self.success.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('put_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.success)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class multi_put_args:
  """Wire-level argument struct for the ``multi_put`` RPC.

  Attributes:
   - request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (multi_put_request, multi_put_request.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.request = multi_put_request()
        self.request.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('multi_put_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.request)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class multi_put_result:
  """Wire-level result struct for the ``multi_put`` RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (update_response, update_response.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = update_response()
        self.success.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('multi_put_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.success)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class remove_args:
  """Wire-level argument struct for the ``remove`` RPC.

  Attributes:
   - key
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'key', (base.ttypes.blob, base.ttypes.blob.thrift_spec), None, ), # 1
  )

  def __init__(self, key=None,):
    self.key = key

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.key = base.ttypes.blob()
        self.key.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('remove_args')
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRUCT, 1)
      self.key.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.key)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class remove_result:
  """Wire-level result struct for the ``remove`` RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (update_response, update_response.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = update_response()
        self.success.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('remove_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.success)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class multi_remove_args:
  """Wire-level argument struct for the ``multi_remove`` RPC.

  Attributes:
   - request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (multi_remove_request, multi_remove_request.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.request = multi_remove_request()
        self.request.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('multi_remove_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.request)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class multi_remove_result:
  """Wire-level result struct for the ``multi_remove`` RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (multi_remove_response, multi_remove_response.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = multi_remove_response()
        self.success.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('multi_remove_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.success)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_args:
  """Wire-level argument struct for the ``get`` RPC.

  Attributes:
   - key
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'key', (base.ttypes.blob, base.ttypes.blob.thrift_spec), None, ), # 1
  )

  def __init__(self, key=None,):
    self.key = key

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.key = base.ttypes.blob()
        self.key.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_args')
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRUCT, 1)
      self.key.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.key)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_result:
  """Wire-level result struct for the ``get`` RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (read_response, read_response.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = read_response()
        self.success.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.success)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class multi_get_args:
  """Wire-level argument struct for the ``multi_get`` RPC.

  Attributes:
   - request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (multi_get_request, multi_get_request.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.request = multi_get_request()
        self.request.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('multi_get_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.request)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class multi_get_result:
  """Wire-level result struct for the ``multi_get`` RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (multi_get_response, multi_get_response.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = multi_get_response()
        self.success.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('multi_get_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.success)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class sortkey_count_args:
  """Wire-level argument struct for the ``sortkey_count`` RPC.

  Attributes:
   - hash_key
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'hash_key', (base.ttypes.blob, base.ttypes.blob.thrift_spec), None, ), # 1
  )

  def __init__(self, hash_key=None,):
    self.hash_key = hash_key

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.hash_key = base.ttypes.blob()
        self.hash_key.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('sortkey_count_args')
    if self.hash_key is not None:
      oprot.writeFieldBegin('hash_key', TType.STRUCT, 1)
      self.hash_key.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.hash_key)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class sortkey_count_result:
  """Wire-level result struct for the ``sortkey_count`` RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (count_response, count_response.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = count_response()
        self.success.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('sortkey_count_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.success)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ttl_args:
  """Wire-level argument struct for the ``ttl`` RPC.

  Attributes:
   - key
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'key', (base.ttypes.blob, base.ttypes.blob.thrift_spec), None, ), # 1
  )

  def __init__(self, key=None,):
    self.key = key

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.key = base.ttypes.blob()
        self.key.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ttl_args')
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRUCT, 1)
      self.key.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.key)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ttl_result:
  """Wire-level result struct for the ``ttl`` RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (ttl_response, ttl_response.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: decode in C when the accelerated protocol/transport is in use.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = ttl_response()
        self.success.read(iprot)
      else:
        iprot.skip(ftype)  # unknown or mistyped field
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: encode in C when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ttl_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    return (17 * 31) ^ hash(self.success)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_scanner_args:
    """
    Thrift argument struct for the ``get_scanner`` RPC call.

    Attributes:
     - request: get_scanner_request describing the scan to open
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (get_scanner_request, get_scanner_request.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: the accelerated binary protocol decodes directly from
        # the C transport using the compiled thrift_spec.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk the fields one by one, skipping unknown ids/types.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = get_scanner_request()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: encode via the compiled fastbinary serializer.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_scanner_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # Fixed: use items() instead of the Python-2-only iteritems() so
        # repr() works on both Python 2 and Python 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_scanner_result:
    """
    Thrift result struct for the ``get_scanner`` RPC call.

    Attributes:
     - success: scan_response returned by the server on success
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (scan_response, scan_response.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: the accelerated binary protocol decodes directly from
        # the C transport using the compiled thrift_spec.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk the fields one by one, skipping unknown ids/types.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = scan_response()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: encode via the compiled fastbinary serializer.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_scanner_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        return value

    def __repr__(self):
        # Fixed: use items() instead of the Python-2-only iteritems() so
        # repr() works on both Python 2 and Python 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class scan_args:
    """
    Thrift argument struct for the ``scan`` RPC call.

    Attributes:
     - request: scan_request describing the scan to run
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (scan_request, scan_request.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: the accelerated binary protocol decodes directly from
        # the C transport using the compiled thrift_spec.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk the fields one by one, skipping unknown ids/types.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = scan_request()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: encode via the compiled fastbinary serializer.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('scan_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # Fixed: use items() instead of the Python-2-only iteritems() so
        # repr() works on both Python 2 and Python 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class scan_result:
    """
    Thrift result struct for the ``scan`` RPC call.

    Attributes:
     - success: scan_response returned by the server on success
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (scan_response, scan_response.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: the accelerated binary protocol decodes directly from
        # the C transport using the compiled thrift_spec.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk the fields one by one, skipping unknown ids/types.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = scan_response()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: encode via the compiled fastbinary serializer.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('scan_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        return value

    def __repr__(self):
        # Fixed: use items() instead of the Python-2-only iteritems() so
        # repr() works on both Python 2 and Python 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class clear_scanner_args:
    """
    Thrift argument struct for the ``clear_scanner`` RPC call.

    Attributes:
     - context_id: 64-bit id of the scanner context to clear
    """

    thrift_spec = (
        None,  # 0
        (1, TType.I64, 'context_id', None, None, ),  # 1
    )

    def __init__(self, context_id=None,):
        self.context_id = context_id

    def read(self, iprot):
        # Fast path: the accelerated binary protocol decodes directly from
        # the C transport using the compiled thrift_spec.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk the fields one by one, skipping unknown ids/types.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.context_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: encode via the compiled fastbinary serializer.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('clear_scanner_args')
        if self.context_id is not None:
            oprot.writeFieldBegin('context_id', TType.I64, 1)
            oprot.writeI64(self.context_id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.context_id)
        return value

    def __repr__(self):
        # Fixed: use items() instead of the Python-2-only iteritems() so
        # repr() works on both Python 2 and Python 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 29.390793
| 188
| 0.669088
| 7,017
| 60,016
| 5.444349
| 0.023229
| 0.03272
| 0.027328
| 0.05387
| 0.920268
| 0.899039
| 0.870691
| 0.846347
| 0.835824
| 0.82839
| 0
| 0.004103
| 0.216226
| 60,016
| 2,041
| 189
| 29.405194
| 0.808053
| 0.019728
| 0
| 0.842474
| 1
| 0
| 0.026506
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.143495
| false
| 0.007653
| 0.004464
| 0.040179
| 0.284439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a13a835bfbb509dfeca59ce98d544767c1d121e6
| 7,542
|
py
|
Python
|
Python_ch6/Ch6_4_Exceptions.py
|
ninhnguyen01/Python_Book
|
e5e372f1895b06e908cd0dd07dc68a260c34d7ad
|
[
"Apache-2.0"
] | null | null | null |
Python_ch6/Ch6_4_Exceptions.py
|
ninhnguyen01/Python_Book
|
e5e372f1895b06e908cd0dd07dc68a260c34d7ad
|
[
"Apache-2.0"
] | null | null | null |
Python_ch6/Ch6_4_Exceptions.py
|
ninhnguyen01/Python_Book
|
e5e372f1895b06e908cd0dd07dc68a260c34d7ad
|
[
"Apache-2.0"
] | null | null | null |
# Exceptions (Title)
# Reading

# ZeroDivisionError:
# This program divides a number by another number.
def main():
    # Read the two operands from the user.
    num1 = int(input('Enter a number: '))
    num2 = int(input('Enter another number: '))
    # Perform the division and show the result (raises
    # ZeroDivisionError when num2 is 0 -- the point of this demo).
    quotient = num1 / num2
    print(f'{num1} divided by {num2} is {quotient}')

# Call the main function.
if __name__ == '__main__':
    main()
# ZeroDivisionError avoided with if statement:
# This program divides a number by another number.
def main():
    # Read the two operands from the user.
    num1 = int(input('Enter a number: '))
    num2 = int(input('Enter another number: '))
    # Guard against dividing by zero before doing the division.
    if num2 != 0:
        quotient = num1 / num2
        print(f'{num1} divided by {num2} is {quotient}')
    else:
        print('Cannot divide by zero.')

# Call the main function.
if __name__ == '__main__':
    main()
# ValueError:
# This program calculates gross pay.
def main():
    # Get the number of hours worked (raises ValueError on non-numeric
    # input -- the point of this demo).
    hours = int(input('How many hours did you work? '))
    # Get the hourly pay rate.
    pay_rate = float(input('Enter your hourly pay rate: '))
    # Calculate the gross pay.
    gross_pay = hours * pay_rate
    # Display the gross pay.
    print(f'Gross pay: ${gross_pay:,.2f}')

# Call the main function.
# Fixed guard: the original compared against '_ _main_ _' (spaces inside
# the dunder), which never matches, so main() was never called.
if __name__ == '__main__':
    main()
# Exception handler:
# General Format:
# try:
#     statement
#     statement
#     etc.
# except ExceptionName:
#     statement
#     statement
#     etc.

# ValueError avoided with try/except:
# This program calculates gross pay.
def main():
    try:
        # Get the number of hours worked.
        hours = int(input('How many hours did you work? '))
        # Get the hourly pay rate.
        pay_rate = float(input('Enter your hourly pay rate: '))
        # Calculate the gross pay.
        gross_pay = hours * pay_rate
        # Display the gross pay.
        print(f'Gross pay: ${gross_pay:,.2f}')
    except ValueError:
        print('ERROR: Hours worked and hourly pay rate must')
        print('be valid numbers.')

# Call the main function.
# Fixed guard: was '_ _main_ _' (spaces inside the dunder), which never
# matches, so main() was never called.
if __name__ == '__main__':
    main()
# IOError:
# This program displays the contents
# of a file.
def main():
    # Ask the user which file to display.
    filename = input('Enter a filename: ')
    # Open, read, print and close the file. Opening a nonexistent file
    # raises IOError -- the point of this demo.
    input_file = open(filename, 'r')
    text = input_file.read()
    print(text)
    input_file.close()

# Call the main function.
if __name__ == '__main__':
    main()
# IOError avoided with try/except:
# This program displays the contents
# of a file.
def main():
    # Get the name of a file.
    filename = input('Enter a filename: ')
    try:
        # Open the file.
        infile = open(filename, 'r')
        # Read the file's contents.
        contents = infile.read()
        # Display the file's contents.
        print(contents)
        # Close the file.
        infile.close()
    except IOError:
        print('An error occurred trying to read')
        print('the file', filename)

# Call the main function.
# Fixed guard: was '_ _main_ _' (spaces inside the dunder), which never
# matches, so main() was never called.
if __name__ == '__main__':
    main()
# Multiple Exceptions:
# This program displays the total of the
# amounts in the sales_data.txt file.
def main():
    # Initialize an accumulator.
    total = 0.0
    try:
        # Open the sales_data.txt file.
        infile = open('sales_data.txt', 'r')
        # Read the values from the file and
        # accumulate them.
        for line in infile:
            amount = float(line)
            total += amount
        # Close the file.
        infile.close()
        # Print the total.
        print(f'{total:,.2f}')
    except IOError:
        # Fixed message typo: "occured" -> "occurred".
        print('An error occurred trying to read the file.')
    except ValueError:
        print('Non-numeric data found in the file.')
    except:
        # Catch-all handler (kept for the lesson; a bare except is
        # normally discouraged). Fixed message typo as above.
        print('An error occurred.')

# Call the main function.
# Fixed guard: was '_ _main_ _' (spaces inside the dunder), which never
# matches, so main() was never called.
if __name__ == '__main__':
    main()
# One Except Clause to catch all Exceptions:
# This program displays the total of the
# amounts in the sales_data.txt file.
def main():
    # Initialize an accumulator.
    total = 0.0
    try:
        # Open the sales_data.txt file.
        infile = open('sales_data.txt', 'r')
        # Read the values from the file and
        # accumulate them.
        for line in infile:
            amount = float(line)
            total += amount
        # Close the file.
        infile.close()
        # Print the total.
        print(f'{total:,.2f}')
    except:
        # Bare except kept deliberately -- this example demonstrates a
        # single clause that catches every exception.
        print('An error occurred.')

# Call the main function.
# Fixed guard: was '_ _main_ _' (spaces inside the dunder), which never
# matches, so main() was never called.
if __name__ == '__main__':
    main()
# Exception Default Error Message:
# This program calculates gross pay.
def main():
    try:
        # Get the number of hours worked.
        hours = int(input('How many hours did you work? '))
        # Get the hourly pay rate.
        pay_rate = float(input('Enter your hourly pay rate: '))
        # Calculate the gross pay.
        gross_pay = hours * pay_rate
        # Display the gross pay.
        print(f'Gross pay: ${gross_pay:,.2f}')
    except ValueError as err:
        # Print the exception's own default message.
        print(err)

# Call the main function.
# Fixed guard: was '_ _main_ _' (spaces inside the dunder), which never
# matches, so main() was never called.
if __name__ == '__main__':
    main()
# This program displays the total of the
# amounts in the sales_data.txt file.
def main():
    # Initialize an accumulator.
    total = 0.0
    try:
        # Open the sales_data.txt file.
        infile = open('sales_data.txt', 'r')
        # Read the values from the file and
        # accumulate them.
        for line in infile:
            amount = float(line)
            total += amount
        # Close the file.
        infile.close()
        # Print the total.
        print(f'{total:,.2f}')
    except Exception as err:
        # Print the exception's own default message.
        print(err)

# Call the main function.
# Fixed guard: was '_ _main_ _' (spaces inside the dunder), which never
# matches, so main() was never called.
if __name__ == '__main__':
    main()
# The else Clause:
# General Format:
# try:
#     statement
#     statement
#     etc.
# except ExceptionName:
#     statement
#     statement
#     etc.
# else:
#     statement
#     statement
#     etc.

# This program displays the total of the
# amounts in the sales_data.txt file.
def main():
    # Initialize an accumulator.
    total = 0.0
    try:
        # Open the sales_data.txt file.
        infile = open('sales_data.txt', 'r')
        # Read the values from the file and
        # accumulate them.
        for line in infile:
            amount = float(line)
            total += amount
        # Close the file.
        infile.close()
    except Exception as err:
        print(err)
    else:
        # Runs only when the try suite raised nothing: print the total.
        print(f'{total:,.2f}')

# Call the main function.
# Fixed guard: was '_ _main_ _' (spaces inside the dunder), which never
# matches, so main() was never called.
if __name__ == '__main__':
    main()
# finally Clause:
# General Format:
# try:
# statement
# statement
# etc.
# except ExceptionName:
# statement
# statement
# etc.
# finally:
# statement
# statement
# etc.
# End
# Checkpoint
# 6.19 Briefly describe what an exception is.
# A. An exception is an error that occurs while a program
# is running. In most cases, an exception causes a program
# to abruptly halt.
# 6.20 If an exception is raised and the program does not handle
# it with a try/except statement, what happens?
# A. The program halts.
# 6.21 What type of exception does a program raise when it tries
# to open a nonexistent file?
# A. IOError.
# 6.22 What type of exception does a program raise when it uses
# the float function to convert a non-numeric string to a number?
# A. ValueError.
# End
| 22.446429
| 65
| 0.603421
| 984
| 7,542
| 4.511179
| 0.151423
| 0.029962
| 0.03244
| 0.047083
| 0.806263
| 0.797702
| 0.779004
| 0.765488
| 0.719982
| 0.675152
| 0
| 0.008841
| 0.295147
| 7,542
| 336
| 66
| 22.446429
| 0.826185
| 0.462344
| 0
| 0.909836
| 0
| 0
| 0.226854
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090164
| false
| 0
| 0
| 0
| 0.090164
| 0.188525
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a186eb03412bef3a6cc899ccb6ec9826159af45a
| 147
|
py
|
Python
|
torchlib/layers/__init__.py
|
AnonymousExplorer/Conditional-GANs-Pytorch
|
6c15ec67217156d6f041e34efe29ab62f9ef7c7d
|
[
"MIT"
] | 40
|
2018-12-11T02:14:19.000Z
|
2022-03-19T06:16:26.000Z
|
torchlib/layers/__init__.py
|
AnonymousExplorer/Conditional-GANs-Pytorch
|
6c15ec67217156d6f041e34efe29ab62f9ef7c7d
|
[
"MIT"
] | null | null | null |
torchlib/layers/__init__.py
|
AnonymousExplorer/Conditional-GANs-Pytorch
|
6c15ec67217156d6f041e34efe29ab62f9ef7c7d
|
[
"MIT"
] | 19
|
2019-03-21T19:11:14.000Z
|
2022-01-17T05:54:13.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from torchlib.layers.layers import *
| 24.5
| 38
| 0.863946
| 19
| 147
| 5.947368
| 0.473684
| 0.265487
| 0.424779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115646
| 147
| 5
| 39
| 29.4
| 0.869231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a19c426cfa17fe02fd822e6b8f96c100cf44a9ad
| 4,443
|
py
|
Python
|
pyaz/netappfiles/volume/backup/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/netappfiles/volume/backup/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/netappfiles/volume/backup/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage Azure NetApp Files (ANF) Volume Backup Resources.
'''
from .... pyaz_utils import _call_az
def show(account_name, backup_name, pool_name, resource_group, volume_name):
    '''
    Get the specified ANF Backup.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - backup_name -- The name of the backup.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - volume_name -- Name of the ANF volume.
    '''
    # NOTE: locals() captures exactly the declared parameters, which
    # _call_az maps onto CLI arguments -- do not introduce local
    # variables before this call.
    return _call_az("az netappfiles volume backup show", locals())
def list(account_name, pool_name, resource_group, volume_name):
    '''
    List the ANF Backups for the specified volume.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - volume_name -- Name of the ANF volume.
    '''
    # NOTE: this shadows the builtin list() inside this module, but the
    # name mirrors the `az ... list` subcommand and is part of the API.
    # locals() captures exactly the declared parameters for _call_az.
    return _call_az("az netappfiles volume backup list", locals())
def delete(account_name, backup_name, pool_name, resource_group, volume_name):
    '''
    Delete the specified ANF Backup.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - backup_name -- The name of the backup.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - volume_name -- Name of the ANF volume.
    '''
    # NOTE: locals() captures exactly the declared parameters for
    # _call_az -- do not introduce local variables before this call.
    return _call_az("az netappfiles volume backup delete", locals())
def status(account_name, pool_name, resource_group, volume_name):
    '''
    Get backup status of the specified ANF Volume.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - volume_name -- Name of the ANF volume.
    '''
    # NOTE: locals() captures exactly the declared parameters for
    # _call_az -- do not introduce local variables before this call.
    return _call_az("az netappfiles volume backup status", locals())
def restore_status(account_name, pool_name, resource_group, volume_name):
    '''
    Get backup restore status of the specified ANF Volume.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - volume_name -- Name of the ANF volume.
    '''
    # NOTE: locals() captures exactly the declared parameters for
    # _call_az -- do not introduce local variables before this call.
    return _call_az("az netappfiles volume backup restore-status", locals())
def update(account_name, backup_name, pool_name, resource_group, volume_name, label=None, tags=None, use_existing_snapshot=None):
    '''
    Update the specified ANF backup with the values provided.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - backup_name -- The name of the backup.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - volume_name -- Name of the ANF volume.

    Optional Parameters:
    - label -- Label for backup.
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    - use_existing_snapshot -- Manual backup an already existing snapshot. This will always be false for scheduled backups and true/false for manual backups.
    '''
    # NOTE: locals() captures exactly the declared parameters (including
    # the optional ones) for _call_az -- do not introduce local
    # variables before this call.
    return _call_az("az netappfiles volume backup update", locals())
def create(account_name, backup_name, location, pool_name, resource_group, volume_name, use_existing_snapshot=None):
    '''
    Create specified ANF volume backup.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - backup_name -- The name of the backup.
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - volume_name -- Name of the ANF volume.

    Optional Parameters:
    - use_existing_snapshot -- Manual backup an already existing snapshot. This will always be false for scheduled backups and true/false for manual backups.
    '''
    # NOTE: locals() captures exactly the declared parameters for
    # _call_az -- do not introduce local variables before this call.
    return _call_az("az netappfiles volume backup create", locals())
| 40.761468
| 161
| 0.705379
| 608
| 4,443
| 5.006579
| 0.120066
| 0.059133
| 0.068003
| 0.089685
| 0.801248
| 0.793035
| 0.782852
| 0.782852
| 0.768068
| 0.768068
| 0
| 0
| 0.203916
| 4,443
| 108
| 162
| 41.138889
| 0.860616
| 0.666892
| 0
| 0
| 0
| 0
| 0.211017
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.466667
| false
| 0
| 0.066667
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
a1a052886ad6dc428ca2af41746db43c2c9f2548
| 77
|
py
|
Python
|
test_unittest.py
|
itsAshna/Stop-Words-List
|
b023008015d228991d92561bcc2fb109fa08bf28
|
[
"MIT"
] | null | null | null |
test_unittest.py
|
itsAshna/Stop-Words-List
|
b023008015d228991d92561bcc2fb109fa08bf28
|
[
"MIT"
] | null | null | null |
test_unittest.py
|
itsAshna/Stop-Words-List
|
b023008015d228991d92561bcc2fb109fa08bf28
|
[
"MIT"
] | 1
|
2021-07-08T17:22:16.000Z
|
2021-07-08T17:22:16.000Z
|
import parser
def test_check_double():
    # The parser module's runner() reports "PASS" when its check succeeds.
    outcome = parser.runner()
    assert outcome == "PASS"
| 15.4
| 36
| 0.701299
| 10
| 77
| 5.2
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168831
| 77
| 4
| 37
| 19.25
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0.051948
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
a1a930ee7f030dcd160f1734bdfe9ed33a8fd8c5
| 102
|
py
|
Python
|
app/back/mongo/data/collect/rivers/__init__.py
|
jgphilpott/polyplot
|
c46861174ee5881dadffbfb2278d555462523547
|
[
"MIT"
] | 5
|
2021-05-17T14:17:14.000Z
|
2021-12-14T12:54:32.000Z
|
app/back/mongo/data/collect/rivers/__init__.py
|
jgphilpott/iGraph
|
2a91ba57e4950856a83d3a109753f8f2badee829
|
[
"MIT"
] | 8
|
2020-02-09T02:48:41.000Z
|
2021-05-16T04:57:02.000Z
|
app/back/mongo/data/collect/rivers/__init__.py
|
jgphilpott/iGraph
|
2a91ba57e4950856a83d3a109753f8f2badee829
|
[
"MIT"
] | 2
|
2016-09-12T03:48:16.000Z
|
2019-05-04T14:15:19.000Z
|
from back.mongo.data.collect.rivers.model import *
from back.mongo.data.collect.rivers.mongo import *
| 34
| 50
| 0.803922
| 16
| 102
| 5.125
| 0.5
| 0.195122
| 0.317073
| 0.414634
| 0.731707
| 0.731707
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 102
| 2
| 51
| 51
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a1b251e27adb50843c3576bb689fe09f996d2ad0
| 2,664
|
py
|
Python
|
anubis-management-api/anubis/tests/test_tenants.py
|
orchestracities/anubis
|
34cfeeef4485f2e011da6e6216d8c67041366c63
|
[
"Apache-2.0"
] | null | null | null |
anubis-management-api/anubis/tests/test_tenants.py
|
orchestracities/anubis
|
34cfeeef4485f2e011da6e6216d8c67041366c63
|
[
"Apache-2.0"
] | 21
|
2022-02-21T14:07:04.000Z
|
2022-03-29T14:19:37.000Z
|
anubis-management-api/anubis/tests/test_tenants.py
|
orchestracities/anubis
|
34cfeeef4485f2e011da6e6216d8c67041366c63
|
[
"Apache-2.0"
] | null | null | null |
from fastapi.testclient import TestClient
from ..main import app
from .utils import test_db
# Module-level test client bound to the FastAPI application under test;
# shared by all tests in this file.
client = TestClient(app)
def test_tenants(test_db):
    """Tenant CRUD: create, list, fetch by id, delete."""
    # Creating a tenant returns 201 and a tenant-id header.
    resp = client.post(
        "/v1/tenants/",
        json={"name": "test"}
    )
    assert resp.status_code == 201
    tenant_id = resp.headers["tenant-id"]
    assert tenant_id
    # The new tenant is the only one listed.
    resp = client.get("/v1/tenants/")
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) == 1
    # Fetching it by id returns the stored name.
    resp = client.get("/v1/tenants/" + tenant_id)
    payload = resp.json()
    assert resp.status_code == 200
    assert payload["name"] == "test"
    # Deleting it succeeds with 204 No Content.
    resp = client.delete("/v1/tenants/" + tenant_id)
    assert resp.status_code == 204
def test_service_paths(test_db):
    """Service-path CRUD and filtering under a freshly created tenant."""
    # Create a tenant to own the service paths.
    resp = client.post(
        "/v1/tenants/",
        json={"name": "test"}
    )
    assert resp.status_code == 201
    tenant_id = resp.headers["tenant-id"]
    assert tenant_id
    # A new tenant starts with the root service path "/".
    resp = client.get("/v1/tenants/" + tenant_id + "/service_paths/")
    payload = resp.json()
    assert resp.status_code == 200
    assert payload[0]["path"] == "/"
    # Add a first nested path.
    resp = client.post(
        "/v1/tenants/" + tenant_id + "/service_paths",
        json={"path": "/foobar"}
    )
    assert resp.status_code == 201
    service_path_id = resp.headers["Service-Path-ID"]
    assert service_path_id
    # Add a deeper path; keep its id for the by-id lookups below.
    resp = client.post(
        "/v1/tenants/" + tenant_id + "/service_paths",
        json={"path": "/foobar/barbar"}
    )
    assert resp.status_code == 201
    service_path_id = resp.headers["Service-Path-ID"]
    assert service_path_id
    # All three paths ("/", "/foobar", "/foobar/barbar") are listed.
    resp = client.get("/v1/tenants/" + tenant_id + "/service_paths/")
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) == 3
    # Exact-name filter matches a single path.
    resp = client.get(
        "/v1/tenants/" +
        tenant_id +
        "/service_paths/?name=/foobar/barbar")
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) == 1
    # Prefix filter "/foobar" matches both nested paths.
    resp = client.get(
        "/v1/tenants/" +
        tenant_id +
        "/service_paths/?name=/foobar")
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) == 2
    # Fetching by id returns the full record.
    resp = client.get(
        "/v1/tenants/" +
        tenant_id +
        "/service_paths/" +
        service_path_id)
    payload = resp.json()
    assert resp.status_code == 200
    assert payload["path"] == "/foobar/barbar"
    assert payload["tenant_id"] == tenant_id
    # Deletion succeeds with 204 No Content.
    resp = client.delete(
        "/v1/tenants/" +
        tenant_id +
        "/service_paths/" +
        service_path_id)
    assert resp.status_code == 204
| 26.909091
| 73
| 0.616366
| 316
| 2,664
| 5.022152
| 0.123418
| 0.090737
| 0.163831
| 0.196597
| 0.876497
| 0.876497
| 0.84247
| 0.806553
| 0.806553
| 0.744171
| 0
| 0.028274
| 0.243243
| 2,664
| 98
| 74
| 27.183673
| 0.758929
| 0
| 0
| 0.703704
| 0
| 0
| 0.165165
| 0.023649
| 0
| 0
| 0
| 0
| 0.308642
| 1
| 0.024691
| false
| 0
| 0.037037
| 0
| 0.061728
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1b378f5abe19b555783636f828945b0b7ae173b
| 2,565
|
py
|
Python
|
tests/scripts/test_update_instrument.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
tests/scripts/test_update_instrument.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
tests/scripts/test_update_instrument.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
"""
import pytest
from sampledb.logic import instruments
import sampledb.__main__ as scripts
from ..test_utils import app_context
def test_update_instrument(capsys):
    """The CLI update_instrument command clears the description."""
    instrument_name = 'Example Instrument'
    instrument_description = 'Example Instrument Description'
    instrument = instruments.create_instrument(instrument_name, instrument_description)
    assert len(instruments.get_instruments()) == 1
    # Update via the CLI entry point, passing an empty description.
    scripts.main([scripts.__file__, 'update_instrument', str(instrument.id), instrument_name, ''])
    assert "Success" in capsys.readouterr()[0]
    # Re-fetch and verify: name kept, description cleared, no users.
    assert len(instruments.get_instruments()) == 1
    instrument = instruments.get_instruments()[0]
    assert instrument.name == instrument_name
    assert instrument.description == ''
    assert len(instrument.responsible_users) == 0
def test_update_instrument_missing_arguments(capsys):
    """Omitting name/description exits with usage and changes nothing."""
    name = 'Example Instrument'
    description = 'Example Instrument Description'
    instrument = instruments.create_instrument(name, description)
    assert len(instruments.get_instruments()) == 1
    with pytest.raises(SystemExit) as exc_info:
        scripts.main([scripts.__file__, 'update_instrument', str(instrument.id)])
    # Fixed: compare the exit code, not the SystemExit object -- the
    # object itself is never equal to 0, so the old assertion was vacuous.
    assert exc_info.value.code != 0
    assert "Usage" in capsys.readouterr()[0]
    assert len(instruments.get_instruments()) == 1
    instrument = instruments.get_instruments()[0]
    assert instrument.name == name
    assert instrument.description == description
def test_update_missing_instrument(capsys):
    """Updating a nonexistent instrument id exits with an error."""
    name = 'Example Instrument'
    description = 'Example Instrument Description'
    assert len(instruments.get_instruments()) == 0
    with pytest.raises(SystemExit) as exc_info:
        scripts.main([scripts.__file__, 'update_instrument', str(1), name, description])
    # Fixed: compare the exit code, not the SystemExit object -- the
    # object itself is never equal to 0, so the old assertion was vacuous.
    assert exc_info.value.code != 0
    assert "Error: no instrument with this id exists" in capsys.readouterr()[1]
    assert len(instruments.get_instruments()) == 0
def test_update_instrument_invalid_instrument_id(capsys):
name = 'Example Instrument'
description = 'Example Instrument Description'
instrument = instruments.create_instrument(name, description)
assert len(instruments.get_instruments()) == 1
with pytest.raises(SystemExit) as exc_info:
scripts.main([scripts.__file__, 'update_instrument', name, name, ""])
assert exc_info.value != 0
assert "Error: instrument_id must be an integer" in capsys.readouterr()[1]
assert len(instruments.get_instruments()) == 1
instrument = instruments.get_instruments()[0]
assert instrument.name == name
assert instrument.description == description
| 35.625
| 88
| 0.732554
| 294
| 2,565
| 6.180272
| 0.187075
| 0.127133
| 0.151348
| 0.101266
| 0.826087
| 0.80022
| 0.766648
| 0.733627
| 0.733627
| 0.610897
| 0
| 0.009763
| 0.161404
| 2,565
| 71
| 89
| 36.126761
| 0.83496
| 0.005068
| 0
| 0.647059
| 0
| 0
| 0.13808
| 0
| 0
| 0
| 0
| 0
| 0.431373
| 1
| 0.078431
| false
| 0
| 0.078431
| 0
| 0.156863
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1f3bf791894dcc4f7ed70790e61b8a6efc3a9c8
| 195
|
py
|
Python
|
scripts/wnet/create_exportable_tiles.py
|
SchiffFlieger/semantic-segmentation-master-thesis
|
f54b8321a9e0828e492bc6847acbff80c1a75d7c
|
[
"MIT"
] | 1
|
2021-02-07T09:22:44.000Z
|
2021-02-07T09:22:44.000Z
|
scripts/wnet/create_exportable_tiles.py
|
SchiffFlieger/semantic-segmentation-master-thesis
|
f54b8321a9e0828e492bc6847acbff80c1a75d7c
|
[
"MIT"
] | null | null | null |
scripts/wnet/create_exportable_tiles.py
|
SchiffFlieger/semantic-segmentation-master-thesis
|
f54b8321a9e0828e492bc6847acbff80c1a75d7c
|
[
"MIT"
] | null | null | null |
from scripts.common.create_exportable_tiles import create_exportable_tile_table
if __name__ == '__main__':
create_exportable_tile_table(table_suffix="wnet", tile_size=25.6, label_size=25.6)
| 39
| 86
| 0.825641
| 29
| 195
| 4.896552
| 0.62069
| 0.338028
| 0.28169
| 0.352113
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03352
| 0.082051
| 195
| 4
| 87
| 48.75
| 0.759777
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
62cf9a96333e9bfaa43b3118180b69188f97c7f9
| 11,104
|
py
|
Python
|
server/tests/range_test.py
|
chejennifer/website
|
b7589f1fa288e0e4e12cd97a6bbbcb9f3fd3d833
|
[
"Apache-2.0"
] | 11
|
2020-07-18T17:04:26.000Z
|
2022-03-23T08:44:09.000Z
|
server/tests/range_test.py
|
chejennifer/website
|
b7589f1fa288e0e4e12cd97a6bbbcb9f3fd3d833
|
[
"Apache-2.0"
] | 747
|
2020-06-22T16:56:45.000Z
|
2022-03-31T19:04:55.000Z
|
server/tests/range_test.py
|
chejennifer/website
|
b7589f1fa288e0e4e12cd97a6bbbcb9f3fd3d833
|
[
"Apache-2.0"
] | 33
|
2019-09-25T21:26:12.000Z
|
2022-03-23T08:27:33.000Z
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import lib.range as lib_range
class TestAggregate(unittest.TestCase):
def test_us_age(self):
input = {
"country/USA": [
"Count_Person_Upto4Years", "Count_Person_5To9Years",
"Count_Person_10To14Years", "Count_Person_15To19Years",
"Count_Person_20To24Years", "Count_Person_25To29Years",
"Count_Person_30To34Years", "Count_Person_35To39Years",
"Count_Person_40To44Years", "Count_Person_45To49Years",
"Count_Person_50To54Years", "Count_Person_55To59Years",
"Count_Person_60To64Years", "Count_Person_65To69Years",
"Count_Person_70To74Years", "Count_Person_75To79Years",
"Count_Person_80OrMoreYears", "Count_Person_5To17Years",
"Count_Person_18To24Years", "Count_Person_25To34Years",
"Count_Person_35To44Years", "Count_Person_45To54Years",
"Count_Person_55To59Years", "Count_Person_60To61Years",
"Count_Person_62To64Years", "Count_Person_65To74Years",
"Count_Person_65OrMoreYears", "Count_Person_75OrMoreYears"
]
}
expected = {
'country/USA': {
'Count_Person_5To17Years': ['Count_Person_5To17Years'],
'Count_Person_18To24Years': ['Count_Person_18To24Years'],
'Count_Person_25To34Years': ['Count_Person_25To34Years'],
'Count_Person_35To44Years': ['Count_Person_35To44Years'],
'Count_Person_45To54Years': ['Count_Person_45To54Years'],
'Count_Person_55To64Years': [
'Count_Person_55To59Years', 'Count_Person_60To61Years',
'Count_Person_62To64Years'
],
'Count_Person_65To74Years': ['Count_Person_65To74Years'],
'Count_Person_75OrMoreYears': ['Count_Person_75OrMoreYears']
}
}
assert lib_range.aggregate_stat_var(input, lib_range.AGE) == expected
def test_us_place_age(self):
input = {
"country/USA": [
"Count_Person_Upto4Years",
"Count_Person_5To9Years",
"Count_Person_10To14Years",
"Count_Person_15To19Years",
"Count_Person_20To24Years",
"Count_Person_25To29Years",
"Count_Person_30To34Years",
"Count_Person_35To39Years",
"Count_Person_40To44Years",
"Count_Person_45To49Years",
"Count_Person_50To54Years",
"Count_Person_55To59Years",
"Count_Person_60To64Years",
"Count_Person_65To69Years",
"Count_Person_70To74Years",
"Count_Person_75To79Years",
"Count_Person_80OrMoreYears",
"Count_Person_5To17Years",
"Count_Person_18To24Years",
"Count_Person_25To34Years",
"Count_Person_35To44Years",
"Count_Person_45To54Years",
"Count_Person_55To59Years",
"Count_Person_60To61Years",
"Count_Person_62To64Years",
"Count_Person_65To74Years",
"Count_Person_65OrMoreYears",
"Count_Person_75OrMoreYears",
"Count_Person_5To17Years",
"Count_Person_18To24Years",
"Count_Person_25To34Years",
"Count_Person_35To44Years",
"Count_Person_45To54Years",
"Count_Person_45To54Years",
"Count_Person_55To64Years",
"Count_Person_65To74Years",
],
"geoId/12345": [
"Count_Person_5To17Years",
"Count_Person_18To24Years",
"Count_Person_25To34Years",
"Count_Person_35To44Years",
"Count_Person_45To54Years",
"Count_Person_55To59Years",
"Count_Person_60To61Years",
"Count_Person_62To64Years",
"Count_Person_65To74Years",
]
}
expected = {
'country/USA': {
'Count_Person_5To17Years': ['Count_Person_5To17Years'],
'Count_Person_18To24Years': ['Count_Person_18To24Years'],
'Count_Person_25To34Years': ['Count_Person_25To34Years'],
'Count_Person_35To44Years': ['Count_Person_35To44Years'],
'Count_Person_45To54Years': ['Count_Person_45To54Years'],
'Count_Person_55To64Years': [
'Count_Person_55To59Years', 'Count_Person_60To61Years',
'Count_Person_62To64Years'
],
'Count_Person_65To74Years': ['Count_Person_65To74Years'],
'Count_Person_75OrMoreYears': ['Count_Person_75OrMoreYears']
},
'geoId/12345': {
'Count_Person_5To17Years': ['Count_Person_5To17Years'],
'Count_Person_18To24Years': ['Count_Person_18To24Years'],
'Count_Person_25To34Years': ['Count_Person_25To34Years'],
'Count_Person_35To44Years': ['Count_Person_35To44Years'],
'Count_Person_45To54Years': ['Count_Person_45To54Years'],
'Count_Person_55To64Years': [
'Count_Person_55To59Years', 'Count_Person_60To61Years',
'Count_Person_62To64Years'
],
'Count_Person_65To74Years': ['Count_Person_65To74Years']
}
}
assert lib_range.aggregate_stat_var(input, lib_range.AGE) == expected
def test_eu_place_age(self):
input = {
"country/FRA": [
"Count_Person_Upto4Years",
"Count_Person_5To9Years",
"Count_Person_10To14Years",
"Count_Person_15To19Years",
"Count_Person_20To24Years",
"Count_Person_25To29Years",
"Count_Person_30To34Years",
"Count_Person_35To39Years",
"Count_Person_40To44Years",
"Count_Person_45To49Years",
"Count_Person_50To54Years",
"Count_Person_55To59Years",
"Count_Person_60To64Years",
"Count_Person_65To69Years",
"Count_Person_70To74Years",
"Count_Person_75To79Years",
"Count_Person_80OrMoreYears",
"Count_Person_5To17Years",
"Count_Person_18To24Years",
"Count_Person_25To34Years",
"Count_Person_35To44Years",
"Count_Person_45To54Years",
"Count_Person_55To59Years",
"Count_Person_60To61Years",
"Count_Person_62To64Years",
"Count_Person_65To74Years",
"Count_Person_65OrMoreYears",
"Count_Person_75OrMoreYears",
"Count_Person_5To17Years",
"Count_Person_18To24Years",
"Count_Person_25To34Years",
"Count_Person_35To44Years",
"Count_Person_45To54Years",
"Count_Person_45To54Years",
"Count_Person_55To64Years",
"Count_Person_65To74Years",
],
"country/ITA": [
"Count_Person_Upto4Years",
"Count_Person_5To9Years",
"Count_Person_10To14Years",
"Count_Person_15To19Years",
"Count_Person_20To24Years",
"Count_Person_25To29Years",
"Count_Person_30To34Years",
"Count_Person_35To39Years",
"Count_Person_40To44Years",
"Count_Person_45To49Years",
"Count_Person_50To54Years",
"Count_Person_55To59Years",
"Count_Person_60To64Years",
"Count_Person_65To69Years",
]
}
expected = {
'country/FRA': {
'Count_Person_Upto9Years': [
'Count_Person_5To9Years',
'Count_Person_Upto4Years',
],
'Count_Person_10To19Years': [
'Count_Person_10To14Years',
'Count_Person_15To19Years',
],
'Count_Person_20To29Years': [
'Count_Person_20To24Years',
'Count_Person_25To29Years',
],
'Count_Person_30To39Years': [
'Count_Person_30To34Years',
'Count_Person_35To39Years',
],
'Count_Person_40To49Years': [
'Count_Person_40To44Years', 'Count_Person_45To49Years'
],
'Count_Person_50To59Years': [
'Count_Person_50To54Years',
'Count_Person_55To59Years',
],
'Count_Person_60To69Years': [
'Count_Person_60To64Years',
'Count_Person_65To69Years',
],
'Count_Person_70OrMoreYears': [
'Count_Person_70To74Years', 'Count_Person_75To79Years',
'Count_Person_80OrMoreYears'
]
},
'country/ITA': {
'Count_Person_Upto9Years': [
'Count_Person_5To9Years', 'Count_Person_Upto4Years'
],
'Count_Person_10To19Years': [
'Count_Person_10To14Years', 'Count_Person_15To19Years'
],
'Count_Person_20To29Years': [
'Count_Person_20To24Years', 'Count_Person_25To29Years'
],
'Count_Person_30To39Years': [
'Count_Person_30To34Years', 'Count_Person_35To39Years'
],
'Count_Person_40To49Years': [
'Count_Person_40To44Years', 'Count_Person_45To49Years'
],
'Count_Person_50To59Years': [
'Count_Person_50To54Years', 'Count_Person_55To59Years'
],
'Count_Person_60To69Years': [
'Count_Person_60To64Years', 'Count_Person_65To69Years'
]
}
}
assert lib_range.aggregate_stat_var(input, lib_range.AGE) == expected
| 43.545098
| 77
| 0.557637
| 841
| 11,104
| 6.812128
| 0.151011
| 0.424332
| 0.053762
| 0.06598
| 0.899284
| 0.894222
| 0.894222
| 0.890731
| 0.888811
| 0.876942
| 0
| 0.116302
| 0.361942
| 11,104
| 254
| 78
| 43.716535
| 0.692308
| 0.049352
| 0
| 0.726496
| 0
| 0
| 0.513375
| 0.502941
| 0
| 0
| 0
| 0
| 0.012821
| 1
| 0.012821
| false
| 0
| 0.008547
| 0
| 0.025641
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
62e5fc9d1f89a652cd9fe88a2a1e9894d3dfafcc
| 7,776
|
py
|
Python
|
models/wow_community/CharacterProfile.py
|
DylanCa/PBAW
|
b1f50fb9a21ada9ec6deb616820f9177fc36146f
|
[
"MIT"
] | 1
|
2018-08-07T10:00:44.000Z
|
2018-08-07T10:00:44.000Z
|
models/wow_community/CharacterProfile.py
|
DylanCa/PBAW
|
b1f50fb9a21ada9ec6deb616820f9177fc36146f
|
[
"MIT"
] | 1
|
2018-08-07T08:17:59.000Z
|
2018-08-07T08:17:59.000Z
|
models/wow_community/CharacterProfile.py
|
DylanCa/PBAW
|
b1f50fb9a21ada9ec6deb616820f9177fc36146f
|
[
"MIT"
] | null | null | null |
from models import Fetcher
class CharacterProfile():
def __init__(self):
self.route = '/wow/character/{}/{}'
def characterProfile(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server, locale=locale, route=self.route)
def achievements(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=achievements')
def appearance(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=appearance')
def feed(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=feed')
def guild(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=guild')
def hunterPets(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=hunterPets')
def items(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=items')
def mounts(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=mounts')
def pets(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=pets')
def petSlots(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=petSlots')
def professions(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=professions')
def progression(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=progression')
def pvp(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=pvp')
def quests(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=quests')
def reputation(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=reputation')
def statistics(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=statistics')
def stats(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=stats')
def talents(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=talents')
def titles(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=titles')
def audit(self,
server="eu",
realm="archimonde",
characterName="Protòtype",
locale="en_US"):
self.route = self.route.format(realm, characterName)
return Fetcher.fetchData(
server=server,
locale=locale,
route=self.route,
params='fields=audit')
| 27.477032
| 60
| 0.501543
| 650
| 7,776
| 5.963077
| 0.069231
| 0.141641
| 0.144479
| 0.087719
| 0.889577
| 0.889577
| 0.889577
| 0.889577
| 0.889577
| 0.889577
| 0
| 0
| 0.393904
| 7,776
| 282
| 61
| 27.574468
| 0.822406
| 0
| 0
| 0.800905
| 0
| 0
| 0.104295
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095023
| false
| 0
| 0.004525
| 0
| 0.19457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62fcc9cd4e46ee14c0831b6431d373fb69986fc5
| 25,639
|
py
|
Python
|
publishfeed/test_data/feedparser_data.py
|
RobertLD/publishfeed-OTC
|
b67192517706f5dfbe0210f5deaef258ae10d28d
|
[
"MIT"
] | 9
|
2017-06-08T18:13:33.000Z
|
2021-11-02T12:41:46.000Z
|
publishfeed/test_data/feedparser_data.py
|
RobertLD/publishfeed-OTC
|
b67192517706f5dfbe0210f5deaef258ae10d28d
|
[
"MIT"
] | 4
|
2021-03-30T15:52:52.000Z
|
2021-04-11T17:22:46.000Z
|
publishfeed/test_data/feedparser_data.py
|
RobertLD/publishfeed-OTC
|
b67192517706f5dfbe0210f5deaef258ae10d28d
|
[
"MIT"
] | 4
|
2020-11-26T23:20:16.000Z
|
2022-03-03T12:20:25.000Z
|
from munch import munchify
# A real feedparser response example with the time structs removed (the published_parsed and updated_parsed fields)
feedparser_parse_response = {'feed': {'title': 'Hacker News', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Hacker News'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://news.ycombinator.com/'}], 'link': 'https://news.ycombinator.com/', 'subtitle': 'Links for the intellectually curious, ranked by readers.', 'subtitle_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Links for the intellectually curious, ranked by readers.'}}, 'entries': [{'title': 'In 1957, Five Men Agreed to Stand Under an Exploding Nuclear Bomb (2012)', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'In 1957, Five Men Agreed to Stand Under an Exploding Nuclear Bomb (2012)'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://www.npr.org/sections/krulwich/2012/07/16/156851175/five-men-agree-to-stand-directly-under-an-exploding-nuclear-bomb'}], 'link': 'http://www.npr.org/sections/krulwich/2012/07/16/156851175/five-men-agree-to-stand-directly-under-an-exploding-nuclear-bomb', 'published': 'Wed, 7 Jun 2017 16:25:41 +0000', 'comments': 'https://news.ycombinator.com/item?id=14507673', 'summary': '<a href="https://news.ycombinator.com/item?id=14507673">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14507673">Comments</a>'}}, {'title': 'Americans from Both Political Parties Overwhelmingly Support Net Neutrality', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Americans from Both Political Parties Overwhelmingly Support Net Neutrality'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 
'https://blog.mozilla.org/blog/2017/06/06/new-mozilla-poll-americans-political-parties-overwhelmingly-support-net-neutrality/'}], 'link': 'https://blog.mozilla.org/blog/2017/06/06/new-mozilla-poll-americans-political-parties-overwhelmingly-support-net-neutrality/', 'published': 'Wed, 7 Jun 2017 18:54:13 +0000', 'comments': 'https://news.ycombinator.com/item?id=14508921', 'summary': '<a href="https://news.ycombinator.com/item?id=14508921">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14508921">Comments</a>'}}, {'title': 'Options vs. Cash', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Options vs. Cash'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://danluu.com/startup-options/'}], 'link': 'https://danluu.com/startup-options/', 'published': 'Wed, 7 Jun 2017 11:39:04 +0000', 'comments': 'https://news.ycombinator.com/item?id=14505378', 'summary': '<a href="https://news.ycombinator.com/item?id=14505378">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14505378">Comments</a>'}}, {'title': 'Performance Improvements in .NET Core', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Performance Improvements in .NET Core'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://blogs.msdn.microsoft.com/dotnet/2017/06/07/performance-improvements-in-net-core/'}], 'link': 'https://blogs.msdn.microsoft.com/dotnet/2017/06/07/performance-improvements-in-net-core/', 'published': 'Wed, 7 Jun 2017 16:56:06 +0000', 'comments': 'https://news.ycombinator.com/item?id=14507936', 'summary': '<a href="https://news.ycombinator.com/item?id=14507936">Comments</a>', 'summary_detail': 
{'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14507936">Comments</a>'}}, {'title': 'Oldest Fossils of Homo Sapiens Found in Morocco', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Oldest Fossils of Homo Sapiens Found in Morocco'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://www.nytimes.com/2017/06/07/science/human-fossils-morocco.html'}], 'link': 'https://www.nytimes.com/2017/06/07/science/human-fossils-morocco.html', 'published': 'Wed, 7 Jun 2017 17:08:05 +0000', 'comments': 'https://news.ycombinator.com/item?id=14508029', 'summary': '<a href="https://news.ycombinator.com/item?id=14508029">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14508029">Comments</a>'}}, {'title': 'Atlas of Lie Groups and Representations', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Atlas of Lie Groups and Representations'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://www.liegroups.org/software/documentation/atlasofliegroups-docs/index.html'}], 'link': 'http://www.liegroups.org/software/documentation/atlasofliegroups-docs/index.html', 'published': 'Wed, 7 Jun 2017 10:34:20 +0000', 'comments': 'https://news.ycombinator.com/item?id=14505047', 'summary': '<a href="https://news.ycombinator.com/item?id=14505047">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14505047">Comments</a>'}}, {'title': 'A Brief History of the UUID', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'A Brief History of the UUID'}, 'links': 
[{'rel': 'alternate', 'type': 'text/html', 'href': 'https://segment.com/blog/a-brief-history-of-the-uuid/'}], 'link': 'https://segment.com/blog/a-brief-history-of-the-uuid/', 'published': 'Wed, 7 Jun 2017 17:51:36 +0000', 'comments': 'https://news.ycombinator.com/item?id=14508413', 'summary': '<a href="https://news.ycombinator.com/item?id=14508413">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14508413">Comments</a>'}}, {'title': 'Apple Announces Full WebRTC Support in Safari 11', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Apple Announces Full WebRTC Support in Safari 11'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://blog.peer5.com/apple-announces-support-for-webrtc-in-safari-11/'}], 'link': 'https://blog.peer5.com/apple-announces-support-for-webrtc-in-safari-11/', 'published': 'Wed, 7 Jun 2017 19:17:45 +0000', 'comments': 'https://news.ycombinator.com/item?id=14509100', 'summary': '<a href="https://news.ycombinator.com/item?id=14509100">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14509100">Comments</a>'}}, {'title': 'Reducers, transducers and core.async in Clojure', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Reducers, transducers and core.async in Clojure'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://eli.thegreenplace.net/2017/reducers-transducers-and-coreasync-in-clojure/'}], 'link': 'http://eli.thegreenplace.net/2017/reducers-transducers-and-coreasync-in-clojure/', 'published': 'Wed, 7 Jun 2017 13:04:39 +0000', 'comments': 'https://news.ycombinator.com/item?id=14506012', 'summary': '<a 
href="https://news.ycombinator.com/item?id=14506012">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14506012">Comments</a>'}}, {'title': 'Software Companies Tech Competency Matrix', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Software Companies Tech Competency Matrix'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://geshan.com.np/blog/2017/06/software-companies-tech-competency-matrix/'}], 'link': 'https://geshan.com.np/blog/2017/06/software-companies-tech-competency-matrix/', 'published': 'Wed, 7 Jun 2017 14:12:06 +0000', 'comments': 'https://news.ycombinator.com/item?id=14506563', 'summary': '<a href="https://news.ycombinator.com/item?id=14506563">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14506563">Comments</a>'}}, {'title': 'The future of MDN: a focus on web docs', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'The future of MDN: a focus on web docs'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://blog.mozilla.org/opendesign/future-mdn-focus-web-docs/'}], 'link': 'https://blog.mozilla.org/opendesign/future-mdn-focus-web-docs/', 'published': 'Wed, 7 Jun 2017 08:38:27 +0000', 'comments': 'https://news.ycombinator.com/item?id=14504604', 'summary': '<a href="https://news.ycombinator.com/item?id=14504604">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14504604">Comments</a>'}}, {'title': "Stanford's therapy chatbot for depression", 'title_detail': {'type': 'text/plain', 'language': None, 'base': 
'https://news.ycombinator.com/rss', 'value': "Stanford's therapy chatbot for depression"}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://www.businessinsider.fr/us/stanford-therapy-chatbot-depression-anxiety-woebot-2017-6/'}], 'link': 'http://www.businessinsider.fr/us/stanford-therapy-chatbot-depression-anxiety-woebot-2017-6/', 'published': 'Wed, 7 Jun 2017 07:20:44 +0000', 'comments': 'https://news.ycombinator.com/item?id=14504306', 'summary': '<a href="https://news.ycombinator.com/item?id=14504306">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14504306">Comments</a>'}}, {'title': 'Conformity Excuses', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Conformity Excuses'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://www.overcomingbias.com/2017/06/conformity-excuses.html'}], 'link': 'http://www.overcomingbias.com/2017/06/conformity-excuses.html', 'published': 'Wed, 7 Jun 2017 09:36:47 +0000', 'comments': 'https://news.ycombinator.com/item?id=14504822', 'summary': '<a href="https://news.ycombinator.com/item?id=14504822">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14504822">Comments</a>'}}, {'title': 'Set up a malware analysis lab with VirtualBox, INetSim and Burp', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Set up a malware analysis lab with VirtualBox, INetSim and Burp'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://blog.christophetd.fr/set-up-your-own-malware-analysis-lab-with-virtualbox-inetsim-and-burp/'}], 'link': 'https://blog.christophetd.fr/set-up-your-own-malware-analysis-lab-with-virtualbox-inetsim-and-burp/', 
'published': 'Wed, 7 Jun 2017 11:42:36 +0000', 'comments': 'https://news.ycombinator.com/item?id=14505406', 'summary': '<a href="https://news.ycombinator.com/item?id=14505406">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14505406">Comments</a>'}}, {'title': 'Show HN: ProximityHash – Geohashes in Proximity', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Show HN: ProximityHash – Geohashes in Proximity'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://github.com/ashwin711/proximityhash'}], 'link': 'https://github.com/ashwin711/proximityhash', 'published': 'Wed, 7 Jun 2017 18:13:06 +0000', 'comments': 'https://news.ycombinator.com/item?id=14508594', 'summary': '<a href="https://news.ycombinator.com/item?id=14508594">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14508594">Comments</a>'}}, {'title': 'The Boolean Satisfiability Problem and SAT Solvers', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'The Boolean Satisfiability Problem and SAT Solvers'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://0a.io/boolean-satisfiability-problem-or-sat-in-5-minutes/'}], 'link': 'http://0a.io/boolean-satisfiability-problem-or-sat-in-5-minutes/', 'published': 'Wed, 7 Jun 2017 18:07:30 +0000', 'comments': 'https://news.ycombinator.com/item?id=14508546', 'summary': '<a href="https://news.ycombinator.com/item?id=14508546">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14508546">Comments</a>'}}, {'title': 'Coursera raises Series D', 
'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Coursera raises Series D'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://techcrunch.com/2017/06/07/online-learning-startup-coursera-raises-64m-at-an-800m-valuation/'}], 'link': 'https://techcrunch.com/2017/06/07/online-learning-startup-coursera-raises-64m-at-an-800m-valuation/', 'published': 'Wed, 7 Jun 2017 13:47:23 +0000', 'comments': 'https://news.ycombinator.com/item?id=14506383', 'summary': '<a href="https://news.ycombinator.com/item?id=14506383">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14506383">Comments</a>'}}, {'title': 'Another “don’t cargo cult” article', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Another “don’t cargo cult” article'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://blog.bradfieldcs.com/you-are-not-google-84912cf44afb'}], 'link': 'https://blog.bradfieldcs.com/you-are-not-google-84912cf44afb', 'published': 'Wed, 7 Jun 2017 17:34:19 +0000', 'comments': 'https://news.ycombinator.com/item?id=14508264', 'summary': '<a href="https://news.ycombinator.com/item?id=14508264">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14508264">Comments</a>'}}, {'title': 'Ghost in the Shell FUI Design', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Ghost in the Shell FUI Design'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://www.hudsandguis.com/home/2017/4/17/ghostintheshell-fui'}], 'link': 'http://www.hudsandguis.com/home/2017/4/17/ghostintheshell-fui', 'published': 'Wed, 7 Jun 2017 06:41:53 +0000', 
'comments': 'https://news.ycombinator.com/item?id=14504163', 'summary': '<a href="https://news.ycombinator.com/item?id=14504163">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14504163">Comments</a>'}}, {'title': 'A day without JavaScript', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'A day without JavaScript'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://sonniesedge.co.uk/blog/a-day-without-javascript'}], 'link': 'https://sonniesedge.co.uk/blog/a-day-without-javascript', 'published': 'Wed, 7 Jun 2017 11:28:09 +0000', 'comments': 'https://news.ycombinator.com/item?id=14505315', 'summary': '<a href="https://news.ycombinator.com/item?id=14505315">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14505315">Comments</a>'}}, {'title': 'Cache Organization in Intel CPUs (2009)', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Cache Organization in Intel CPUs (2009)'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://duartes.org/gustavo/blog/post/intel-cpu-caches/'}], 'link': 'http://duartes.org/gustavo/blog/post/intel-cpu-caches/', 'published': 'Wed, 7 Jun 2017 13:50:34 +0000', 'comments': 'https://news.ycombinator.com/item?id=14506401', 'summary': '<a href="https://news.ycombinator.com/item?id=14506401">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14506401">Comments</a>'}}, {'title': "Einstein's Philosophy of Science (2014)", 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 
'value': "Einstein's Philosophy of Science (2014)"}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://plato.stanford.edu/entries/einstein-philscience/'}], 'link': 'https://plato.stanford.edu/entries/einstein-philscience/', 'published': 'Tue, 6 Jun 2017 18:38:23 +0000', 'comments': 'https://news.ycombinator.com/item?id=14499850', 'summary': '<a href="https://news.ycombinator.com/item?id=14499850">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14499850">Comments</a>'}}, {'title': 'Getting started with the F# and .Net ecosystem', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Getting started with the F# and .Net ecosystem'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://www.prigrammer.com/?p=363'}], 'link': 'http://www.prigrammer.com/?p=363', 'published': 'Wed, 7 Jun 2017 13:34:58 +0000', 'comments': 'https://news.ycombinator.com/item?id=14506287', 'summary': '<a href="https://news.ycombinator.com/item?id=14506287">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14506287">Comments</a>'}}, {'title': 'BuildZoom (YC W13 – build your dream home) is hiring a VP of Sales', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'BuildZoom (YC W13 – build your dream home) is hiring a VP of Sales'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://jobs.lever.co/buildzoom'}], 'link': 'https://jobs.lever.co/buildzoom', 'published': 'Wed, 7 Jun 2017 17:22:09 +0000', 'comments': 'https://news.ycombinator.com/item?id=14508141', 'summary': '<a href="https://news.ycombinator.com/item?id=14508141">Comments</a>', 'summary_detail': {'type': 'text/html', 
'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14508141">Comments</a>'}}, {'title': 'Edison, Clarence Dally, and the Hidden Perils of X-Rays (1903)', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Edison, Clarence Dally, and the Hidden Perils of X-Rays (1903)'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://web.archive.org/web/20120218234715/http://home.gwi.net/~dnb/read/edison/edison_xrays.htm'}], 'link': 'http://web.archive.org/web/20120218234715/http://home.gwi.net/~dnb/read/edison/edison_xrays.htm', 'published': 'Wed, 7 Jun 2017 03:04:41 +0000', 'comments': 'https://news.ycombinator.com/item?id=14503416', 'summary': '<a href="https://news.ycombinator.com/item?id=14503416">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14503416">Comments</a>'}}, {'title': 'Pharo 6.0 Released', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Pharo 6.0 Released'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'http://pharo.org/news/pharo6.0-released'}], 'link': 'http://pharo.org/news/pharo6.0-released', 'published': 'Wed, 7 Jun 2017 07:00:25 +0000', 'comments': 'https://news.ycombinator.com/item?id=14504244', 'summary': '<a href="https://news.ycombinator.com/item?id=14504244">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14504244">Comments</a>'}}, {'title': 'A curated list of design systems, pattern libraries, and more', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'A curated list of design systems, pattern libraries, and more'}, 'links': 
[{'rel': 'alternate', 'type': 'text/html', 'href': 'https://github.com/alexpate/awesome-design-systems'}], 'link': 'https://github.com/alexpate/awesome-design-systems', 'published': 'Wed, 7 Jun 2017 13:57:04 +0000', 'comments': 'https://news.ycombinator.com/item?id=14506458', 'summary': '<a href="https://news.ycombinator.com/item?id=14506458">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14506458">Comments</a>'}}, {'title': 'Comcast Has Always Opposed Internet Freedom', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Comcast Has Always Opposed Internet Freedom'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://www.eff.org/deeplinks/2017/06/dont-be-fooled-comcast-pr-machine-it-has-always-opposed-open-internet'}], 'link': 'https://www.eff.org/deeplinks/2017/06/dont-be-fooled-comcast-pr-machine-it-has-always-opposed-open-internet', 'published': 'Wed, 7 Jun 2017 14:46:30 +0000', 'comments': 'https://news.ycombinator.com/item?id=14506853', 'summary': '<a href="https://news.ycombinator.com/item?id=14506853">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14506853">Comments</a>'}}, {'title': 'Dasung Paperlike Pro: E-Ink Monitor with HDMI connector [video]', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'Dasung Paperlike Pro: E-Ink Monitor with HDMI connector [video]'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://www.youtube.com/watch?v=wj2Lvuc28k0'}], 'link': 'https://www.youtube.com/watch?v=wj2Lvuc28k0', 'published': 'Wed, 7 Jun 2017 12:33:22 +0000', 'comments': 'https://news.ycombinator.com/item?id=14505762', 'summary': '<a 
href="https://news.ycombinator.com/item?id=14505762">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14505762">Comments</a>'}}, {'title': 'List of Printers Which Do or Do Not Display Tracking Dots', 'title_detail': {'type': 'text/plain', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': 'List of Printers Which Do or Do Not Display Tracking Dots'}, 'links': [{'rel': 'alternate', 'type': 'text/html', 'href': 'https://www.eff.org/pages/list-printers-which-do-or-do-not-display-tracking-dots'}], 'link': 'https://www.eff.org/pages/list-printers-which-do-or-do-not-display-tracking-dots', 'published': 'Tue, 6 Jun 2017 22:12:51 +0000', 'comments': 'https://news.ycombinator.com/item?id=14501894', 'summary': '<a href="https://news.ycombinator.com/item?id=14501894">Comments</a>', 'summary_detail': {'type': 'text/html', 'language': None, 'base': 'https://news.ycombinator.com/rss', 'value': '<a href="https://news.ycombinator.com/item?id=14501894">Comments</a>'}}], 'bozo': 0, 'headers': {'Date': 'Wed, 07 Jun 2017 20:01:20 GMT', 'Content-Type': 'application/rss+xml', 'Transfer-Encoding': 'chunked', 'Connection': 'close', 'Set-Cookie': '__cfduid=d1c55ac10a1500623f632fe9f4feca9441496865679; expires=Thu, 07-Jun-18 20:01:19 GMT; path=/; domain=.ycombinator.com; HttpOnly', 'Cache-Control': 'private', 'X-Frame-Options': 'DENY', 'Strict-Transport-Security': 'max-age=31556900; includeSubDomains', 'Server': 'cloudflare-nginx', 'CF-RAY': '36b633e12e3c680f-EZE'}, 'href': 'https://news.ycombinator.com/rss', 'status': 200, 'encoding': 'utf-8', 'version': 'rss20', 'namespaces': {1}}
# Wrap the raw feedparser dict in a Munch so tests can use attribute-style
# access (e.g. fake_response.entries) as well as plain dict indexing.
fake_response = munchify(feedparser_parse_response)
| 4,273.166667
| 25,469
| 0.68973
| 3,578
| 25,639
| 4.923421
| 0.125489
| 0.123978
| 0.175976
| 0.202373
| 0.933413
| 0.905257
| 0.846673
| 0.803304
| 0.722071
| 0.619891
| 0
| 0.064979
| 0.067826
| 25,639
| 5
| 25,470
| 5,127.8
| 0.671925
| 0.003354
| 0
| 0
| 0
| 29.666667
| 0.803804
| 0.003992
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 13
|
1a28d23d0d4e839f26e2338bdbb3f8f97cffe64e
| 29,109
|
py
|
Python
|
analysis/explan_test.py
|
SCAII/SCAII
|
7ab302f788556392850d104d3df6e0b4a556414d
|
[
"BSD-3-Clause"
] | 1
|
2017-11-01T20:09:32.000Z
|
2017-11-01T20:09:32.000Z
|
analysis/explan_test.py
|
SCAII/SCAII
|
7ab302f788556392850d104d3df6e0b4a556414d
|
[
"BSD-3-Clause"
] | 103
|
2017-09-14T17:04:53.000Z
|
2018-08-15T22:52:32.000Z
|
analysis/explan_test.py
|
SCAII/SCAII
|
7ab302f788556392850d104d3df6e0b4a556414d
|
[
"BSD-3-Clause"
] | 6
|
2018-01-31T00:05:14.000Z
|
2020-01-29T07:01:29.000Z
|
import unittest
from flatten import parse_line
import extractionMap as extractionMap
# selectedRewardBar: "tutorial.scr,9-13-2018,12:23:47:489,1536866627489,1,1.0,userClick:721_277;region:scaii-interface;target:rewardBar;selectedRewardBar:Attack Q1.Enemy Destroyed,false,false,false,false,false,false"
# clickSaliencyMap: "tutorial.scr,9-13-2018,12:29:33:803,1536866973803,1,1.0,userClick:817_509;region:saliencyMap;target:saliencyMap--DP1-2_EnemyDestroyed--Size;clickSaliencyMap:Size_(NA_upperRightQuadrant),false,false,false,false,false,false"
# "tutorial.scr,9-13-2018,12:30:6:570,1536867006570,1,1.0,userClick:771_514;region:saliencyMap;target:saliencyMap--DP1-2_EnemyDestroyed--Size;clickSaliencyMap:Size_(friendly-Big Fort_upperLeftQuadrant),false,false,false,false,false,false"
# startMouseOverSaliencyMap: "tutorial.scr,9-13-2018,12:23:49:897,1536866629897,1,1.0,region:saliencyMap;target:saliencyMap--DP1-2_EnemyDestroyed--Tank;startMouseOverSaliencyMap:Tank,false,false,false,false,false,false"
# endMouseOverSaliencyMap: "tutorial.scr,9-13-2018,12:23:49:910,1536866629910,1,1.0,region:saliencyMap;target:saliencyMap--DP1-2_EnemyDestroyed--Tank;endMouseOverSaliencyMap:Tank,false,false,false,false,false,false"
# waitForResearcherStart: "task1.scr,9-18-2018,13:23:21:67,1537302201067,1,61.0,region:waitScreen;target:enter-wait-screen;waitForResearcherStart:yes,false,false,false,false,false,false"
# waitForResearcherEnd: "task1.scr,9-18-2018,13:23:27:957,1537302207957,61,61.0,region:waitScreen;target:user-wait-button-continue;waitForResearcherEnd:yes,false,false,false,false,false,false"
class TestFlatteningExplan(unittest.TestCase):
def test_selectedRewardBar(self):
line = "tutorial.scr,9-13-2018,12:23:47:489,1536866627489,1,1.0,userClick:721_277;region:scaii-interface;target:rewardBar;selectedRewardBar:Attack Q1.Enemy Destroyed,false,false,false,false,false,false"
extraction_map = extractionMap.get_extraction_map()
obj = parse_line(line,extraction_map)
self.assertEqual(obj["stepIntoDecisionPoint"], "NA")
self.assertEqual(obj["showQuestion"], "NA")
self.assertEqual(obj["hideEntityTooltips"], "NA")
self.assertEqual(obj["showEntityTooltip.entityInfo"], "NA")
self.assertEqual(obj["showEntityTooltip.tipQuadrant"], "NA")
self.assertEqual(obj["startMouseOverSaliencyMap"], "NA")
self.assertEqual(obj["endMouseOverSaliencyMap"], "NA")
self.assertEqual(obj["waitForResearcherStart"], "NA")
self.assertEqual(obj["waitForResearcherEnd"], "NA")
self.assertEqual(obj["userClick"], "yes")
self.assertEqual(obj["userClick.coordX"], "721")
self.assertEqual(obj["userClick.coordY"], "277")
self.assertEqual(obj["userClick.region"], "scaii-interface")
self.assertEqual(obj["userClick.target"], "rewardBar")
self.assertEqual(obj["userClick.answerQuestion.clickStep"], "NA")
self.assertEqual(obj["userClick.answerQuestion.questionId"], "NA")
self.assertEqual(obj["userClick.answerQuestion.answer1"], "NA")
self.assertEqual(obj["userClick.answerQuestion.answer2"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.fileName"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.date"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.time"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.1970Sec"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.decisionPoint"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.questionId"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.coordX"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.coordY"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.region"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.target"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.clickEntity.clickGameEntity"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.clickEntity.clickQuadrant"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.clickEntity.coordX"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.clickEntity.coordY"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.selectedRewardBar"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.clickSaliencyMap"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.clickSaliencyMap.clickGameEntity"], "NA")
self.assertEqual(obj["userClick.answerQuestion.userClick.clickSaliencyMap.clickQuadrant"], "NA")
self.assertEqual(obj["userClick.timelineClick"], "NA")
self.assertEqual(obj["userClick.jumpToDecisionPoint"], "NA")
self.assertEqual(obj["userClick.clickTimeLineBlocker"], "NA")
self.assertEqual(obj["userClick.play"], "NA")
self.assertEqual(obj["userClick.pause"], "NA")
self.assertEqual(obj["userClick.touchStepProgressLabel"], "NA")
self.assertEqual(obj["userClick.clickGameQuadrant"], "NA")
self.assertEqual(obj["userClick.clickEntity.clickGameEntity"], "NA")
self.assertEqual(obj["userClick.clickEntity.clickQuadrant"], "NA")
self.assertEqual(obj["userClick.clickEntity.coordX"], "NA")
self.assertEqual(obj["userClick.clickEntity.coordY"], "NA")
self.assertEqual(obj["userClick.clickActionLabel"], "NA")
self.assertEqual(obj["userClick.clickActionLabelDenied"], "NA")
self.assertEqual(obj["userClick.selectedRewardBar"], "Attack Q1.Enemy Destroyed")
self.assertEqual(obj["userClick.clickSaliencyMap"], "NA")
self.assertEqual(obj["userClick.clickSaliencyMap.clickGameEntity"], "NA")
self.assertEqual(obj["userClick.clickSaliencyMap.clickQuadrant"], "NA")
self.assertEqual(obj["userClick.touchCumRewardLabel"], "NA")
self.assertEqual(obj["userClick.touchCumRewardValueFor"], "NA")
def test_clickSaliencyMap(self):
    """A clickSaliencyMap log line: click coords, region, target and the
    three saliency-map fields are extracted; every other field is "NA".
    """
    line = "tutorial.scr,9-13-2018,12:30:6:570,1536867006570,1,1.0,userClick:771_514;region:saliencyMap;target:saliencyMap--DP1-2_EnemyDestroyed--Size;clickSaliencyMap:Size_(friendly-Big Fort_upperLeftQuadrant),false,false,false,false,false,false"
    # Only the fields below differ from the "NA" default.
    expected = {
        "userClick": "yes",
        "userClick.coordX": "771",
        "userClick.coordY": "514",
        "userClick.region": "saliencyMap",
        "userClick.target": "saliencyMap--DP1-2_EnemyDestroyed--Size",
        "userClick.clickSaliencyMap": "Size",
        "userClick.clickSaliencyMap.clickGameEntity": "friendly-Big Fort",
        "userClick.clickSaliencyMap.clickQuadrant": "upperLeftQuadrant",
    }
    extraction_map = extractionMap.get_extraction_map()
    obj = parse_line(line, extraction_map)
    # Every field name the original assertions covered; the names contain
    # no spaces, so a whitespace-split string lists them compactly.
    keys = (
        "stepIntoDecisionPoint showQuestion hideEntityTooltips "
        "showEntityTooltip.entityInfo showEntityTooltip.tipQuadrant "
        "startMouseOverSaliencyMap endMouseOverSaliencyMap "
        "waitForResearcherStart waitForResearcherEnd "
        "userClick userClick.coordX userClick.coordY "
        "userClick.region userClick.target "
        "userClick.answerQuestion.clickStep userClick.answerQuestion.questionId "
        "userClick.answerQuestion.answer1 userClick.answerQuestion.answer2 "
        "userClick.answerQuestion.userClick "
        "userClick.answerQuestion.userClick.fileName "
        "userClick.answerQuestion.userClick.date "
        "userClick.answerQuestion.userClick.time "
        "userClick.answerQuestion.userClick.1970Sec "
        "userClick.answerQuestion.userClick.decisionPoint "
        "userClick.answerQuestion.userClick.questionId "
        "userClick.answerQuestion.userClick.coordX "
        "userClick.answerQuestion.userClick.coordY "
        "userClick.answerQuestion.userClick.region "
        "userClick.answerQuestion.userClick.target "
        "userClick.answerQuestion.userClick.clickEntity.clickGameEntity "
        "userClick.answerQuestion.userClick.clickEntity.clickQuadrant "
        "userClick.answerQuestion.userClick.clickEntity.coordX "
        "userClick.answerQuestion.userClick.clickEntity.coordY "
        "userClick.answerQuestion.userClick.selectedRewardBar "
        "userClick.answerQuestion.userClick.clickSaliencyMap "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickGameEntity "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickQuadrant "
        "userClick.timelineClick userClick.jumpToDecisionPoint "
        "userClick.clickTimeLineBlocker userClick.play userClick.pause "
        "userClick.touchStepProgressLabel userClick.clickGameQuadrant "
        "userClick.clickEntity.clickGameEntity userClick.clickEntity.clickQuadrant "
        "userClick.clickEntity.coordX userClick.clickEntity.coordY "
        "userClick.clickActionLabel userClick.clickActionLabelDenied "
        "userClick.selectedRewardBar userClick.clickSaliencyMap "
        "userClick.clickSaliencyMap.clickGameEntity "
        "userClick.clickSaliencyMap.clickQuadrant "
        "userClick.touchCumRewardLabel userClick.touchCumRewardValueFor"
    ).split()
    for key in keys:
        # msg=key names the failing field instead of a bare value mismatch.
        self.assertEqual(obj[key], expected.get(key, "NA"), msg=key)
def test_startMouseOverSaliencyMap(self):
    """A startMouseOverSaliencyMap log line: the mouse-over name, region and
    target are extracted; every other field is "NA".
    """
    line = "tutorial.scr,9-13-2018,12:23:49:897,1536866629897,1,1.0,region:saliencyMap;target:saliencyMap--DP1-2_EnemyDestroyed--Tank;startMouseOverSaliencyMap:Tank,false,false,false,false,false,false"
    # Only the fields below differ from the "NA" default.
    expected = {
        "startMouseOverSaliencyMap": "Tank",
        "userClick.region": "saliencyMap",
        "userClick.target": "saliencyMap--DP1-2_EnemyDestroyed--Tank",
    }
    extraction_map = extractionMap.get_extraction_map()
    obj = parse_line(line, extraction_map)
    # Every field name the original assertions covered; the names contain
    # no spaces, so a whitespace-split string lists them compactly.
    keys = (
        "stepIntoDecisionPoint showQuestion hideEntityTooltips "
        "showEntityTooltip.entityInfo showEntityTooltip.tipQuadrant "
        "startMouseOverSaliencyMap endMouseOverSaliencyMap "
        "waitForResearcherStart waitForResearcherEnd "
        "userClick userClick.coordX userClick.coordY "
        "userClick.region userClick.target "
        "userClick.answerQuestion.clickStep userClick.answerQuestion.questionId "
        "userClick.answerQuestion.answer1 userClick.answerQuestion.answer2 "
        "userClick.answerQuestion.userClick "
        "userClick.answerQuestion.userClick.fileName "
        "userClick.answerQuestion.userClick.date "
        "userClick.answerQuestion.userClick.time "
        "userClick.answerQuestion.userClick.1970Sec "
        "userClick.answerQuestion.userClick.decisionPoint "
        "userClick.answerQuestion.userClick.questionId "
        "userClick.answerQuestion.userClick.coordX "
        "userClick.answerQuestion.userClick.coordY "
        "userClick.answerQuestion.userClick.region "
        "userClick.answerQuestion.userClick.target "
        "userClick.answerQuestion.userClick.clickEntity.clickGameEntity "
        "userClick.answerQuestion.userClick.clickEntity.clickQuadrant "
        "userClick.answerQuestion.userClick.clickEntity.coordX "
        "userClick.answerQuestion.userClick.clickEntity.coordY "
        "userClick.answerQuestion.userClick.selectedRewardBar "
        "userClick.answerQuestion.userClick.clickSaliencyMap "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickGameEntity "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickQuadrant "
        "userClick.timelineClick userClick.jumpToDecisionPoint "
        "userClick.clickTimeLineBlocker userClick.play userClick.pause "
        "userClick.touchStepProgressLabel userClick.clickGameQuadrant "
        "userClick.clickEntity.clickGameEntity userClick.clickEntity.clickQuadrant "
        "userClick.clickEntity.coordX userClick.clickEntity.coordY "
        "userClick.clickActionLabel userClick.clickActionLabelDenied "
        "userClick.selectedRewardBar userClick.clickSaliencyMap "
        "userClick.clickSaliencyMap.clickGameEntity "
        "userClick.clickSaliencyMap.clickQuadrant "
        "userClick.touchCumRewardLabel userClick.touchCumRewardValueFor"
    ).split()
    for key in keys:
        # msg=key names the failing field instead of a bare value mismatch.
        self.assertEqual(obj[key], expected.get(key, "NA"), msg=key)
def test_endMouseOverSaliencyMap(self):
    """An endMouseOverSaliencyMap log line: the mouse-over name, region and
    target are extracted; every other field is "NA".
    """
    line = "tutorial.scr,9-13-2018,12:23:49:910,1536866629910,1,1.0,region:saliencyMap;target:saliencyMap--DP1-2_EnemyDestroyed--Tank;endMouseOverSaliencyMap:Tank,false,false,false,false,false,false"
    # Only the fields below differ from the "NA" default.
    expected = {
        "endMouseOverSaliencyMap": "Tank",
        "userClick.region": "saliencyMap",
        "userClick.target": "saliencyMap--DP1-2_EnemyDestroyed--Tank",
    }
    extraction_map = extractionMap.get_extraction_map()
    obj = parse_line(line, extraction_map)
    # Every field name the original assertions covered; the names contain
    # no spaces, so a whitespace-split string lists them compactly.
    keys = (
        "stepIntoDecisionPoint showQuestion hideEntityTooltips "
        "showEntityTooltip.entityInfo showEntityTooltip.tipQuadrant "
        "startMouseOverSaliencyMap endMouseOverSaliencyMap "
        "waitForResearcherStart waitForResearcherEnd "
        "userClick userClick.coordX userClick.coordY "
        "userClick.region userClick.target "
        "userClick.answerQuestion.clickStep userClick.answerQuestion.questionId "
        "userClick.answerQuestion.answer1 userClick.answerQuestion.answer2 "
        "userClick.answerQuestion.userClick "
        "userClick.answerQuestion.userClick.fileName "
        "userClick.answerQuestion.userClick.date "
        "userClick.answerQuestion.userClick.time "
        "userClick.answerQuestion.userClick.1970Sec "
        "userClick.answerQuestion.userClick.decisionPoint "
        "userClick.answerQuestion.userClick.questionId "
        "userClick.answerQuestion.userClick.coordX "
        "userClick.answerQuestion.userClick.coordY "
        "userClick.answerQuestion.userClick.region "
        "userClick.answerQuestion.userClick.target "
        "userClick.answerQuestion.userClick.clickEntity.clickGameEntity "
        "userClick.answerQuestion.userClick.clickEntity.clickQuadrant "
        "userClick.answerQuestion.userClick.clickEntity.coordX "
        "userClick.answerQuestion.userClick.clickEntity.coordY "
        "userClick.answerQuestion.userClick.selectedRewardBar "
        "userClick.answerQuestion.userClick.clickSaliencyMap "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickGameEntity "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickQuadrant "
        "userClick.timelineClick userClick.jumpToDecisionPoint "
        "userClick.clickTimeLineBlocker userClick.play userClick.pause "
        "userClick.touchStepProgressLabel userClick.clickGameQuadrant "
        "userClick.clickEntity.clickGameEntity userClick.clickEntity.clickQuadrant "
        "userClick.clickEntity.coordX userClick.clickEntity.coordY "
        "userClick.clickActionLabel userClick.clickActionLabelDenied "
        "userClick.selectedRewardBar userClick.clickSaliencyMap "
        "userClick.clickSaliencyMap.clickGameEntity "
        "userClick.clickSaliencyMap.clickQuadrant "
        "userClick.touchCumRewardLabel userClick.touchCumRewardValueFor"
    ).split()
    for key in keys:
        # msg=key names the failing field instead of a bare value mismatch.
        self.assertEqual(obj[key], expected.get(key, "NA"), msg=key)
def test_waitForResearcherStart(self):
    """A waitForResearcherStart log line: the wait flag, region and target
    are extracted; every other field is "NA".
    """
    line = "task1.scr,9-18-2018,13:23:21:67,1537302201067,1,61.0,region:waitScreen;target:enter-wait-screen;waitForResearcherStart:yes,false,false,false,false,false,false"
    # Only the fields below differ from the "NA" default.
    expected = {
        "waitForResearcherStart": "yes",
        "userClick.region": "waitScreen",
        "userClick.target": "enter-wait-screen",
    }
    extraction_map = extractionMap.get_extraction_map()
    obj = parse_line(line, extraction_map)
    # Every field name the original assertions covered; the names contain
    # no spaces, so a whitespace-split string lists them compactly.
    keys = (
        "stepIntoDecisionPoint showQuestion hideEntityTooltips "
        "showEntityTooltip.entityInfo showEntityTooltip.tipQuadrant "
        "startMouseOverSaliencyMap endMouseOverSaliencyMap "
        "waitForResearcherStart waitForResearcherEnd "
        "userClick userClick.coordX userClick.coordY "
        "userClick.region userClick.target "
        "userClick.answerQuestion.clickStep userClick.answerQuestion.questionId "
        "userClick.answerQuestion.answer1 userClick.answerQuestion.answer2 "
        "userClick.answerQuestion.userClick "
        "userClick.answerQuestion.userClick.fileName "
        "userClick.answerQuestion.userClick.date "
        "userClick.answerQuestion.userClick.time "
        "userClick.answerQuestion.userClick.1970Sec "
        "userClick.answerQuestion.userClick.decisionPoint "
        "userClick.answerQuestion.userClick.questionId "
        "userClick.answerQuestion.userClick.coordX "
        "userClick.answerQuestion.userClick.coordY "
        "userClick.answerQuestion.userClick.region "
        "userClick.answerQuestion.userClick.target "
        "userClick.answerQuestion.userClick.clickEntity.clickGameEntity "
        "userClick.answerQuestion.userClick.clickEntity.clickQuadrant "
        "userClick.answerQuestion.userClick.clickEntity.coordX "
        "userClick.answerQuestion.userClick.clickEntity.coordY "
        "userClick.answerQuestion.userClick.selectedRewardBar "
        "userClick.answerQuestion.userClick.clickSaliencyMap "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickGameEntity "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickQuadrant "
        "userClick.timelineClick userClick.jumpToDecisionPoint "
        "userClick.clickTimeLineBlocker userClick.play userClick.pause "
        "userClick.touchStepProgressLabel userClick.clickGameQuadrant "
        "userClick.clickEntity.clickGameEntity userClick.clickEntity.clickQuadrant "
        "userClick.clickEntity.coordX userClick.clickEntity.coordY "
        "userClick.clickActionLabel userClick.clickActionLabelDenied "
        "userClick.selectedRewardBar userClick.clickSaliencyMap "
        "userClick.clickSaliencyMap.clickGameEntity "
        "userClick.clickSaliencyMap.clickQuadrant "
        "userClick.touchCumRewardLabel userClick.touchCumRewardValueFor"
    ).split()
    for key in keys:
        # msg=key names the failing field instead of a bare value mismatch.
        self.assertEqual(obj[key], expected.get(key, "NA"), msg=key)
def test_waitForResearcherEnd(self):
    """A waitForResearcherEnd log line: the wait flag, region and target
    are extracted; every other field is "NA".
    """
    line = "task1.scr,9-18-2018,13:23:27:957,1537302207957,61,61.0,region:waitScreen;target:user-wait-button-continue;waitForResearcherEnd:yes,false,false,false,false,false,false"
    # Only the fields below differ from the "NA" default.
    expected = {
        "waitForResearcherEnd": "yes",
        "userClick.region": "waitScreen",
        "userClick.target": "user-wait-button-continue",
    }
    extraction_map = extractionMap.get_extraction_map()
    obj = parse_line(line, extraction_map)
    # Every field name the original assertions covered; the names contain
    # no spaces, so a whitespace-split string lists them compactly.
    keys = (
        "stepIntoDecisionPoint showQuestion hideEntityTooltips "
        "showEntityTooltip.entityInfo showEntityTooltip.tipQuadrant "
        "startMouseOverSaliencyMap endMouseOverSaliencyMap "
        "waitForResearcherStart waitForResearcherEnd "
        "userClick userClick.coordX userClick.coordY "
        "userClick.region userClick.target "
        "userClick.answerQuestion.clickStep userClick.answerQuestion.questionId "
        "userClick.answerQuestion.answer1 userClick.answerQuestion.answer2 "
        "userClick.answerQuestion.userClick "
        "userClick.answerQuestion.userClick.fileName "
        "userClick.answerQuestion.userClick.date "
        "userClick.answerQuestion.userClick.time "
        "userClick.answerQuestion.userClick.1970Sec "
        "userClick.answerQuestion.userClick.decisionPoint "
        "userClick.answerQuestion.userClick.questionId "
        "userClick.answerQuestion.userClick.coordX "
        "userClick.answerQuestion.userClick.coordY "
        "userClick.answerQuestion.userClick.region "
        "userClick.answerQuestion.userClick.target "
        "userClick.answerQuestion.userClick.clickEntity.clickGameEntity "
        "userClick.answerQuestion.userClick.clickEntity.clickQuadrant "
        "userClick.answerQuestion.userClick.clickEntity.coordX "
        "userClick.answerQuestion.userClick.clickEntity.coordY "
        "userClick.answerQuestion.userClick.selectedRewardBar "
        "userClick.answerQuestion.userClick.clickSaliencyMap "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickGameEntity "
        "userClick.answerQuestion.userClick.clickSaliencyMap.clickQuadrant "
        "userClick.timelineClick userClick.jumpToDecisionPoint "
        "userClick.clickTimeLineBlocker userClick.play userClick.pause "
        "userClick.touchStepProgressLabel userClick.clickGameQuadrant "
        "userClick.clickEntity.clickGameEntity userClick.clickEntity.clickQuadrant "
        "userClick.clickEntity.coordX userClick.clickEntity.coordY "
        "userClick.clickActionLabel userClick.clickActionLabelDenied "
        "userClick.selectedRewardBar userClick.clickSaliencyMap "
        "userClick.clickSaliencyMap.clickGameEntity "
        "userClick.clickSaliencyMap.clickQuadrant "
        "userClick.touchCumRewardLabel userClick.touchCumRewardValueFor"
    ).split()
    for key in keys:
        # msg=key names the failing field instead of a bare value mismatch.
        self.assertEqual(obj[key], expected.get(key, "NA"), msg=key)
| 65.560811
| 268
| 0.714453
| 2,863
| 29,109
| 7.242403
| 0.044708
| 0.243067
| 0.291681
| 0.293224
| 0.977767
| 0.968941
| 0.963251
| 0.963058
| 0.962672
| 0.961514
| 0
| 0.022238
| 0.130269
| 29,109
| 443
| 269
| 65.708804
| 0.796777
| 0.062043
| 0
| 0.909341
| 0
| 0.016484
| 0.487817
| 0.424358
| 0
| 0
| 0
| 0
| 0.923077
| 1
| 0.016484
| false
| 0
| 0.008242
| 0
| 0.027473
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
c557e9f484e9814750e96efe16f86d582118aeb7
| 203
|
py
|
Python
|
services/others/lamdsh/gettrans/core.py
|
X5GON/lamapi
|
0558c3b7af520ab83bdbd29e1b1b9b87bdc147b0
|
[
"BSD-2-Clause"
] | null | null | null |
services/others/lamdsh/gettrans/core.py
|
X5GON/lamapi
|
0558c3b7af520ab83bdbd29e1b1b9b87bdc147b0
|
[
"BSD-2-Clause"
] | null | null | null |
services/others/lamdsh/gettrans/core.py
|
X5GON/lamapi
|
0558c3b7af520ab83bdbd29e1b1b9b87bdc147b0
|
[
"BSD-2-Clause"
] | null | null | null |
from components.dataconnection.index import get_transcription_forlamdash
# Get resTrans for lamdsh
def get_resource_lamdshtrans(resource_infos):
    """Return the resource transcription used by the LAM dashboard.

    Thin wrapper around ``get_transcription_forlamdash``; the argument is
    forwarded unchanged and the helper's result is returned as-is.
    """
    transcription = get_transcription_forlamdash(resource_infos)
    return transcription
| 29
| 72
| 0.866995
| 24
| 203
| 7
| 0.666667
| 0.190476
| 0.309524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093596
| 203
| 6
| 73
| 33.833333
| 0.913043
| 0.1133
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
3dc0c1fbe9f455c5a7cf75fd24ac3766996955a9
| 64,050
|
py
|
Python
|
chunair/kicad-footprint-generator-master/scripts/Potentiometers/make_Potentiometer_THT.py
|
speedypotato/chuni-lite
|
c8dda8428723f8c4f99075e7cbaa22a44cbc187d
|
[
"CC-BY-4.0"
] | 2
|
2022-03-18T23:42:51.000Z
|
2022-03-19T15:31:34.000Z
|
chunair/kicad-footprint-generator-master/scripts/Potentiometers/make_Potentiometer_THT.py
|
speedypotato/chuni-lite
|
c8dda8428723f8c4f99075e7cbaa22a44cbc187d
|
[
"CC-BY-4.0"
] | null | null | null |
chunair/kicad-footprint-generator-master/scripts/Potentiometers/make_Potentiometer_THT.py
|
speedypotato/chuni-lite
|
c8dda8428723f8c4f99075e7cbaa22a44cbc187d
|
[
"CC-BY-4.0"
] | null | null | null |
#!/usr/bin/env python
import sys
import os
import math
# ensure that the kicad-footprint-generator directory is available
#sys.path.append(os.environ.get('KIFOOTPRINTGENERATOR')) # enable package import from parent directory
#sys.path.append("D:\hardware\KiCAD\kicad-footprint-generator") # enable package import from parent directory
sys.path.append(os.path.join(sys.path[0],"..","..","kicad_mod")) # load kicad_mod path
sys.path.append(os.path.join(sys.path[0],"..","..")) # load kicad_mod path
sys.path.append(os.path.join(sys.path[0],"..","tools")) # load kicad_mod path
from KicadModTree import * # NOQA
from drawing_tools import *
from footprint_scripts_potentiometers import *
if __name__ == '__main__':
script3d_tsl="trimmer_screwleft.py"
with open(script3d_tsl, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_tst="trimmer_screwtop.py"
with open(script3d_tst, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_tsl_smd="trimmer_screwleft_smd.py"
with open(script3d_tsl_smd, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_tst_smd="trimmer_screwtop_smd.py"
with open(script3d_tst_smd, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_pv="pots_ver.py"
with open(script3d_pv, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_trv="trim_round_ver.py"
with open(script3d_trv, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_trh="trim_round_hor.py"
with open(script3d_trh, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_trh_bel="trim_round_hor_below.py"
with open(script3d_trh_bel, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_trh_smd="trim_round_smd_hor.py"
with open(script3d_trh_smd, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_trh_smd_bel="trim_round_smd_hor_below.py"
with open(script3d_trh_smd_bel, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_ph_bel="pots_hor_below.py"
with open(script3d_ph_bel, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
script3d_ph="pots_hor.py"
with open(script3d_ph, "w") as myfile:
myfile.write("#\n# SCRIPT to generate 3D models\n#\n\n")
R_POW = 0
class_name="Omeg PC16BU"; add_description="http://www.omeg.co.uk/pc6bubrc.htm"
pins = 3; rmx=5.0; rmy=5.; ddrill=1.3; wbody=9.3; hbody=16.9; height3d = 21; screwzpos = 12.5; wscrew=6; dscrew=7
wshaft=50-wscrew; dshaft=4; pinxoffset=6.3; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
voffsetx=1.75; dbody=16.9; vwbody=5; vpinyoffset=(hbody-2*rmy)/2.0; c_offsety=dbody/2.0; c_offsetx=10.8
#makePotentiometerVertical(shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
makePotentiometerVertical(shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph,height3d=height3d)
class_name="Vishay 248GJ-249GJ Single"; add_description="http://www.vishay.com/docs/57054/248249.pdf"
pins = 3; rmx=7.62; rmy=2.54; ddrill=1; wbody=7.6; hbody=12.5; height3d = 13.1; screwzpos = 12.7/2.0+0.6; wscrew=9.5; dscrew=(3/8.0)*25.4
wshaft=22.22-wscrew; dshaft=(1/4.0)*25.4; pinxoffset=5.08; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
voffsetx=0.75; dbody=0; vwbody=12.7; vpinyoffset=(hbody-2*rmy)/2.0; c_offsety=hbody/2.0; c_offsetx=vwbody/2.0
#makePotentiometerVertical(shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
makePotentiometerVertical(shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph,height3d=height3d)
class_name="Vishay 248BH-249BH Single"; add_description="http://www.vishay.com/docs/57054/248249.pdf"
wscrew=9.5; dscrew=0.25*25.4; wshaft=19.05-wscrew; dshaft=3.18; pinxoffset=5.08; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
class_name="Vishay 148-149 Single"; add_description="http://www.vishay.com/docs/57040/148149.pdf"
pins = 3; rmx=7.62; rmy=2.54; ddrill=1; wbody=8.83; hbody=12.5; height3d = 13.1; screwzpos = 12.5/2.0+0.6; wscrew=6.35; dscrew=0.25*25.4
wshaft=12.8-wscrew; dshaft=3.17; pinxoffset=5.08; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
voffsetx=0.75; dbody=0; vwbody=12.5; vpinyoffset=(hbody-2*rmy)/2.0; c_offsety=hbody/2.0; c_offsetx=vwbody/2.0
#makePotentiometerVertical(shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
makePotentiometerVertical(shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph,height3d=height3d)
class_name="Vishay 148E-149E Single"
wbody = 6.35 + 3.85 + 1.52 + 0.5
makePotentiometerHorizontal(mh_ddrill=1.3, mh_count=4, mh_rmx=3.85+6.35, mh_rmy=10.16, mh_xoffset=3.85, mh_yoffset=(10.16-2*rmy)/2.0, class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
class_name="Vishay 148-149 Dual"
pins = 6; wbody=16.45; wscrew=7
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
class_name="Vishay 148E-149E Dual"
wbody = 6.35+7.62+3.85+1.52+0.5
makePotentiometerHorizontal(mh_ddrill=1.3, mh_count=4, mh_rmx=3.85+7.62+6.35, mh_rmy=10.16, mh_xoffset=3.85, mh_yoffset=(10.16-2*rmy)/2.0, class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# --- Piher PC-16 series (single / dual / triple gang) -----------------------
# Dimensions taken from the linked Piher datasheet (add_description URL).
class_name="Piher PC-16 Single"; add_description="http://www.piher-nacesa.com/pdf/20-PC16v03.pdf"
pins = 3; rmx=7.5; rmy=5.0; ddrill=1.3; wbody=8; hbody=16; height3d = 20.5; screwzpos = 12.5; wscrew=9; dscrew=10
wshaft=25-wscrew; dshaft=6; pinxoffset=6.5; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# Vertical-mount variant of the PC-16 Single.
# NOTE(review): vpinyoffset is computed here but the call below passes
# pinyoffset=pinyoffset (the value from the horizontal section, same formula,
# so the result is identical) — vpinyoffset appears to be dead; confirm.
voffsetx = 0.5; dbody = 0; vwbody = 18; vpinyoffset = (hbody-2*rmy)/2.0; c_offsetx = 10; c_offsety = hbody/2.0
#makePotentiometerVertical(shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
makePotentiometerVertical(shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph,height3d=height3d)
#class_name="Piher PC-16SV Single"
#voffsetx=0.5; dbody=0; vwbody=18; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=10; c_offsety=hbody/2.0
#makePotentiometerVertical(mh_ddrill=1.3, mh_count=2, mh_rmx=0, mh_rmy=10.0, mh_xoffset=15, mh_yoffset=(10-2*rmy)/2.0, shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# Dual- and triple-gang variants: only pin count and body width change.
class_name="Piher PC-16 Dual"
pins = 6; wbody=16
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
class_name="Piher PC-16 Triple"
pins = 9; wbody=24
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# --- Piher T-16 series ------------------------------------------------------
class_name="Piher T-16H Single"; add_description="http://www.piher-nacesa.com/pdf/22-T16v03.pdf"
pins = 3; rmx=7.5; rmy=5.0; ddrill=1.3; wbody=7.5; hbody=16; height3d = 21; screwzpos = 12.5; wscrew=5; dscrew=7
wshaft=15-wscrew; dshaft=4; pinxoffset=1.5; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# T-16L: vertical/laydown variant with a circular body (dbody=16).
class_name="Piher T-16L Single"
voffsetx=-0.5; dbody=16; vwbody=3; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=10.5; c_offsety=hbody/2.0
makePotentiometerVertical(shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# Dual-gang T-16H: 6 pins, wider body; remaining geometry inherited.
class_name="Piher T-16H Double"
pins = 6; wbody=15
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# --- Alps RK163 and RK097 series --------------------------------------------
class_name="Alps RK163 Single"; add_description="http://www.alps.com/prod/info/E/HTML/Potentiometer/RotaryPotentiometers/RK16/RK16_list.html"
pins = 3; rmx=5.0; rmy=5.0; ddrill=1.3; wbody=10.5; hbody=17.9; height3d = 21; screwzpos = 12.5; wscrew=5; dscrew=7
wshaft=15-wscrew; dshaft=6; pinxoffset=3.8; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
class_name="Alps RK163 Dual"
pins = 6; wbody=12.1; wscrew=7
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# RK097: smaller 2.5 mm pitch series.
class_name="Alps RK097 Single"; add_description="http://www.alps.com/prod/info/E/HTML/Potentiometer/RotaryPotentiometers/RK097/RK097_list.html"
pins = 3; rmx=2.5; rmy=2.5; ddrill=1; wbody=7.05; hbody=9.5; height3d = 6.5+0.25+4.85; screwzpos = 6.5+0.25; wscrew=5; dscrew=7
wshaft=15-wscrew; dshaft=6; pinxoffset=5; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
class_name="Alps RK097 Dual"
pins = 6; wbody=9.55
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# --- Bourns PTV09A and Alps RK09K (9 mm, 2.5 mm pitch, with mounting lugs) --
class_name="Bourns PTV09A-2 Single"; add_description="http://www.bourns.com/docs/Product-Datasheets/ptv09.pdf"
pins = 3; rmx=2.5; rmy=2.5; ddrill=1; wbody=5; hbody=9.7; height3d = 10+5.5; screwzpos = 10; wscrew=0.8; dscrew=6.8
wshaft=15-wbody-wscrew; dshaft=6; pinxoffset=3.5; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(mh_ddrill=2.3, mh_count=2, mh_rmx=0, mh_rmy=10, mh_xoffset=-3.3, mh_yoffset=(10-2*rmy)/2.0, class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# PTV09A-1: vertical-mount variant with two 2 mm mounting holes.
class_name="Bourns PTV09A-1 Single"
voffsetx=1; dbody=0; vwbody=12; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=6.5; c_offsety=hbody/2.0
makePotentiometerVertical(mh_ddrill=2, mh_count=2, mh_rmx=0, mh_rmy=8.8, mh_xoffset=7, mh_yoffset=(8.8-2*rmy)/2.0, shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
class_name="Alps RK09K Single"; add_description="http://www.alps.com/prod/info/E/HTML/Potentiometer/RotaryPotentiometers/RK09K/RK09K_list.html"
pins = 3; rmx=2.5; rmy=2.5; ddrill=1; wbody=6.8; hbody=9.8; height3d = 6.5+5.5; screwzpos = 6.5; wscrew=0.8; dscrew=6.5
wshaft=15-6.8-wscrew; dshaft=6; pinxoffset=3.4; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(mh_ddrill=2.3, mh_count=2, mh_rmx=0, mh_rmy=10, mh_xoffset=-3.3, mh_yoffset=(10-2*rmy)/2.0, class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# Same class name reused: vertical-mount footprint for the RK09K Single.
class_name="Alps RK09K Single"
voffsetx=1; dbody=0; vwbody=12; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=6.5; c_offsety=hbody/2.0
makePotentiometerVertical(mh_ddrill=2, mh_count=2, mh_rmx=0, mh_rmy=8.8, mh_xoffset=7, mh_yoffset=(8.8-2*rmy)/2.0, shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# --- Alps RK09L and RK09Y11 series ------------------------------------------
class_name="Alps RK09L Single"; add_description="http://www.alps.com/prod/info/E/HTML/Potentiometer/RotaryPotentiometers/RK09L/RK09L_list.html"
pins = 3; rmx=2.5; rmy=2.5; ddrill=1; wbody=7.45; hbody=12.1; height3d = 6.5+0.25+4.85; screwzpos = 6.5+0.25; wscrew=5; dscrew=9
wshaft=15-wscrew; dshaft=6; pinxoffset=5; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(mh_ddrill=2.1, mh_count=2, mh_rmx=0, mh_rmy=9.5, mh_xoffset=-4.1, mh_yoffset=(9.5-2*rmy)/2.0, class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# Vertical-mount footprint for the same single-gang part.
class_name="Alps RK09L Single"
voffsetx=1; dbody=0; vwbody=11.35; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=6.5; c_offsety=hbody/2.0
makePotentiometerVertical(mh_ddrill=2, mh_count=2, mh_rmx=0, mh_rmy=9.5, mh_xoffset=7.5, mh_yoffset=(9.5-2*rmy)/2.0, shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# Dual-gang variant, horizontal then vertical.
class_name="Alps RK09L Double"
pins = 6; wbody=9.14
makePotentiometerHorizontal(mh_ddrill=2.1, mh_count=2, mh_rmx=0, mh_rmy=9.5, mh_xoffset=-5.8, mh_yoffset=(9.5-2*rmy)/2.0, class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
class_name="Alps RK09L Double"
voffsetx=1; dbody=0; vwbody=11.35; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=6.5; c_offsety=hbody/2.0
makePotentiometerVertical(mh_ddrill=2, mh_count=2, mh_rmx=0, mh_rmy=9.5, mh_xoffset=7.5, mh_yoffset=(9.5-2*rmy)/2.0, shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=dscrew+0.5,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# RK09Y11: asymmetric pitch (rmx 5.0 mm, rmy 2.5 mm).
class_name="Alps RK09Y11 Single"; add_description="http://www.alps.com/prod/info/E/HTML/Potentiometer/RotaryPotentiometers/RK09Y11/RK09Y11_list.html"
pins = 3; rmx=5.0; rmy=2.5; ddrill=1.0; wbody=5.4; hbody=9.5; height3d = 6.25+0.25+4.85; screwzpos = 6.25+0.25; wscrew=5; dscrew=7
wshaft=12-wscrew; dshaft=5; pinxoffset=3.45; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_pv,height3d=height3d, screwzpos=screwzpos)
# --- Bourns 3339 trimmer series (S/W/P/H) and Vishay T7 ---------------------
# style="trimmer" presumably selects trimmer-style body drawing; the vertical
# variants use screwstyle='slit' for a slotted adjustment screw.
class_name="Bourns 3339S"; add_description='http://www.bourns.com/docs/Product-Datasheets/3339.pdf'
pins = 3; rmx=-2.54; rmy=2.54; ddrill=0.8; wbody=5.97; hbody=8.13; dbody=0; height3d = 9.53; screwzpos = 5.54; wscrew=8-5.97; dscrew=7.62
wshaft=0; dshaft=4; pinxoffset=+4.57-wscrew; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# 3339W: middle pin on the opposite side (rmx sign flipped vs. 3339S).
rmx=2.54; pinxoffset=+4.57-wscrew+2.54
class_name="Bourns 3339W"
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# 3339P: vertical, round body (dbody=7.62), inline pin pattern.
class_name="Bourns 3339P"
pins = 3; rmx=-2.54; rmy=2.54; ddrill=0.7; wbody=0; hbody=7.62; dbody=7.62; height3d = 6.35; wscrew=-wbody; dscrew=5
wshaft=0; dshaft=0; pinxoffset=0; pinyoffset=(hbody-2*rmy)/2.0
voffsetx=-rmx; vwbody=0; pinyoffset=(hbody-2*rmy)/2.0; c_offsetx=rmx; c_offsety=hbody/2.0; c_ddrill=2
makePotentiometerVertical(screwstyle='slit', style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# 3339H: pins on a 2.54 mm diagonal grid, hence the sqrt(2) pitch split.
class_name="Bourns 3339H"
rmx = 2.54 / math.sqrt(2); rmy = 2.54 / math.sqrt(2); voffsetx = rmx*2; vwbody = 0
pinyoffset = (hbody - 2 * rmy)/2.0; c_offsetx=-rmx; c_offsety=hbody/2.0; c_ddrill=2
makePotentiometerVertical(screwstyle='slit', style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# Vishay T7-YA: small round vertical trimmer.
class_name="Vishay T7-YA Single"; add_description="http://www.vishay.com/docs/51015/t7.pdf"
pins = 3; rmx=2.54; rmy=2.54; ddrill=0.8; wbody=0; hbody=7; dbody=7; height3d = 5.85; wscrew=-wbody; dscrew=4.1
wshaft=0; dshaft=0; pinxoffset=0; pinyoffset=(hbody-2*rmy)/2.0
voffsetx=-rmx; vwbody=0; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=2.5; c_offsety=hbody/2.0; c_ddrill=2
makePotentiometerVertical(screwstyle='slit', style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# --- Bourns 3386 trimmer series (X/C/P/F) -----------------------------------
# NOTE(review): negative wbody/pinxoffset values place the body behind the
# pin row — presumably handled by makePotentiometerHorizontal; confirm there.
class_name="Bourns 3386X"; add_description="https://www.bourns.com/pdfs/3386.pdf"
pins = 3; rmx=2.54; rmy=2.54; ddrill=0.8; wbody=-4.83; hbody=9.53; dbody=0; height3d = 9.53; screwzpos = 5.33; wscrew=0; dscrew=3.15
wshaft=0; dshaft=0; pinxoffset=-(4.83-2.54)/2.0; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# 3386C: all three pins in one row (rmx=0).
class_name = "Bourns 3386C"
rmx=0; pinxoffset=-4.83/2.0
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# 3386P / 3386F: vertical-mount variants with slotted top adjuster.
class_name="Bourns 3386P"
rmx=2.54; dbody=0; voffsetx=-4.78; vwbody=9.53; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=9.53-5.64; c_offsety=hbody/2.0; c_ddrill=2; height3d = 4.83
makePotentiometerVertical(screwstyle="slit", style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
class_name="Bourns 3386F"
rmx=5.08; voffsetx=-9.53+5.08+2.34
makePotentiometerVertical(screwstyle="slit", style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# --- Vishay T73 trimmer series (XX/XW/YP) -----------------------------------
class_name="Vishay T73XX"; add_description="http://www.vishay.com/docs/51016/t73.pdf"
pins = 3; rmx=2.54; rmy=2.54; ddrill=0.8; wbody=-4.7; hbody=6.6; dbody=0; height3d = 7; screwzpos = 3.8; wscrew=0; dscrew=3
wshaft=0; dshaft=0; pinxoffset=-1.02; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# T73XW: inline pin row (rmx=0).
class_name="Vishay T73XW"
rmx=0; pinxoffset=-2.35
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# T73YP: vertical-mount with cross-head (Phillips) adjuster.
class_name="Vishay T73YP"
rmx=2.54; dbody=0; voffsetx=-3.56; vwbody=7; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=3.8; c_offsety=hbody/2.0; c_ddrill=2
makePotentiometerVertical(screwstyle="cross", style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_ph_bel,height3d=height3d)
# --- Piher PT-6 trimmer (horizontal and vertical, open frame) ---------------
class_name="Piher PT-6-H"; add_description="http://www.piher-nacesa.com/pdf/11-PT6v03.pdf"
pins = 3; rmx=2.5; rmy=2.5; ddrill=0.9; wbody=-3.5; hbody=6.3; dbody=6.3; height3d = 4.5+dbody/2.0; screwzpos = 4.5; wscrew=-wbody; dscrew=2
wshaft=0; dshaft=1.8; pinxoffset=0; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# PT-6-V: vertical variant, generated both without and with a shaft hole.
class_name="Piher PT-6-V"
rmx=5; dbody=6.3; voffsetx=0; vwbody=0; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=2.5; c_offsety=hbody/2.0; c_ddrill=2; height3d=4
makePotentiometerVertical(style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh,height3d=height3d)
makePotentiometerVertical(style="trimmer", shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh_bel,height3d=height3d)
# --- Piher PT-10 trimmer family (H01/H05/H3.8 horizontal, V10/V05 vertical) -
class_name="Piher PT-10-H01"; add_description="http://www.piher-nacesa.com/pdf/12-PT10v03.pdf"
pins = 3; rmx=2.5; rmy=2.5; ddrill=1.3; wbody=-4.8; hbody=10.3; height3d = 12.1; screwzpos = 7; dbody=10.3; wscrew=-wbody; dscrew=3.5
wshaft=0; dshaft=3; pinxoffset=0; pinyoffset=(hbody-2*rmy)/2.0
#name_additions=["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2*rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# H05: same body, 5 mm pin pitch.
class_name="Piher PT-10-H05"
rmx=5; height3d = 12.1; screwzpos = 7; pinyoffset=(hbody-2*rmy)/2.0
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# H3.8 variant is currently disabled (generator call commented out).
class_name="Piher PT-101-H3.8"
rmx=3.8; height3d = 17.1; screwzpos = 9.6; pinyoffset=(hbody-2*rmy)/2.0
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
#makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# V10 vertical: emitted both without and with a shaft hole.
class_name="Piher PT-10-V10"
hbody=10; rmx=10; dbody=10.3; voffsetx=0; vwbody=0; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=5; c_offsety=hbody/2.0; c_ddrill=4; height3d=5.3
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerVertical(style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh,height3d=height3d)
makePotentiometerVertical(style="trimmer", shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh_bel,height3d=height3d)
class_name="Piher PT-10-V05"
voffsetx = -5.3/2.0; hbody=10; rmx=5; dbody=10.3; vwbody=0; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=10.3/2.0; c_offsety=dbody/2.0; c_ddrill=3
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerVertical(style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh_bel,height3d=height3d)
#name_additions = []
# --- Piher PT-15 trimmer family (H05/H01/H06/H25 horizontal, V02/V15
# vertical); variants differ mainly in pin pitch (rmx/rmy) and height. -------
class_name="Piher PT-15-H05"; add_description="http://www.piher-nacesa.com/pdf/14-PT15v03.pdf"
pins = 3; rmx=5.0; rmy=5; ddrill=1.3; wbody=-5; hbody=15; height3d = 17.5; screwzpos = 10; dbody=15; wscrew=-wbody; dscrew=6
wshaft=0; dshaft=4.4; pinxoffset=0; pinyoffset=(hbody-2*rmy)/2.0
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
class_name="Piher PT-15-H01"
rmy=5; rmx=2.5; height3d = 17.5; screwzpos = 10; pinyoffset=(hbody-2*rmy)/2.0
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
class_name="Piher PT-15-H06"
rmy=4.4; rmx=4; height3d = 17.1; screwzpos = 9.6; pinyoffset=(hbody-2*rmy)/2.0
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
class_name="Piher PT-15-H25"
rmy=5; rmx=5; height3d = 20; screwzpos = 12.5; pinyoffset=(hbody-2*rmy)/2.0
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# Vertical variants: each emitted without and with a shaft hole.
class_name="Piher PT-15-V02"
hbody=10; rmx=12.5; dbody=15; voffsetx=0; vwbody=0; vpinyoffset=(dbody-2*rmy)/2.0; c_offsetx=7.5; c_offsety=dbody/2.0; c_ddrill=7; height3d=5.5
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerVertical(style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh,height3d=height3d)
makePotentiometerVertical(style="trimmer", shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh_bel,height3d=height3d)
class_name="Piher PT-15-V15"
voffsetx =0; hbody=15; rmx=15; dbody=15; vwbody=0; vpinyoffset=(dbody-2*rmy)/2.0; c_offsetx=7.5; c_offsety=dbody/2.0; c_ddrill=7
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerVertical(style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh,height3d=height3d)
makePotentiometerVertical(style="trimmer", shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh_bel,height3d=height3d)
#name_additions=[]
# --- ACP CA6 and CA9 trimmer families ---------------------------------------
class_name="ACP CA6-H2,5"; add_description="http://www.acptechnologies.com/wp-content/uploads/2017/06/01-ACP-CA6.pdf"
pins = 3; rmx=2.5; rmy=2.5; ddrill=0.9; wbody=-3.5; hbody=6.3; dbody=0; height3d = 4.5+hbody/2.0; screwzpos = 4.5; wscrew=-wbody; dscrew=2
wshaft=0; dshaft=1.8; pinxoffset=0; pinyoffset=(hbody-2*rmy)/2.0
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# CA9 horizontal variants: pin pitch (rmx) 2.5 / 3.8 / 5 mm.
class_name="ACP CA9-H2,5"; add_description="http://www.acptechnologies.com/wp-content/uploads/2017/05/02-ACP-CA9-CE9.pdf"
pins = 3; rmx=2.5; rmy=2.5; ddrill=1.3; wbody=-4.8; hbody=9.8; dbody=0; height3d = 12; screwzpos = 7; wscrew=-wbody; dscrew=3
wshaft=0; dshaft=2.1; pinxoffset=0; pinyoffset=(hbody-2*rmy)/2.0
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
class_name="ACP CA9-H3,8"
rmx=3.8; height3d = 12
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
class_name="ACP CA9-H5"
rmx=5; height3d = 12
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# CA9-V10 vertical: note this section passes pinyoffset=vpinyoffset, unlike
# most earlier vertical calls which pass the carried-over pinyoffset (the
# two coincide in value here since the formula is the same).
class_name="ACP CA9-V10"
rmx=10; dbody=0; voffsetx=0; vwbody=10; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=vwbody/2.0; c_offsety=hbody/2.0; c_ddrill=4; height3d=7.2
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerVertical(style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=vpinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh,height3d=height3d)
makePotentiometerVertical(style="trimmer", shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=vpinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh_bel,height3d=height3d)
#name_additions=[]
# ACP CA14 series (datasheet URL in add_description): horizontal variants with
# 2.5 / 4 / 5 mm pin pitch, then the vertical CA14V-15.
class_name="ACP CA14-H2,5"; add_description="http://www.acptechnologies.com/wp-content/uploads/2017/10/03-ACP-CA14-CE14.pdf"
# NOTE(review): wbody is negative here and wscrew=-wbody yields a positive
# screw length — presumably the body extends to the opposite side of the pins;
# confirm against makePotentiometerHorizontal's sign convention.
pins = 3; rmx=2.5; rmy=5; ddrill=1.3; wbody=-5.0; hbody=14; dbody=0; height3d = 17; screwzpos = 10; wscrew=-wbody; dscrew=6
wshaft=0; dshaft=5; pinxoffset=0; pinyoffset=(hbody-2*rmy)/2.0
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# CA14-H4: only the pin pitch changes.
class_name="ACP CA14-H4"
rmx=4
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# CA14-H5: only the pin pitch changes.
class_name="ACP CA14-H5"
rmx=5
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerHorizontal(style="trimmer", class_name=class_name, wbody=wbody, hbody=hbody, wscrew=wscrew, dscrew=dscrew, wshaft=wshaft, dshaft=dshaft, pinxoffset=pinxoffset,pinyoffset=pinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, R_POW=R_POW, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trv,height3d=height3d, screwzpos=screwzpos)
# CA14V-15: vertical mount; emitted without and with a shaft hole.
class_name="ACP CA14V-15"
rmx=15; dbody=0; voffsetx=0.5; vwbody=14; vpinyoffset=(hbody-2*rmy)/2.0; c_offsetx=vwbody/2.0; c_offsety=hbody/2.0; c_ddrill=7; height3d=7.2
#name_additions = ["Px{0:1.1f}mm_Py{1:1.1f}mm".format(rmx, 2 * rmy)]
makePotentiometerVertical(style="trimmer", shaft_hole=False, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=vpinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh,height3d=height3d)
makePotentiometerVertical(style="trimmer", shaft_hole=True, class_name=class_name, wbody=vwbody, hbody=hbody, d_body=dbody, dshaft=dshaft, dscrew=dscrew, c_ddrill=c_ddrill,c_offsetx=c_offsetx, c_offsety=c_offsety, pinxoffset=voffsetx,pinyoffset=vpinyoffset, pins=pins, rmx=rmx, rmy=rmy, ddrill=ddrill, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_trh_bel,height3d=height3d)
#name_additions=[]
# Rectangular spindle trimmers, through-hole pads, adjustment screw on the
# left face (style="screwleft").  Pin 1 sits at (pinxoffset, pinyoffset);
# rmx2/rmy2 and rmx3/rmy3 are pin 2 / pin 3 offsets relative to pin 1.
class_name="Bourns 3005"; add_description = "http://www.bourns.com/docs/Product-Datasheets/3005.pdf";
wbody=19.3; hbody=4.06; pinxoffset=16; pinyoffset=(hbody-2.54)/2.0+2.54; height3d = 7.87; rmx2=-7.62; rmy2=-2.54; rmx3=-12.7; rmy3=0; ddrill=1; dscrew=3; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody/2.0
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# Vishay 43: same pin pattern as the Bourns 3005, slightly different body.
class_name="Vishay 43"; add_description = "http://www.vishay.com/docs/57026/43.pdf";
wbody=19.0; hbody=4.8; pinxoffset=16; pinyoffset=(hbody-2.54)/2.0+2.54; height3d = 6.35; rmx2=-7.62; rmy2=-2.54; rmx3=-12.7; rmy3=0; ddrill=1; dscrew=2.36; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody/2.0
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# Bourns 3006 and 3009 rectangular spindle trimmers (datasheet URLs in
# add_description), all side-adjust ("screwleft") with through-hole pads.
# Variant letters differ only in pin placement relative to pin 1.
class_name="Bourns 3006P"; add_description = "https://www.bourns.com/docs/Product-Datasheets/3006.pdf";
wbody=19.05; hbody=4.83; pinxoffset=16; pinyoffset=(hbody-2.54)/2.0+2.54; height3d = 6.35; rmx2=-7.62; rmy2=-2.54; rmx3=-12.7; rmy3=0; ddrill=1; dscrew=2.36; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody/2.0
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# 3006W: pin 2 offset below the body (rmy2=-5.06), pins outside the outline.
class_name="Bourns 3006W"
wbody=19.05; hbody=4.83; pinxoffset=16; pinyoffset=4.83+0.15; height3d = 6.35; rmx2=-7.62; rmy2=-5.06; rmx3=-12.7; rmy3=0; ddrill=1; dscrew=2.36; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody/2.0
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# 3006Y: wider 17.78 mm end-to-end pin span.
class_name="Bourns 3006Y"
wbody=19.05; hbody=4.83; pinxoffset=18.42; pinyoffset=(hbody-2.54)/2.0+2.54; height3d = 6.35; rmx2=-(17.78-7.62); rmy2=-2.54; rmx3=-17.78; rmy3=0; ddrill=1; dscrew=2.36; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody/2.0
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# 3009 series: same pin patterns as 3006 but a taller body (height3d=8.98).
class_name="Bourns 3009P"; add_description = "http://www.bourns.com/docs/Product-Datasheets/3009.pdf"
wbody=19.05; hbody=4.83; pinxoffset=16; pinyoffset=(hbody-2.54)/2.0+2.54; height3d = 8.98; rmx2=-7.62; rmy2=-2.54; rmx3=-12.7; rmy3=0; ddrill=1; dscrew=2.36; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody/2.0
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
class_name="Bourns 3009Y"
wbody=19.05; hbody=4.83; pinxoffset=18.42; pinyoffset=(hbody-2.54)/2.0+2.54; height3d = 8.98; rmx2=-(17.78-7.62); rmy2=-2.54; rmx3=-17.78; rmy3=0; ddrill=1; dscrew=2.36; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody/2.0
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# Bourns 3296 square multiturn trimmers (datasheet in add_description).
# W/Y are top-adjust (style="screwtop", screw near the body corner);
# X is side-adjust (style="screwleft"); pins on a 2.54 mm grid.
class_name="Bourns 3296W"; add_description = "https://www.bourns.com/pdfs/3296.pdf";
wbody=9.53; hbody=4.83; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=2.41; height3d = 10.03; rmx2=-2.54; rmy2=0; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=2.19; wscrew = dscrew; screwxoffset = wbody-1.27; screwyoffset = hbody-1.27
style = "screwtop"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tst, height3d=height3d)
class_name="Bourns 3296X"
wbody=9.53; hbody=4.83; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=2.41; height3d = 10.03; rmx2=-2.54; rmy2=0; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew = 2.19; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody-1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# 3296Y: pin 2 staggered (rmy2=2.54) instead of in-line.
class_name="Bourns 3296Y"
wbody=9.53; hbody=4.83; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=1.14; height3d = 10.03; rmx2=-2.54; rmy2=2.54; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=2.19; wscrew = dscrew; screwxoffset = wbody-1.27; screwyoffset = hbody-1.27
style = "screwtop"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tst, height3d=height3d)
# Bourns 3296Z: side-adjust ("screwleft") variant with staggered pin 2
# (rmy2=2.54); geometry per the 3296 family datasheet set in add_description
# earlier in this section.
class_name="Bourns 3296Z"
wbody=9.53; hbody=4.83; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=hbody-1.14-2.54; height3d = 10.03; rmx2=-2.54; rmy2=2.54; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=2.19; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody-1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
# Fix: pass shaft_hole=False explicitly.  This was the only makeSpindleTrimmer
# call in this section relying on the parameter's default; every sibling call
# passes it, so spell it out here too for consistency (and safety should the
# default ever differ).
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# 3296P: body dimensions swapped relative to W/X/Y (10.03 x 9.53) and pin 3
# placed below pin 1 (rmx3=0, rmy3=5.08) rather than in a row.
class_name="Bourns 3296P"
wbody=10.03; hbody=9.53; pinxoffset=wbody/2.0; pinyoffset=(hbody-5.08)/2.0; height3d = 4.83; rmx2=-2.54; rmy2=2.54; rmx3=0; rmy3=5.08; ddrill=0.8; dscrew=2.19; wscrew = 1.52; screwxoffset = 0; screwyoffset = 1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# Bourns 3299 multiturn trimmers (datasheet in add_description): same pin
# grid as the 3296 family but a taller 6.10 mm body.  W/Y are top-adjust
# ("screwtop"), X/Z side-adjust ("screwleft"), P has the rotated body.
class_name="Bourns 3299W"; add_description = "https://www.bourns.com/pdfs/3299.pdf";
wbody=9.53; hbody=6.10; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=1.91; height3d = 10.03; rmx2=-2.54; rmy2=0; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=2.19; wscrew = dscrew; screwxoffset = wbody-1.27; screwyoffset = hbody-1.27
style = "screwtop"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tst, height3d=height3d)
class_name="Bourns 3299X"
wbody=9.53; hbody=6.10; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=1.91; height3d = 10.03; rmx2=-2.54; rmy2=0; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew = 2.19; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody-1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# Y/Z: pin 2 staggered (rmy2=2.54).
class_name="Bourns 3299Y"
wbody=9.53; hbody=6.10; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=1.91; height3d = 10.03; rmx2=-2.54; rmy2=2.54; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=2.19; wscrew = dscrew; screwxoffset = wbody-1.27; screwyoffset = hbody-1.27
style = "screwtop"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tst, height3d=height3d)
class_name="Bourns 3299Z"
wbody=9.53; hbody=6.10; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=1.91; height3d = 10.03; rmx2=-2.54; rmy2=2.54; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=2.19; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody-1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# P: rotated body, pin 3 below pin 1 (rmx3=0, rmy3=5.08).
class_name="Bourns 3299P"
wbody=10.03; hbody=9.53; pinxoffset=wbody/2.0; pinyoffset=(hbody-5.08)/2.0; height3d = 6.10; rmx2=-2.54; rmy2=2.54; rmx3=0; rmy3=5.08; ddrill=0.8; dscrew=2.19; wscrew = 1.52; screwxoffset = 0; screwyoffset = 1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# Bourns 3266 miniature multiturn trimmers (datasheet in add_description):
# 6.71 mm body, 2.54 mm pin grid.  Y/W are top-adjust ("screwtop"),
# Z/X side-adjust ("screwleft"), P has the rotated body.
class_name="Bourns 3266Y"; add_description = "https://www.bourns.com/docs/Product-Datasheets/3266.pdf";
# NOTE(review): screwxoffset here is wbody-1.22, while sibling 3266W uses
# wbody-1.27 — possible typo; confirm against the 3266 datasheet drawing.
wbody=6.71; hbody=4.5; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=2.16; height3d = 6.71; rmx2=-2.54; rmy2=0; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=1.78; wscrew = dscrew; screwxoffset = wbody-1.22; screwyoffset = hbody-1.27
style = "screwtop"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tst, height3d=height3d)
class_name="Bourns 3266Z"
wbody=6.71; hbody=4.5; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=2.16; height3d = 6.71; rmx2=-2.54; rmy2=0; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew = 1.78; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody-1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# W/X: pin 2 staggered (rmy2=2.54).
class_name="Bourns 3266W"
wbody=6.71; hbody=4.5; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=1.02; height3d = 6.71; rmx2=-2.54; rmy2=2.54; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=1.78; wscrew = dscrew; screwxoffset = wbody-1.27; screwyoffset = hbody-1.27
style = "screwtop"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tst, height3d=height3d)
class_name="Bourns 3266X"
wbody=6.71; hbody=4.5; pinxoffset=(wbody-5.08)/2.0+5.08; pinyoffset=1.02; height3d = 6.71; rmx2=-2.54; rmy2=2.54; rmx3=-5.08; rmy3=0; ddrill=0.8; dscrew=1.78; wscrew = 1.52; screwxoffset = 0; screwyoffset = hbody-1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
# P: square rotated body, pin 3 below pin 1 (rmx3=0, rmy3=5.08).
class_name="Bourns 3266P"
wbody=6.71; hbody=6.71; pinxoffset=wbody/2.0; pinyoffset=(hbody-5.08)/2.0; height3d = 4.5; rmx2=-2.54; rmy2=2.54; rmx3=0; rmy3=5.08; ddrill=0.8; dscrew=1.78; wscrew = 1.52; screwxoffset = 0; screwyoffset = 1.27
style = "screwleft"; SMD_pads = False; SMD_padsize = []
makeSpindleTrimmer(shaft_hole=False, class_name=class_name, ddrill=ddrill, wbody=wbody, hbody=hbody, pinxoffset=pinxoffset, pinyoffset=pinyoffset, rmx2=rmx2, rmy2=rmy2, rmx3=rmx3, rmy3=rmy3, dscrew=dscrew, wscrew=wscrew, screwxoffset=screwxoffset, screwyoffset=screwyoffset, style=style, SMD_pads=SMD_pads, SMD_padsize=SMD_padsize, specialtags=[], add_description=add_description, name_additions=[], script3d=script3d_tsl, height3d=height3d)
| 151.41844
| 495
| 0.764309
| 9,779
| 64,050
| 4.857654
| 0.032723
| 0.052481
| 0.028588
| 0.036756
| 0.948845
| 0.935246
| 0.930825
| 0.92051
| 0.912237
| 0.898491
| 0
| 0.06115
| 0.087494
| 64,050
| 422
| 496
| 151.777251
| 0.751617
| 0.06701
| 0
| 0.365439
| 0
| 0.022663
| 0.068105
| 0.001976
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016997
| 0
| 0.016997
| 0.002833
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ddb3325498618dd871d413c684017a29b1838a5
| 80
|
py
|
Python
|
jenkins/test.py
|
martyni/static_uploader
|
6509303aaf1cbfca83189ecbe346345fefb0a234
|
[
"MIT"
] | 1
|
2017-06-12T13:59:21.000Z
|
2017-06-12T13:59:21.000Z
|
jenkins/test.py
|
martyni/static_uploader
|
6509303aaf1cbfca83189ecbe346345fefb0a234
|
[
"MIT"
] | 1
|
2021-06-01T21:49:30.000Z
|
2021-06-01T21:49:30.000Z
|
jenkins/test.py
|
martyni/static_uploader
|
6509303aaf1cbfca83189ecbe346345fefb0a234
|
[
"MIT"
] | null | null | null |
import requests
import sys

# Report the base URL under test, taken from the first command-line argument.
# Fix: the original used the Python-2-only `print` statement, which is a
# SyntaxError under Python 3; print() with a single string argument behaves
# identically on both Python 2 and 3.
print("url: " + sys.argv[1])
base_url = sys.argv[1]
| 11.428571
| 27
| 0.6875
| 14
| 80
| 3.857143
| 0.571429
| 0.222222
| 0.37037
| 0.407407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.175
| 80
| 6
| 28
| 13.333333
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0.063291
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3df7d9547ab5ffa2604f8b528f1fe7db20182821
| 27,406
|
py
|
Python
|
week/migrations/0001_initial.py
|
Tarun1226/Fitgirl
|
41686530c09eee33492d70474cd11749123e8eba
|
[
"MIT"
] | null | null | null |
week/migrations/0001_initial.py
|
Tarun1226/Fitgirl
|
41686530c09eee33492d70474cd11749123e8eba
|
[
"MIT"
] | 13
|
2019-03-04T00:59:24.000Z
|
2021-09-08T00:51:08.000Z
|
week/migrations/0001_initial.py
|
Tarun1226/Fitgirl
|
41686530c09eee33492d70474cd11749123e8eba
|
[
"MIT"
] | 1
|
2019-03-11T02:24:52.000Z
|
2019-03-11T02:24:52.000Z
|
# Generated by Django 2.1.5 on 2019-03-04 02:29
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.documents.blocks
import wagtail.embeds.blocks
import wagtail.images.blocks
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0041_group_collection_permissions_verbose_name_plural'),
('wagtailimages', '0001_squashed_0021'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('account', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='AboutUsIndexPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('description', wagtail.core.fields.RichTextField(blank=True)),
('ad_url', models.URLField(blank=True)),
('ad_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='BlogPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('author', models.CharField(max_length=255)),
('date', models.DateField(verbose_name='Post date')),
('body', wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('HTML', wagtail.core.blocks.RawHTMLBlock()), ('embedded_video', wagtail.embeds.blocks.EmbedBlock(icon='media')), ('Page', wagtail.core.blocks.PageChooserBlock()), ('Document', wagtail.documents.blocks.DocumentChooserBlock())])),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='CustomFormSubmission',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('form_data', models.TextField()),
('submit_time', models.DateTimeField(auto_now_add=True, verbose_name='submit time')),
],
),
migrations.CreateModel(
name='Disclaimerlink',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('disclaimer', wagtail.core.fields.RichTextField(blank=True)),
('disclaimer2', models.CharField(blank=True, max_length=10000)),
('disclaimer3', models.CharField(blank=True, max_length=10000)),
('disclaimer4', models.CharField(blank=True, max_length=10000)),
('disclaimer5', models.CharField(blank=True, max_length=10000)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='DisclaimerPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('disclaimer', wagtail.core.fields.RichTextField(blank=True)),
('disclaimer2', models.CharField(blank=True, max_length=10000)),
('disclaimer3', models.CharField(blank=True, max_length=10000)),
('disclaimer4', models.CharField(blank=True, max_length=10000)),
('disclaimer5', models.CharField(blank=True, max_length=10000)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='ExtrasIndexPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('description', wagtail.core.fields.RichTextField(blank=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='Fact',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('body', wagtail.core.fields.RichTextField(blank=True)),
('display_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='MentalArtPostPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.core.fields.RichTextField(blank=True)),
('display_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='MentalPostPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.core.fields.RichTextField(blank=True)),
('display_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='ModelIndexPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('description', wagtail.core.fields.RichTextField(blank=True)),
('intro', models.CharField(blank=True, max_length=255)),
('ad_url', models.URLField(blank=True)),
('vertical_url', models.URLField(blank=True)),
('announcements', wagtail.core.fields.RichTextField(blank=True)),
('ad_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('display_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('vertical_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='NutritionPostPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.core.fields.RichTextField(blank=True)),
('morecontent', wagtail.core.fields.RichTextField(blank=True)),
('facts', wagtail.core.fields.RichTextField(blank=True)),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('display_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='PhysicalFormField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('label', models.CharField(help_text='The label of the form field', max_length=255, verbose_name='label')),
('field_type', models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('multiselect', 'Multiple select'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time'), ('hidden', 'Hidden field')], max_length=16, verbose_name='field type')),
('required', models.BooleanField(default=True, verbose_name='required')),
('choices', models.TextField(blank=True, help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', verbose_name='choices')),
('default_value', models.CharField(blank=True, help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, verbose_name='default value')),
('help_text', models.CharField(blank=True, max_length=255, verbose_name='help text')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
migrations.CreateModel(
name='PhysicalPostPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('strength', wagtail.core.fields.RichTextField(blank=True)),
('agility', wagtail.core.fields.RichTextField(blank=True)),
('flexibility', wagtail.core.fields.RichTextField(blank=True)),
('points_for_this_activity', models.IntegerField(blank=True, default=0)),
('timer_for_this_activity', models.CharField(blank=True, default=datetime.time(0, 11), help_text='Time format should be in MM:SS', max_length=20)),
('thank_you_text', wagtail.core.fields.RichTextField(blank=True)),
('start_date', models.DateTimeField(blank=True, null=True, verbose_name='Start Date')),
('end_date', models.DateTimeField(blank=True, null=True, verbose_name='End Date')),
('display_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='PostassessmentFormField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('label', models.CharField(help_text='The label of the form field', max_length=255, verbose_name='label')),
('field_type', models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('multiselect', 'Multiple select'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time'), ('hidden', 'Hidden field')], max_length=16, verbose_name='field type')),
('required', models.BooleanField(default=True, verbose_name='required')),
('choices', models.TextField(blank=True, help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', verbose_name='choices')),
('default_value', models.CharField(blank=True, help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, verbose_name='default value')),
('help_text', models.CharField(blank=True, max_length=255, verbose_name='help text')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
migrations.CreateModel(
name='PostassessmentPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('thank_you_text', wagtail.core.fields.RichTextField(blank=True)),
('points_for_this_activity', models.IntegerField(blank=True, default=0)),
('start_date', models.DateTimeField(blank=True, null=True, verbose_name='Start Date')),
('end_date', models.DateTimeField(blank=True, null=True, verbose_name='End Date')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='PreassessmentFormField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('label', models.CharField(help_text='The label of the form field', max_length=255, verbose_name='label')),
('field_type', models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('multiselect', 'Multiple select'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time'), ('hidden', 'Hidden field')], max_length=16, verbose_name='field type')),
('required', models.BooleanField(default=True, verbose_name='required')),
('choices', models.TextField(blank=True, help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', verbose_name='choices')),
('default_value', models.CharField(blank=True, help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, verbose_name='default value')),
('help_text', models.CharField(blank=True, max_length=255, verbose_name='help text')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
migrations.CreateModel(
name='PreassessmentPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('thank_you_text', wagtail.core.fields.RichTextField(blank=True)),
('points_for_this_activity', models.IntegerField(blank=True, default=0)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='Print',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.core.fields.RichTextField(blank=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='ProgramIndexPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('description', wagtail.core.fields.RichTextField(blank=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='QuestionFormField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('label', models.CharField(help_text='The label of the form field', max_length=255, verbose_name='label')),
('field_type', models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('multiselect', 'Multiple select'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time'), ('hidden', 'Hidden field')], max_length=16, verbose_name='field type')),
('required', models.BooleanField(default=True, verbose_name='required')),
('choices', models.TextField(blank=True, help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', verbose_name='choices')),
('default_value', models.CharField(blank=True, help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, verbose_name='default value')),
('help_text', models.CharField(blank=True, max_length=255, verbose_name='help text')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
migrations.CreateModel(
name='QuestionPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('thank_you_text', wagtail.core.fields.RichTextField(blank=True)),
('points_for_this_activity', models.IntegerField(blank=True, default=0)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='QuestionPageText',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('description', wagtail.core.fields.RichTextField(blank=True)),
('thank_you_text', wagtail.core.fields.RichTextField(blank=True)),
('points_for_this_activity', models.IntegerField(blank=True, default=0)),
('display_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='QuestionTextFormField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('label', models.CharField(help_text='The label of the form field', max_length=255, verbose_name='label')),
('field_type', models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('multiselect', 'Multiple select'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time'), ('hidden', 'Hidden field')], max_length=16, verbose_name='field type')),
('required', models.BooleanField(default=True, verbose_name='required')),
('choices', models.TextField(blank=True, help_text='Comma separated list of choices. Only applicable in checkboxes, radio and dropdown.', verbose_name='choices')),
('default_value', models.CharField(blank=True, help_text='Default value. Comma separated values supported for checkboxes.', max_length=255, verbose_name='default value')),
('help_text', models.CharField(blank=True, max_length=255, verbose_name='help text')),
('page', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_field', to='week.QuestionPageText')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
migrations.CreateModel(
name='RewardsIndexPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('description', wagtail.core.fields.RichTextField(blank=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='RewardsPostPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('intro', wagtail.core.fields.RichTextField(blank=True)),
('description', wagtail.core.fields.RichTextField(blank=True)),
('display_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='UserActivity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Activity', models.CharField(max_length=50)),
('Week', models.IntegerField(null=True)),
('DayOfWeek', models.CharField(max_length=10)),
('points_earned', models.IntegerField(null=True)),
('creation_date', models.DateField()),
('updated_date', models.DateField()),
('program', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Program')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='WeekPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('description', wagtail.core.fields.RichTextField(blank=True)),
('start_date', models.DateTimeField(blank=True, null=True, verbose_name='Start Date')),
('end_date', models.DateTimeField(blank=True, null=True, verbose_name='End Date')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.AddField(
model_name='questionformfield',
name='page',
field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_fields', to='week.QuestionPage'),
),
migrations.AddField(
model_name='preassessmentformfield',
name='page',
field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_fields', to='week.PreassessmentPage'),
),
migrations.AddField(
model_name='postassessmentformfield',
name='page',
field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_fields', to='week.PostassessmentPage'),
),
migrations.AddField(
model_name='physicalformfield',
name='page',
field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_fields', to='week.PhysicalPostPage'),
),
migrations.AddField(
model_name='customformsubmission',
name='page',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Page'),
),
migrations.AddField(
model_name='customformsubmission',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='question_form', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='customformsubmission',
unique_together={('page', 'user')},
),
]
| 61.586517
| 461
| 0.603189
| 2,711
| 27,406
| 5.962744
| 0.08779
| 0.051222
| 0.035509
| 0.0558
| 0.86279
| 0.861182
| 0.838911
| 0.831673
| 0.828766
| 0.818559
| 0
| 0.007455
| 0.246296
| 27,406
| 444
| 462
| 61.725225
| 0.775126
| 0.001642
| 0
| 0.748284
| 1
| 0
| 0.211557
| 0.014401
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022883
| 0
| 0.032037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b14203aa97e45c85d4a026d0c82d204ab916e3a4
| 202,162
|
py
|
Python
|
projects/src/main/python/CodeJam/Y14R5P1/Nin/generated_py_de99b6cfd1c241b9b63c1aa0be3366a5.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 5
|
2020-04-05T18:04:13.000Z
|
2021-04-13T20:34:19.000Z
|
projects/src/main/python/CodeJam/Y14R5P1/Nin/generated_py_de99b6cfd1c241b9b63c1aa0be3366a5.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 1
|
2020-04-29T21:42:26.000Z
|
2020-05-01T23:45:45.000Z
|
projects/src/main/python/CodeJam/Y14R5P1/Nin/generated_py_de99b6cfd1c241b9b63c1aa0be3366a5.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 3
|
2020-01-27T16:02:14.000Z
|
2021-02-08T13:25:15.000Z
|
import sys
sys.path.append('/home/george2/Raise/ProgramRepair/CodeSeer/projects/src/main/python')
from CodeJam.Y14R5P1.Nin.a2 import *
def func_717d246c1b9b4310ac153dec98d6567d(cum, ar, al):
amid = (al + ar) // 2
left = cum[amid]
return left
def func_45a8ae82050e4422b36fef97690207cf(cum, ar, al):
amid = (al + ar) // 2
left = cum[amid]
return amid
def func_363fa4131bca4c56a7ac090593e893fc(cum, amid, b):
left = cum[amid]
right = cum[b] - cum[amid]
return right
def func_d5d4c3663d48447494b8a8cdaf4e232d(cum, amid, b):
left = cum[amid]
right = cum[b] - cum[amid]
return left
def func_7d07b68537204d21b89775bd045c274a(cum, left, amid, b):
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return al
def func_f30e678f9551418faa66237510177c05(cum, left, amid, b):
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return ar
def func_4fdcfbb1250c4998bc11c407402b095b(cum, left, amid, b):
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return right
def func_faf04282331749f9be21410277064c49(cum, ar, al, b):
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
return amid
def func_5e90434bef2b45f8915866e2133ffd6a(cum, ar, al, b):
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
return right
def func_acd5e8dd008f49f09ff5962dd582dae6(cum, ar, al, b):
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
return left
def func_17d2f2e187844d84bb9dcc0913caa8f8(cum, amid, b):
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return al
def func_97ed03ac8d0c4d27808250ddf19042e5(cum, amid, b):
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return right
def func_1ad4fb9a676f4952867d44daa778ddd8(cum, amid, b):
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return left
def func_c5d88fa3e4a24aeeabb77ecb458070ed(cum, amid, b):
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return ar
def func_2aee8dcf68974fdb989c67b57fae2a07(cum, b):
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return amid
def func_d5e8344c93b8484ca77f00e9a008672a(cum, b):
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return left
def func_fc55837e1d3543fba8b9a861f4edf0e6(cum, b):
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return al
def func_9f255a63a75e407fb8704e1e07f8f7ed(cum, b):
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return right
def func_fd9dfb08df09463c905f4b6a1296e7d5(cum, b):
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return ar
def func_15da570d60e344b997a020076cdf82dd(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
return rem
def func_1cd4a7f74acd420494709fcbcfd7b065(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
return al
def func_f2db81a6a6bb4227bffcb9a56a5b2d71(b):
al = 0
ar = b
return al
def func_47501cdae7d34d3a9bfa47a8ce22e9ee(b):
al = 0
ar = b
return ar
def func_a72d96056dd84545a5b903150c3e0ebe(cum, b):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return al
def func_f5013bbc8df04672b52f47f620d0b9fa(cum, b):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return left
def func_50013e4f2b484aceaed7ea7385722584(cum, b):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return amid
def func_f493f3cb37474b128635bbb9f1477314(cum, b):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return right
def func_6113c133c85443f59af3e5121749ee30(cum, b):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return ar
def func_62b3513891cc447f88cd44f3e27fc1ee(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return right
def func_4b1548a6488249e9ba40dfae3e302bad(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return s
def func_5115fdf84d9a4a69b77a81dc349e784b(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return al
def func_120bf244f89e41d9ab7493ff05926cbc(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return amid
def func_67c1179c8e544f0fb3e140a9869a8b0a(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return left
def func_8ff03a8e5077497bbe76bde6b7e82095(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return ar
def func_d708a0becec44cd79d52e86ecb718870(s, cum, al, b, n):
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return s
def func_e599fddf746044b3a708798ee43d15b5(vsota, s, p):
s.sort()
p = (s[0] + s[1]) / vsota
return p
def func_378f057af3c7477eb6074a442ad2f0bb(vsota, s, p):
p = (s[0] + s[1]) / vsota
best = max(best, p)
return p
def func_22b5edebfed34388b50dc8f795eed272(vsota, s, p):
p = (s[0] + s[1]) / vsota
best = max(best, p)
return best
def func_c3f21485ff654940a3e153744bd5fefc(s, cum, ar, b, p, n):
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
return s
def func_1fc125baeee343e0b82dfb53afb3e6a3(s, cum, ar, b, p, n):
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
return best
def func_5610c77d459f46e7906a9deed81b7e9b(s, cum, ar, b, n):
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
return s
def func_4ce92cb7d330420d966c06c8f6b5350a(vsota, s, p):
s.sort()
p = (s[0] + s[1]) / vsota
return p
def func_5adbee6badf448c0a087a77aeec7997a(vsota, s, p):
p = (s[0] + s[1]) / vsota
best = max(best, p)
return p
def func_b4e1bd36610b4eefb7a3e1f2f82b513a(vsota, s, p):
p = (s[0] + s[1]) / vsota
best = max(best, p)
return best
def func_0a8fcc166bb04210b46db2f73cf70596(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
return rem
def func_2b0c152a009a4655a2da10197fe560ca(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
return ar
def func_62c78243c0704d67be90d4ee6bbbad97(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
return al
def func_416ceb9c18e74625a45d645bfbca37b8(cum, b):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return ar
def func_b10aa2dc8e164c58939b98d21cb6f6af(cum, b):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return amid
def func_fa25c79888944ddf920da89f544f8bad(cum, b):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return left
def func_64bc4a1e98d14a63ba77a4c3b4dcdb98(cum, b):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return al
def func_3fabea1491ef43f5afb69db1357b3b66(cum, b):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return right
def func_c584c2bc39d64bacaef4e09f4a94e3bb(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return ar
def func_52fa32389756454bbbb63c0614cc1578(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return amid
def func_6c9db2e6a8ea432da36b2387b22f5b6b(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return left
def func_d9af8ec9b2da476493c9c78c7180b79b(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return right
def func_92342efe662940ddbf295031682d7dda(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return al
def func_8ff35685ac2c4676bd2bc4544d5c5b53(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return s
def func_715658bd0cb549d9a3f2aaf71acd3144(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return s
def func_6b138d2a733c484ba8d6324a47ec6797(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return left
def func_4af1f2bd458d420999b25940bc0ea69c(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return ar
def func_207f5e3017f744819b1e1a2f6c09dc03(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return amid
def func_182a8afcec51468c833b6fab79cadb19(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return right
def func_cb280208cbe9442b9db42a73f99f5438(s, cum, b, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return al
def func_55a12b490ae24ddda9518ed612dabee5(vsota, s, cum, al, b, p, n):
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return s
def func_28f9937bc37f48ec9ef630d305a1f46a(vsota, s, cum, al, b, p, n):
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return p
def func_1c3f9c53836a404497fd19deb6e6ade6(vsota, s, p):
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return p
def func_b2b29617f6e54becb6362632613f419e(vsota, s, p):
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return best
def func_4d425522d868448294b6d44de49382e4(vsota, s, cum, ar, b, p, n):
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
return p
def func_ea43a5b874244cf9a93b60c0edbbf8f1(vsota, s, cum, ar, b, p, n):
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
return s
def func_805de542e7c54250a136f8e8456c5060(vsota, s, cum, ar, b, p, n):
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
return best
def func_ca04c13dfaa04acd9c639dfdb7f5db8f(s, cum, ar, b, p, n):
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
return s
def func_e483232c797f45d2815e1cb0fda8b88b(s, cum, ar, b, p, n):
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
return best
def func_febd6d26cf184f00a5a73b2673710fff(vsota, s, cum, ar, b, p, n):
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return s
def func_307ffbb0ec06465ea2d052c1d5736801(vsota, s, cum, ar, b, p, n):
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return p
def func_58c765c44b804436bf8614cef8b99a7e(vsota, s, p):
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return best
def func_9ce8fbc4d87b42049ae4afbcbb4af04f(vsota, s, p):
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return p
def func_535cf2767e3f443b892d9d2d1fa55389(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return ar
def func_538477d687f14906a3c785e4dcf7915e(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return left
def func_262333ff1ffb4f2c816e12d54ec99f0a(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return right
def func_b9664bbfa0aa49abbe4aa874a609f5e7(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return amid
def func_e364e1aad6424a89a45b69a8a22ce0c8(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return al
def func_fb7e3f128fe44aed93ed764974a6d9b6(vsota, cum, b, n):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
return rem
def func_fcd65948a50c4fa3ab294a9237172cd4(s, cum, b, n):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return amid
def func_e2da72ec51d640648f401c4bfcda296e(s, cum, b, n):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return left
def func_54ae434b2d2d457ebc2ce310e21a3f12(s, cum, b, n):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return ar
def func_d23debd6797e49c4922935ce4d7779a8(s, cum, b, n):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return s
def func_fea698bf101d49dd8ff396b2839e5c55(s, cum, b, n):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return right
def func_268a5f466f2a4f36ab767412c5fb70ca(s, cum, b, n):
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
return al
def func_d3039af70ed14a359a1738a328ff2c44(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return right
def func_61a3e42191734991a454042ca2ffb267(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return s
def func_53d580f6fa164617ad985803db21fcfa(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return left
def func_812aa4709e4241fbae7b0fcd1c908e88(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return al
def func_8dfdba3ab30c48488b08868a061db22e(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return ar
def func_01c52dfb08c54fcd9491cd32b5f3f4f6(s, cum, b, n):
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
return amid
def func_59ec8afc6cbc4d4096123f1e3dafd920(vsota, s, cum, b, p, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return p
def func_04e569aee58040e38359df688af93409(vsota, s, cum, b, p, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return ar
def func_30545de20ce14a409b69782d0f4b6a0b(vsota, s, cum, b, p, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return left
def func_db7c1d88b49446e69a0b1d15dba93268(vsota, s, cum, b, p, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return s
def func_87839d5537dc4bd5adb2d56166b0a9d5(vsota, s, cum, b, p, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
return right
def func_59ae3ca45c5340d3948fa43bfea91f83(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return the last midpoint probed (requires b >= 2)."""
    al = 0  # bugfix: 'al' and 'ar' were read unbound (NameError); siblings use al = 0, ar = b
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return amid
def func_87e27f22265b4acfbf538c5994a2384c(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return the lower bound al of the balance point."""
    al = 0  # bugfix: 'al' and 'ar' were read unbound (NameError); siblings use al = 0, ar = b
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return al
def func_8beb4563966242aaba3d61051c05550e(vsota, s, cum, al, b, p, n):
    """Sorted 3-way segment sums of prefix array cum at cuts al and b; returns s."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the program this fragment was mutated from. TODO confirm.
    best = max(best, p)
    return s
def func_f3ece5c5cd6c461b961fe28e8df711f5(vsota, s, cum, al, b, p, n):
    """Sorted 3-way segment sums of cum at cuts al and b; returns the running best score."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return best
def func_e11bcbf73ee24c729a3b60f6a9899dc3(vsota, s, cum, al, b, p, n):
    """Sorted 3-way segment sums of cum at cuts al and b; returns the score p."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return p
def func_a0b2a79b43124a29be14636d58395fa8(vsota, s, cum, ar, b, p, n):
    """Score the incoming list s, then rebuild it for the cut at ar; returns the new s."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return s
def func_8502e54c64714600b7da5b1f5e112be0(vsota, s, cum, ar, b, p, n):
    """Score the incoming list s, then rebuild it for the cut at ar; returns the score p."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return p
def func_c1be09dcc051423dad53ee5a483eb54c(vsota, s, cum, ar, b, p, n):
    """Score the incoming list s, then rebuild it for the cut at ar; returns best."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return best
def func_b1086f2ee0184945a641f2b079a056b7(vsota, s, cum, ar, b, p, n):
    """Score the incoming s (assumed sorted), rebuild and sort it at cut ar; returns best."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return best
def func_ccc204355bc449a7bc39318afc6e6b75(vsota, s, cum, ar, b, p, n):
    """Score the incoming s (assumed sorted), rebuild and sort it at cut ar; returns the new s."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return s
def func_498c3d76911240c5bd3e15ac70b5e9e7(vsota, s, cum, ar, b, p, n):
    """Score the incoming s (assumed sorted), rebuild and sort it at cut ar; returns the stale p."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return p
def func_61075cef8b6c4f13a0e9177626e48f79(vsota, s, cum, ar, b, p, n):
    """Fold p into best, then rescore the split at cut ar; returns the new p."""
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return p
def func_3307473703de463ea1d409dd7697fb1b(vsota, s, cum, ar, b, p, n):
    """Fold p into best, then rescore the split at cut ar; returns the sorted sums s."""
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return s
def func_bf541e42e1654c03b84ad3ef3bb0b4f1(vsota, s, cum, ar, b, p, n):
    """Fold p into best, then rescore the split at cut ar; returns best (pre-rescore)."""
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return best
def func_df789ba0ddd845df806a7badffd8f3e7(vsota, s, cum, ar, b, p, n):
    """Score the 3-way split of cum at cuts ar and b; returns best."""
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return best
def func_c4252f21333747a981fd0d7ebbabfe65(vsota, s, cum, ar, b, p, n):
    """Score the 3-way split of cum at cuts ar and b; returns the sorted sums s."""
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return s
def func_9386587a6d3a496288d1a2c2ca425b5f(vsota, s, cum, ar, b, p, n):
    """Score the 3-way split of cum at cuts ar and b; returns the score p."""
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return p
def func_6bace7b669f444d1bd3d36bef569aa0a(vsota, s, cum, b, n):
    """Locate the balance point of cum[0..b]; return the three (unsorted) segment sums."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    # Invariant: the left half at lo does not exceed the right half.
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = [cum[lo], cum[b] - cum[lo], cum[n] - cum[b]]
    return s
def func_cb272c1ef3a94239beeea8fbfa4f7504(vsota, s, cum, b, n):
    """Locate the balance point of cum[0..b]; return the last midpoint probed."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = [cum[lo], cum[b] - cum[lo], cum[n] - cum[b]]
    return mid
def func_e3259b6bdac348bb9fbe86204be5ecad(vsota, s, cum, b, n):
    """Locate the balance point of cum[0..b]; return vsota minus the tail segment sum."""
    rem = vsota - (cum[n] - cum[b])
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = [cum[lo], cum[b] - cum[lo], cum[n] - cum[b]]
    return rem
def func_bd875015e6224d3b890b731d74bf9e2d(vsota, s, cum, b, n):
    """Locate the balance point of cum[0..b]; return the last right-half sum probed."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = [cum[lo], cum[b] - cum[lo], cum[n] - cum[b]]
    return rsum
def func_01b85ffbb17a45c89a2916b5d0512603(vsota, s, cum, b, n):
    """Locate the balance point of cum[0..b]; return its exclusive upper bound."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = [cum[lo], cum[b] - cum[lo], cum[n] - cum[b]]
    return hi
def func_ede3306e6f2f4458a4ec9f4ac60035e6(vsota, s, cum, b, n):
    """Locate the balance point of cum[0..b]; return the last left-half sum probed."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = [cum[lo], cum[b] - cum[lo], cum[n] - cum[b]]
    return lsum
def func_b90cf9bfef10445c9993534c8ff75728(vsota, s, cum, b, n):
    """Locate the balance point of cum[0..b]; return its inclusive lower bound."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = [cum[lo], cum[b] - cum[lo], cum[n] - cum[b]]
    return lo
def func_65a0fbd5131a402a98bcde406342eeab(s, cum, b, n):
    """Balance cum[0..b] by bisection; return the three segment sums sorted ascending."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return s
def func_2a09d4e696d844ebaac1cdffcdfab8a7(s, cum, b, n):
    """Balance cum[0..b] by bisection; return the last left-half sum probed."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return lsum
def func_a361a7fb84d44a17a45fd8993edf21e0(s, cum, b, n):
    """Balance cum[0..b] by bisection; return the last right-half sum probed."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return rsum
def func_88bb8d366a674c8983986b4dbc7e025e(s, cum, b, n):
    """Balance cum[0..b] by bisection; return the exclusive upper bound."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return hi
def func_919a45d8558a44ceae99b7c285a4c76f(s, cum, b, n):
    """Balance cum[0..b] by bisection; return the last midpoint probed."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return mid
def func_a530fb5fa2604b909c83b26a0cadeb75(s, cum, b, n):
    """Balance cum[0..b] by bisection; return the inclusive lower bound."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return lo
def func_d77873fd855f48b6a8ea9335e3ce8152(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the lower bound al."""
    al = 0  # bugfix: 'al' was read unbound (NameError); sibling variants initialise it to 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return al
def func_e0e7aeb3f30043b0bbdf5ba6405e9867(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return the last right-segment sum (requires b >= 2)."""
    al = 0  # bugfix: 'al' was read unbound (NameError); sibling variants initialise it to 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return right
def func_a3e4e08f0c9545d9a959d96cb3e31cb0(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return the three segment sums sorted ascending."""
    al = 0  # bugfix: 'al' was read unbound (NameError); sibling variants initialise it to 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return s
def func_4ba46be9fb5f4850bc5b4d8bb77ca36c(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return the two-smallest-segments fraction p."""
    al = 0  # bugfix: 'al' was read unbound (NameError); sibling variants initialise it to 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return p
def func_93f16441eb7248dc9efc79580331f01e(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return the upper bound ar."""
    al = 0  # bugfix: 'al' was read unbound (NameError); sibling variants initialise it to 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return ar
def func_cfb4be0f0ec841c89f0a9bd04333703b(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return the last midpoint probed (requires b >= 2)."""
    al = 0  # bugfix: 'al' was read unbound (NameError); sibling variants initialise it to 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return amid
def func_ed792c5e5d9144f2a757f47eb5443f4e(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return the last left-segment sum (requires b >= 2)."""
    al = 0  # bugfix: 'al' was read unbound (NameError); sibling variants initialise it to 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]             # sum of segment [0, amid)
        right = cum[b] - cum[amid]   # sum of segment [amid, b)
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return left
def func_7ecef8634c4443269276e9e8db793e0c(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection, score the split, fold into best; returns al."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test),
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return al
def func_32a2d8a356624e09b8dcde1d3846776b(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection, score the split, fold into best; returns p."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test),
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return p
def func_f6655af5d60c404caaeba68eaae9b5ef(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection, score the split, fold into best; returns amid."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test),
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return amid
def func_a311ecb00f7943a1845e7903503fcb1a(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection, score the split, fold into best; returns best."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test),
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return best
def func_5604a379b5fa4e878d68c078bf8e36cc(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection, score the split, fold into best; returns right."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test),
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return right
def func_6487b50ad02244b68aa52dca3941131c(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection, score the split, fold into best; returns s."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test),
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return s
def func_2b7090153f4a4881af01ec60025d75a3(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection, score the split, fold into best; returns left."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test),
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return left
def func_e1cf39ab5ce54ad583e65ce1c342bc50(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection, score the split, fold into best; returns ar."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test),
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return ar
def func_9a1c1722af3b41b68a11191c922788ff(vsota, s, cum, ar, al, b, p, n):
    """Score the split at al, then rebuild s for the cut at ar; returns the new (unsorted) s."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return s
def func_14cd7994503647a1b4883671c4fe6400(vsota, s, cum, ar, al, b, p, n):
    """Score the split at al, then rebuild s for the cut at ar; returns best."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return best
def func_97ae317be5cc4428950806848f01efc7(vsota, s, cum, ar, al, b, p, n):
    """Score the split at al, then rebuild s for the cut at ar; returns the score p."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return p
def func_43bfe0b0a5bf4519a0d8e94223ea6b9d(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, then rebuild and sort it for the cut at ar; returns best."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return best
def func_359e9843d3b24f2d879b44989df8609d(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, then rebuild and sort it for the cut at ar; returns the stale p."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return p
def func_f94bc630c37e41188da56a6f6d757385(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, then rebuild and sort it for the cut at ar; returns the new s."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return s
def func_9f5fb5caec0a44158f222efd133050ef(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild at cut ar, rescore; returns the fresh p."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return p
def func_0993bbad1802456a93f7a4f2a84b215e(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild at cut ar, rescore; returns best (pre-rescore)."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return best
def func_8da2d74b14fd4697995c0768f4054888(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild at cut ar, rescore; returns the rebuilt sorted s."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return s
def func_e5d00bd19901473fa20db8f2402e924f(vsota, s, cum, ar, b, p, n):
    """Fold the incoming p, rebuild at cut ar, rescore and fold again; returns s."""
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return s
def func_e4eb3f9599cd4fbcb8c7e6e627bd72a9(vsota, s, cum, ar, b, p, n):
    """Fold the incoming p, rebuild at cut ar, rescore and fold again; returns best."""
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return best
def func_aec5c083995243acabae57e632a65a97(vsota, s, cum, ar, b, p, n):
    """Fold the incoming p, rebuild at cut ar, rescore and fold again; returns the new p."""
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return p
def func_e9482789bf29497a99ddd954df22e0f3(vsota, s, cum, b, n):
    """Balance cum[0..b] by bisection; return the three segment sums sorted ascending."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return s
def func_a2faf4b1f3234edcb40e8c82aab7264b(vsota, s, cum, b, n):
    """Balance cum[0..b] by bisection; return the last midpoint probed."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return mid
def func_a8acf9f8062c492092cb7f2a10dc3b28(vsota, s, cum, b, n):
    """Balance cum[0..b] by bisection; return the last right-half sum probed."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return rsum
def func_38697314770b4a37a6a3a939299b792d(vsota, s, cum, b, n):
    """Balance cum[0..b] by bisection; return vsota minus the tail segment sum."""
    rem = vsota - (cum[n] - cum[b])
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return rem
def func_0652969ba3ef485099447dc180fc9bca(vsota, s, cum, b, n):
    """Balance cum[0..b] by bisection; return the exclusive upper bound."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return hi
def func_aa5946ef2b2d4d5d9c7fb0bba269909a(vsota, s, cum, b, n):
    """Balance cum[0..b] by bisection; return the inclusive lower bound."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return lo
def func_d951ebe498814d1c863d6c659bfd6ce2(vsota, s, cum, b, n):
    """Balance cum[0..b] by bisection; return the last left-half sum probed."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    return lsum
def func_9170acc4fa9a48928cfddab5a039ba8c(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the sorted sums."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota  # fraction outside the largest segment
    return s
def func_362945995a094d98be3e4bdf43ffdbde(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the score p."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return p
def func_88451ae485ce4ef5ac0eaae0c17c0162(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the lower bound."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return lo
def func_1b26807116354ec0a9d325da58ecd232(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the last left-half sum."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return lsum
def func_255f8d43509b419e96a03d0c54dc64de(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the upper bound."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return hi
def func_8fb124c2d6a84fb4b6b13a78c92d96e8(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the last right-half sum."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return rsum
def func_3ea71417667944fd8d3f8345f2eb4641(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the last midpoint."""
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return mid
def func_22fbaa42163945639fd71b13d2fe0642(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns al."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return al
def func_7d95c9f8ac1a4102bade97182171329b(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns ar."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return ar
def func_0a0e1b6c927948bcb8d54aa19c30df02(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns s."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return s
def func_bf9bff11066a462ebaa5301e03303c86(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns left."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return left
def func_6e288e5ef98f47c685ce8db7b0f99f8e(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns p."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return p
def func_7a2891bdc6c14519bb771a261c5a7d89(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns right."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return right
def func_ecd6ec4c74064c4e9f12c52ec51e392f(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns amid."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return amid
def func_a1530ef6d01144b7b8988858a83f271b(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns best."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return best
def func_9973ee79f4dd403eaf038acb5fbaa3cf(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns amid."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test)
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return amid
def func_8a66d41490504dd2b41ebe7bcf02a90e(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns ar."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test)
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return ar
def func_50ddf89e730644d08b595ff15dc112ef(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns best."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test)
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return best
def func_b8d959366275418e9d0ce71ac62d75d0(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns al."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test)
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return al
def func_ad739c7b1f634a068252401b7f022e6f(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns the new s."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test)
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return s
def func_1e9d5dafb0d34c5bb61cfbe17bc98470(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns right."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test)
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return right
def func_5ea77cd61f1f49888137c4cb3f9fe65f(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns left."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test)
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return left
def func_d218712f029849be800ac3eda3315ad9(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns p."""
    # NOTE(review): 'al' and 'ar' are never initialised (NameError at the loop test)
    # and 'best' below is read before assignment; siblings use al = 0, ar = b.
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return p
def func_94a7de48fbdc473ab8228aff19337229(vsota, s, cum, ar, al, b, p, n):
    """Score the split at al, then rebuild and sort s for the cut at ar; returns the new s."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return s
def func_7ca7a6f305124064892897f612689a95(vsota, s, cum, ar, al, b, p, n):
    """Score the split at al, then rebuild and sort s for the cut at ar; returns the stale p."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return p
def func_5716f4f9288d4ef2beac3029fe442df8(vsota, s, cum, ar, al, b, p, n):
    """Score the split at al, then rebuild and sort s for the cut at ar; returns best."""
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return best
def func_bc45fa924eb247c492416148059179b5(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild/sort at cut ar and rescore; returns best (pre-rescore)."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return best
def func_9f79ca79a611415ab6d51c45b86d7cd3(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild/sort at cut ar and rescore; returns the fresh p."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return p
def func_32d8d595d86a4999a51c189147fa69b1(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild/sort at cut ar and rescore; returns the rebuilt s."""
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return s
def func_206d4423904045aaa2caf90570ac006c(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild at cut ar, rescore and fold twice; returns best."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return best
def func_5ded8d05671647b3936d173b8be8c625(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild at cut ar, rescore and fold twice; returns the fresh p."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return p
def func_33031adcaa7c408f9993d8f1d3378b7c(vsota, s, cum, ar, b, p, n):
    """Score the incoming s, rebuild at cut ar, rescore and fold twice; returns the rebuilt s."""
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return s
def func_8bcf9fa3926647e3be71f656dd84bff4(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the 3-way split; return the score p."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return p
def func_8606815b134e468a833e9beff8f025a7(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the last right-half sum."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return rsum
def func_eabab10945dc48a5aceb212c38ff7990(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the lower bound."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return lo
def func_5cc7ec9e39a1465e9dc35bc8afa28f04(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the sorted sums."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return s
def func_2384072ee28a4d5dad31c9f5a121e4b1(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection; return vsota minus the tail segment sum."""
    rem = vsota - (cum[n] - cum[b])
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return rem
def func_4cec2538baab49ce867e36b74e6947e5(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the last midpoint."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return mid
def func_45777745748c40bc968130fa788355d5(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the upper bound."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if cum[mid] > cum[b] - cum[mid]:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return hi
def func_e2b8041d7c57449ea4d5677291773e4c(vsota, s, cum, b, p, n):
    """Balance cum[0..b] by bisection and score the split; return the last left-half sum."""
    rem = vsota - (cum[n] - cum[b])  # dead in the original too; kept for parity
    lo, hi = 0, b
    while hi - lo > 1:
        mid = (lo + hi) // 2
        lsum = cum[mid]
        rsum = cum[b] - cum[mid]
        if lsum > rsum:
            hi = mid
        else:
            lo = mid
    s = sorted([cum[lo], cum[b] - cum[lo], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    return lsum
def func_ebaa7d664cc246c1b0ef8ebc7eee97cf(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns right."""
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return right
def func_b20ca598638e4ab78c733240841bde72(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns ar."""
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return ar
def func_69883f0c46114095a2243c4f086e5d3b(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns p."""
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return p
def func_002656a3213e4f33800a4af377353824(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns al."""
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return al
def func_f821991fc57d40f6b06a49c551be2ec5(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns amid."""
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return amid
def func_de9a3bb50bc141c490bb6213860d6e5a(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns left."""
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return left
def func_620d7c1533af42a49e6c8d6d28095e24(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns s."""
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return s
def func_670d535c08b74cac8c818a06eddc1d62(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score the split, fold into best; returns best."""
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    # NOTE(review): 'best' is read before assignment -> UnboundLocalError; presumably a
    # global accumulator in the original program. TODO confirm.
    best = max(best, p)
    return best
def func_929ae02a44be443791d564c65395a889(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns ar."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return ar
def func_a494fc2203cf4d2aaa9c8dcd071a5fec(vsota, s, cum, b, p, n):
    """Balance cum[0..b], score at al, fold into best, rebuild s at ar; returns amid."""
    # NOTE(review): 'al' is never initialised (NameError at the loop test) and 'best'
    # below is read before assignment; sibling variants use al = 0.
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return amid
def func_c9d1d8f728b44c698b4985b1d8c8596a(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `al` and `best` were unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return right
def func_471b7adf96374afbbfa9f9ded6754749(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the split fraction p.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return p
def func_52d08f0d933a4e5385075b8e109234a4(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return best
def func_97d44bd91c2b4e2fa5ebbd076acd8268(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `al` and `best` were unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return left
def func_15710aca4c58460e81e9982be7d33171(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the unsorted ar-based parts.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return s
def func_1ff48d9bd1bd40d3a22058744776afc4(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return al
def func_bde943996d594e1ea9b914b62e6465c7(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return best
def func_bc1213b333af4e119ae1580980feec86(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the final midpoint.

    Fix: `al`, `ar`, `best` were unbound; `amid` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    amid = al  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return amid
def func_d470d7b7b5024d41a04793742f7d03d3(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return ar
def func_70f56a1842784811877cabbc7c3c97fe(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the sorted ar-based parts.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return s
def func_9e6203d6ed5e47d385ebbd4d2e52f3fd(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return al
def func_90c69f2b76b54784a2892c3708b57a32(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `al`, `ar`, `best` were unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return left
def func_96fe1bbbed034cbf808405ced16488cb(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the split fraction p.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return p
def func_ed4318dfd2a5494d94ae23a3e86c4548(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `al`, `ar`, `best` were unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return right
def func_21157f7ced314fde9e19b5cd24ecb8f8(vsota, s, cum, ar, al, b, p, n):
    """Score the al- and ar-based 3-way splits; return the last fraction p.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return p
def func_cd364b77c47b4de7a56a396ed135e889(vsota, s, cum, ar, al, b, p, n):
    """Score the al- and ar-based 3-way splits; return the sorted ar-based parts.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return s
def func_cadfdf5b6ae446b5aee9c8c85ae88eb3(vsota, s, cum, ar, al, b, p, n):
    """Score the al- and ar-based 3-way splits; return the best fraction.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return best
def func_6cdf4629da6f4daa81f5ebb37a219b55(vsota, s, cum, ar, b, p, n):
    """Score the caller-supplied and ar-based splits; return sorted ar-based parts.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return s
def func_dc8f8302398f4a33bc29828e70fa09e3(vsota, s, cum, ar, b, p, n):
    """Score the caller-supplied and ar-based splits; return the last fraction p.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return p
def func_1bf3d602b98c48398bb82c563da72336(vsota, s, cum, ar, b, p, n):
    """Score the caller-supplied and ar-based splits; return the best fraction.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return best
def func_fbb57b90534147309706e0d2573d5000(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return ar
def func_b49190d3b5e144a4901527dd0c722ecf(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the split fraction p.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return p
def func_8ddee00f37cb4f10bc03dec70349a35c(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the sorted 3-part sums.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return s
def func_4e7746abe28b4733a4743ba4a85df21c(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the final midpoint.

    Fix: `best` was unbound; `amid` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    amid = al  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return amid
def func_18e740de90484d28bdf0102b8dfb6370(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return remainder vsota minus tail.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return rem
def func_7c7cf0e09f114ac5b674632cb4d4d49b(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `best` was unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return left
def func_1598d6a2c71a4ecbbdb35693ee2e1321(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return best
def func_2a522c88a886442d88f82fc7b55b8026(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `best` was unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return right
def func_b3ceb9525acc4622a1310f1e6511fa38(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return al
def func_549b3a9c4a594e6f9e6861d1810d34e2(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `best` was unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return right
def func_fa76ee00e42d49bba1fd0027af10a93a(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return best
def func_590db4e7f6e94bd38804cf0bcd89f7b1(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the unsorted ar-based parts.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return s
def func_71b53999f1b04f5e8990143091454a08(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return ar
def func_15c0702f96a4464ea68158ac261ea598(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `best` was unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return left
def func_192fbe0ee4224e52b1bdd7fdc021e77b(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return al
def func_7db2acd6cb3a4b5c9a1f8a10b71edd61(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the split fraction p.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return p
def func_7ec01fca8d9644cf919cf718125213a4(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the final midpoint.

    Fix: `best` was unbound; `amid` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    amid = al  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return amid
def func_13190dcf274444698c65f1060fdb3ff3(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the final midpoint.

    Fix: `al`, `best` were unbound; `amid` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    amid = al  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return amid
def func_a8ad2687325c494f96fec92cb32d0693(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return ar
def func_02dc2ea480504557941de572cf347a90(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the split fraction p.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return p
def func_403e0752609e499a95f16cbafe48222a(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return al
def func_01b1d58e38034dd2a1f619672f0e8f3a(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the sorted ar-based parts.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return s
def func_b4fabecdd569476aba4b209ac5046f1d(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `al`, `best` were unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return right
def func_8a4c7dee1ea44dada20c06654bed3578(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `al`, `best` were unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return left
def func_a984c0e8852c438bb45c9e65057b4f19(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return best
def func_07830965dcff4e149d091bd210d53b8c(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `al`, `ar`, `best` were unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return left
def func_f4ed73a0321b4c0ba150eb41643f5a08(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the final midpoint.

    Fix: `al`, `ar`, `best` were unbound; `amid` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    amid = al  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return amid
def func_91f9095207394f138b6faa8f8e676620(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `al`, `ar`, `best` were unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return right
def func_172e5b92aa8743f7bfbdae09bbba0471(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return ar
def func_c44a4a427859410fb6e318e4b3b773a5(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return best
def func_4414a74186764538b52014f9a9888a1d(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the ar-based fraction p.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return p
def func_9ff12aaefcf8430f8af31a16b0bf50b4(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return al
def func_7f1cab31ffe74046879bf4e12319983a(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the sorted ar-based parts.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return s
def func_3c43a6dbd53d4bbdb03c9a1989106d00(vsota, s, cum, ar, al, b, p, n):
    """Score the al- and ar-based 3-way splits; return the best fraction.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return best
def func_e9b4ddf457334d9b9b97f32d3c554ebd(vsota, s, cum, ar, al, b, p, n):
    """Score the al- and ar-based 3-way splits; return the sorted ar-based parts.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return s
def func_38c47e80cf5643749a9ff33ec082c57b(vsota, s, cum, ar, al, b, p, n):
    """Score the al- and ar-based 3-way splits; return the last fraction p.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return p
def func_0320a28792304fe59973fb00f0bb3c9d(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return remainder vsota minus tail.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return rem
def func_d3caad791b944f228c46ae4eb5f6ad67(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return best
def func_fb5a9abf51484d3aa566f8bc211f2ba6(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return al
def func_442518fdaf9c40f1b51ae115aeb1b47f(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the final midpoint.

    Fix: `best` was unbound; `amid` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    amid = al  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return amid
def func_7907e6f608ec49589a6310455572b009(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return ar
def func_4393e626191544a1abd2a02377d67aae(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `best` was unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return right
def func_646ce98f9f57433facc2efbcabfdf604(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the split fraction p.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return p
def func_d8178afb754e41a1aadf97b457546f81(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the unsorted ar-based parts.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return s
def func_46beeebf3aa340309f70f9b4365ec5c5(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `best` was unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    rem = vsota - (cum[n] - cum[b])
    al = 0
    ar = b
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    return left
def func_77a64ce9cbf3405883373b120d685589(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the final midpoint.

    Fix: `best` was unbound; `amid` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    amid = al  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return amid
def func_42ecfcf2e15f44bab060e135af948513(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return al
def func_fe5254dcbd724f229f36bd8c1b37babe(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the split fraction p.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return p
def func_77ecd64ee7634c36b0d7acdcb4525a98(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return ar
def func_4b2e793a0dd7403ba9180d7d6a8e5170(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return best
def func_2b472f4a270d4f2387fa5a2cbc8c5eca(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `best` was unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return right
def func_bf5c717dcf9a40eeafe64b80b5c461ef(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `best` was unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return left
def func_8c1eafe080e8444bacef49646c23fd31(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the sorted ar-based parts.

    Fix: `best` was read before assignment (NameError); now starts at 0.0.
    """
    best = 0.0  # fix: was used before assignment
    al = 0
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    return s
def func_7bdc19c2a48f4bf98272be8d4b3065e0(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return al
def func_5dd2da837d7c43c8b489a5605ff52a26(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `al`, `best` were unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return right
def func_3538d1dbd98e4e948a6a734175f963ab(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the ar-based fraction p.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return p
def func_fceadc1946f446c995f31c10f152dc77(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `al`, `best` were unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return left
def func_c8a5372df0a842aca2cac31460bb05e9(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return best fraction found.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return best
def func_334de7864c1c4c9fb9ae8e3b1da1af81(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the final midpoint.

    Fix: `al`, `best` were unbound; `amid` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    amid = al  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return amid
def func_3914b00ebf7a486290577701689c6ae4(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the sorted ar-based parts.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return s
def func_87ef9d3d02d44d1ea8b3b7b07fedbed5(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `al` and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    return ar
def func_399646ccf8824f538e900eab6cda6a91(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last left-segment sum.

    Fix: `al`, `ar`, `best` were unbound; `left` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    left = cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return left
def func_c5d544d7991c417599b345e055f186c6(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the ar-based fraction p.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return p
def func_df4f064f41a745a9826b55445b6b0439(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the upper bound index.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return ar
def func_6d5c996dfcbe4bb1a5372b9c179f1a19(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the last right-segment sum.

    Fix: `al`, `ar`, `best` were unbound; `right` could be unbound for b < 2.
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    right = cum[b] - cum[al]  # fix: default when the loop never runs (b < 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return right
def func_7c0ffe22cd1c4849ab2af78fd419d03a(vsota, s, cum, b, p, n):
    """Bisect cum[0:b] toward an even split; return the lower bound index.

    Fix: `al`, `ar`, and `best` were read before assignment (NameError).
    """
    best = 0.0  # fix: was used before assignment
    al = 0  # fix: was never initialised
    ar = b  # fix: was never initialised
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
    s.sort()
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return al
def func_ca2670d6dd6749f6b8c70f696fd3599b(vsota, s, cum, b, p, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return best
def func_afeb9050a570405582242a0c959ee39a(vsota, s, cum, b, p, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return s
def func_7b969bfa94104c20926ec9d0159ad005(vsota, s, cum, b, p, n):
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return amid
def _final_state_b(vsota, cum, b, n):
    """Balanced-cut search on prefix sums ``cum`` over [0, b); shared body for
    the wrappers below (they differ only in which final local they return).

    These originals stop right after sorting the second candidate triple: the
    second score is never computed, so ``p``/``best`` reflect only the ``al``
    cut while ``s`` holds the sorted ``ar`` triple.

    Bug fixed: ``best`` was read by ``best = max(best, p)`` before assignment
    (UnboundLocalError); it is initialised to 0.0 as the complete variants do.
    """
    rem = vsota - (cum[n] - cum[b])
    al, ar = 0, b
    best = 0.0
    amid = left = right = None  # only assigned when the search loop runs (b >= 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = sorted([cum[al], cum[b] - cum[al], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = sorted([cum[ar], cum[b] - cum[ar], cum[n] - cum[b]])
    return {"ar": ar, "p": p, "best": best, "left": left, "amid": amid,
            "rem": rem, "s": s, "right": right, "al": al}


# Wrappers keep the original generated names and signatures.  The ``s`` and
# ``p`` parameters are unused: the originals overwrote them before reading.

def func_962bd401c9644c03bde805278fd06a69(vsota, s, cum, b, p, n):
    """Return the final upper search bound ``ar``."""
    return _final_state_b(vsota, cum, b, n)["ar"]


def func_b3ded44431904aeca4a730534971d74a(vsota, s, cum, b, p, n):
    """Return the ``al`` cut's score (the only one these variants compute)."""
    return _final_state_b(vsota, cum, b, n)["p"]


def func_c12ab90f0973414b99d40532338886b4(vsota, s, cum, b, p, n):
    """Return the best score recorded so far."""
    return _final_state_b(vsota, cum, b, n)["best"]


def func_6f2968686d2b4ce28ee3bd7abef95b87(vsota, s, cum, b, p, n):
    """Return the last left-half sum examined by the search."""
    return _final_state_b(vsota, cum, b, n)["left"]


def func_922a71472a0d43a1a8b418410329815b(vsota, s, cum, b, p, n):
    """Return the last midpoint probed by the search."""
    return _final_state_b(vsota, cum, b, n)["amid"]


def func_632f0a5308584ec4be693dad11fbb5d5(vsota, s, cum, b, p, n):
    """Return ``rem``, the total minus the tail segment sum."""
    return _final_state_b(vsota, cum, b, n)["rem"]


def func_9afa8d8d49184e03a995929cbcd9c975(vsota, s, cum, b, p, n):
    """Return the sorted segment sums of the ``ar`` cut."""
    return _final_state_b(vsota, cum, b, n)["s"]


def func_ccfbef225c2248d298bf62f80c2ef221(vsota, s, cum, b, p, n):
    """Return the last right-half sum examined by the search."""
    return _final_state_b(vsota, cum, b, n)["right"]


def func_fb58aded86f648f6b5ddec19be949c42(vsota, s, cum, b, p, n):
    """Return the final lower search bound ``al``."""
    return _final_state_b(vsota, cum, b, n)["al"]
def _final_state_c(vsota, cum, b, n):
    """Balanced-cut search on prefix sums ``cum`` over [0, b); shared body for
    the wrappers below.

    These originals compute both cut scores but fold only the first (``al``)
    into ``best``; ``p`` ends up holding the second (``ar``) score.

    Bug fixed: ``best`` was read before assignment (UnboundLocalError); it is
    initialised to 0.0 as the complete variants elsewhere in this file do.
    """
    al, ar = 0, b
    best = 0.0
    amid = left = right = None  # only assigned when the search loop runs (b >= 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = sorted([cum[al], cum[b] - cum[al], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = sorted([cum[ar], cum[b] - cum[ar], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota  # second score computed but never folded into best
    return {"s": s, "right": right, "best": best, "left": left,
            "al": al, "p": p, "amid": amid, "ar": ar}


# Wrappers keep the original generated names and signatures.  The ``s`` and
# ``p`` parameters are unused: the originals overwrote them before reading.

def func_d851d33488364f72a4a04d35bb5a75cc(vsota, s, cum, b, p, n):
    """Return the sorted segment sums of the ``ar`` cut."""
    return _final_state_c(vsota, cum, b, n)["s"]


def func_14499f27799d439ea6a3bacbcefda211(vsota, s, cum, b, p, n):
    """Return the last right-half sum examined by the search."""
    return _final_state_c(vsota, cum, b, n)["right"]


def func_07d1c0b7d7b54a48831f5f5c4c67435d(vsota, s, cum, b, p, n):
    """Return the best score recorded (``al`` cut only in this variant)."""
    return _final_state_c(vsota, cum, b, n)["best"]


def func_63a6fe694ea4426db483016b2b4fd21b(vsota, s, cum, b, p, n):
    """Return the last left-half sum examined by the search."""
    return _final_state_c(vsota, cum, b, n)["left"]


def func_3d09c4e02eaf47ebbef9eeb824efed79(vsota, s, cum, b, p, n):
    """Return the final lower search bound ``al``."""
    return _final_state_c(vsota, cum, b, n)["al"]


def func_507da12dc794414fbe6522962dbe0ae1(vsota, s, cum, b, p, n):
    """Return the second (``ar`` cut) score."""
    return _final_state_c(vsota, cum, b, n)["p"]


def func_38b1302020b84397b85e2ca5cf69bc5c(vsota, s, cum, b, p, n):
    """Return the last midpoint probed by the search."""
    return _final_state_c(vsota, cum, b, n)["amid"]


def func_a0f32b2fb77a4e15b9c463c13ea945f1(vsota, s, cum, b, p, n):
    """Return the final upper search bound ``ar``."""
    return _final_state_c(vsota, cum, b, n)["ar"]
def _final_state_d(vsota, cum, b, n):
    """Balanced-cut search on prefix sums ``cum`` over [0, b); shared body for
    the wrappers below.  Both candidate cuts (``al`` and ``ar``) are scored
    and folded into ``best``.

    Bug fixed: the originals kept ``ar = b`` but read ``al`` and ``best``
    before assignment (UnboundLocalError); both are now initialised as the
    complete variants elsewhere in this file do (al = 0, best = 0.0).
    """
    al, ar = 0, b
    best = 0.0
    amid = left = right = None  # only assigned when the search loop runs (b >= 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = sorted([cum[al], cum[b] - cum[al], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = sorted([cum[ar], cum[b] - cum[ar], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return {"p": p, "left": left, "best": best, "s": s, "amid": amid,
            "al": al, "right": right, "ar": ar}


# Wrappers keep the original generated names and signatures.  The ``s`` and
# ``p`` parameters are unused: the originals overwrote them before reading.

def func_14984d1c66834b6f88dc700598d4049a(vsota, s, cum, b, p, n):
    """Return the final score ``p`` (the ``ar`` cut's score)."""
    return _final_state_d(vsota, cum, b, n)["p"]


def func_455564920e0d4dab9f87c0737f787479(vsota, s, cum, b, p, n):
    """Return the last left-half sum examined by the search."""
    return _final_state_d(vsota, cum, b, n)["left"]


def func_9214a53e14c84589b2f4aefa003982fe(vsota, s, cum, b, p, n):
    """Return the best score of the two candidate cuts."""
    return _final_state_d(vsota, cum, b, n)["best"]


def func_1c7dbff8f3e043deb25506c0a8b3cf59(vsota, s, cum, b, p, n):
    """Return the sorted segment sums of the ``ar`` cut."""
    return _final_state_d(vsota, cum, b, n)["s"]


def func_931555f1b5fe465a8ef64e66795bcf70(vsota, s, cum, b, p, n):
    """Return the last midpoint probed by the search."""
    return _final_state_d(vsota, cum, b, n)["amid"]


def func_b70bd0cee5d94124a567c3191b00b370(vsota, s, cum, b, p, n):
    """Return the final lower search bound ``al``."""
    return _final_state_d(vsota, cum, b, n)["al"]


def func_4938cb1854244cb5821117c7b3fec188(vsota, s, cum, b, p, n):
    """Return the last right-half sum examined by the search."""
    return _final_state_d(vsota, cum, b, n)["right"]


def func_552425580cc64e9582f703363e8a623a(vsota, s, cum, b, p, n):
    """Return the final upper search bound ``ar``."""
    return _final_state_d(vsota, cum, b, n)["ar"]
def _final_state_e(vsota, cum, b, n):
    """Balanced-cut search on prefix sums ``cum`` over [0, b); shared body for
    the wrappers below.

    These originals compute both cut scores but fold only the first (``al``)
    into ``best``; ``p`` ends up holding the second (``ar``) score, and the
    preamble also computes ``rem`` (total minus the tail segment).

    Bug fixed: ``best`` was read before assignment (UnboundLocalError); it is
    initialised to 0.0 as the complete variants elsewhere in this file do.
    """
    rem = vsota - (cum[n] - cum[b])
    al, ar = 0, b
    best = 0.0
    amid = left = right = None  # only assigned when the search loop runs (b >= 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = sorted([cum[al], cum[b] - cum[al], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = sorted([cum[ar], cum[b] - cum[ar], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota  # second score computed but never folded into best
    return {"s": s, "left": left, "best": best, "al": al, "p": p,
            "ar": ar, "amid": amid, "right": right, "rem": rem}


# Wrappers keep the original generated names and signatures.  The ``s`` and
# ``p`` parameters are unused: the originals overwrote them before reading.

def func_ed281b635cbf40b2a96fef354e2326e8(vsota, s, cum, b, p, n):
    """Return the sorted segment sums of the ``ar`` cut."""
    return _final_state_e(vsota, cum, b, n)["s"]


def func_d2ec22b1dcd84154bb7fda9379ebbec1(vsota, s, cum, b, p, n):
    """Return the last left-half sum examined by the search."""
    return _final_state_e(vsota, cum, b, n)["left"]


def func_cddcb6e5a5374e208f1e6ee1c20c6b73(vsota, s, cum, b, p, n):
    """Return the best score recorded (``al`` cut only in this variant)."""
    return _final_state_e(vsota, cum, b, n)["best"]


def func_cea8cd4f314d4dea802e41599d76d297(vsota, s, cum, b, p, n):
    """Return the final lower search bound ``al``."""
    return _final_state_e(vsota, cum, b, n)["al"]


def func_24fb9e2842e94d4cbdc900ecb7aa963b(vsota, s, cum, b, p, n):
    """Return the second (``ar`` cut) score."""
    return _final_state_e(vsota, cum, b, n)["p"]


def func_a87863af67204998bb9482cbdf96c4cc(vsota, s, cum, b, p, n):
    """Return the final upper search bound ``ar``."""
    return _final_state_e(vsota, cum, b, n)["ar"]


def func_7855a7881bd64d3fa006241f6643d343(vsota, s, cum, b, p, n):
    """Return the last midpoint probed by the search."""
    return _final_state_e(vsota, cum, b, n)["amid"]


def func_e3b761f98b154604be075d6e3a010f01(vsota, s, cum, b, p, n):
    """Return the last right-half sum examined by the search."""
    return _final_state_e(vsota, cum, b, n)["right"]


def func_d0aaa5cc0f7d4d40bb137d3719a71712(vsota, s, cum, b, p, n):
    """Return ``rem``, the total minus the tail segment sum."""
    return _final_state_e(vsota, cum, b, n)["rem"]
def _final_state_f(vsota, cum, b, n):
    """Balanced-cut search on prefix sums ``cum`` over [0, b); shared body for
    the wrappers below.  Both candidate cuts are scored and folded into
    ``best``.

    Bug fixed: ``best`` was read by ``best = max(best, p)`` before assignment
    (UnboundLocalError); it is initialised to 0.0 as the complete variants
    elsewhere in this file do.
    """
    al, ar = 0, b
    best = 0.0
    amid = left = right = None  # only assigned when the search loop runs (b >= 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = sorted([cum[al], cum[b] - cum[al], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = sorted([cum[ar], cum[b] - cum[ar], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return {"right": right, "left": left, "al": al, "s": s, "amid": amid,
            "ar": ar, "p": p, "best": best}


# Wrappers keep the original generated names and signatures.  The ``s`` and
# ``p`` parameters are unused: the originals overwrote them before reading.

def func_9ddce7db0a874af0bebb6d6841235ff9(vsota, s, cum, b, p, n):
    """Return the last right-half sum examined by the search."""
    return _final_state_f(vsota, cum, b, n)["right"]


def func_3d92191cbc5c4c65a29164726ad73dd4(vsota, s, cum, b, p, n):
    """Return the last left-half sum examined by the search."""
    return _final_state_f(vsota, cum, b, n)["left"]


def func_44b69e46a6214807a5820443c3dcf316(vsota, s, cum, b, p, n):
    """Return the final lower search bound ``al``."""
    return _final_state_f(vsota, cum, b, n)["al"]


def func_646bf0a9ddf9492f949a141cc1163ce7(vsota, s, cum, b, p, n):
    """Return the sorted segment sums of the ``ar`` cut."""
    return _final_state_f(vsota, cum, b, n)["s"]


def func_3ed52c09080e4b38ac93bc23524582ef(vsota, s, cum, b, p, n):
    """Return the last midpoint probed by the search."""
    return _final_state_f(vsota, cum, b, n)["amid"]


def func_3ac03e5be8d94d9e982be540a183e99e(vsota, s, cum, b, p, n):
    """Return the final upper search bound ``ar``."""
    return _final_state_f(vsota, cum, b, n)["ar"]


def func_96de37e5bfdf4b5bbc704f0c55bd4e60(vsota, s, cum, b, p, n):
    """Return the final score ``p`` (the ``ar`` cut's score)."""
    return _final_state_f(vsota, cum, b, n)["p"]


def func_ea2d85eb5ed049e0b117502b04da2a69(vsota, s, cum, b, p, n):
    """Return the best score of the two candidate cuts."""
    return _final_state_f(vsota, cum, b, n)["best"]
def _final_state_g(vsota, cum, b, n):
    """Balanced-cut search on prefix sums ``cum`` over [0, b); shared body for
    the wrappers below.  Both candidate cuts are scored and folded into
    ``best``; the preamble also computes ``rem`` (total minus tail segment).

    Bug fixed: ``best`` was read by ``best = max(best, p)`` before assignment
    (UnboundLocalError); it is initialised to 0.0 as the complete variants
    elsewhere in this file do.
    """
    rem = vsota - (cum[n] - cum[b])
    al, ar = 0, b
    best = 0.0
    amid = left = right = None  # only assigned when the search loop runs (b >= 2)
    while al + 1 < ar:
        amid = (al + ar) // 2
        left = cum[amid]
        right = cum[b] - cum[amid]
        if left > right:
            ar = amid
        else:
            al = amid
    s = sorted([cum[al], cum[b] - cum[al], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    s = sorted([cum[ar], cum[b] - cum[ar], cum[n] - cum[b]])
    p = (s[0] + s[1]) / vsota
    best = max(best, p)
    return {"p": p, "s": s, "ar": ar, "amid": amid, "al": al,
            "best": best, "rem": rem, "right": right, "left": left}


# Wrappers keep the original generated names and signatures.  The ``s`` and
# ``p`` parameters are unused: the originals overwrote them before reading.

def func_0d72093dc72245b9ab79438e9ed05864(vsota, s, cum, b, p, n):
    """Return the final score ``p`` (the ``ar`` cut's score)."""
    return _final_state_g(vsota, cum, b, n)["p"]


def func_828504af1f6d4f0b95ad42f3714ff842(vsota, s, cum, b, p, n):
    """Return the sorted segment sums of the ``ar`` cut."""
    return _final_state_g(vsota, cum, b, n)["s"]


def func_f52a824d8d7d4044aac67ee3c1ff3b4c(vsota, s, cum, b, p, n):
    """Return the final upper search bound ``ar``."""
    return _final_state_g(vsota, cum, b, n)["ar"]


def func_8f91055ab55b42619cde2daefa724df0(vsota, s, cum, b, p, n):
    """Return the last midpoint probed by the search."""
    return _final_state_g(vsota, cum, b, n)["amid"]


def func_1d0af9e4f8d9479fbf94b07b882fa7a5(vsota, s, cum, b, p, n):
    """Return the final lower search bound ``al``."""
    return _final_state_g(vsota, cum, b, n)["al"]


def func_09ecabe1fa5b4858a42d495c5822c921(vsota, s, cum, b, p, n):
    """Return the best score of the two candidate cuts."""
    return _final_state_g(vsota, cum, b, n)["best"]


def func_d27c24d16f194c1a8b98d0d9b3ea84c9(vsota, s, cum, b, p, n):
    """Return ``rem``, the total minus the tail segment sum."""
    return _final_state_g(vsota, cum, b, n)["rem"]


def func_d1bd49c27aff497985958be69c16e396(vsota, s, cum, b, p, n):
    """Return the last right-half sum examined by the search."""
    return _final_state_g(vsota, cum, b, n)["right"]


def func_39a4f8fc4af143e389b61ba2cb6daf61(vsota, s, cum, b, p, n):
    """Return the last left-half sum examined by the search."""
    return _final_state_g(vsota, cum, b, n)["left"]
# Small generated helpers.  Dead locals are removed and the Python-2
# loop-variable leak (``return i`` after a list comprehension, a NameError
# under Python 3) is repaired by materialising the loop explicitly.

def func_5a1d7d9e50594971b43b0d1613f75ab2(q, s, r, p, n):
    """Return the pseudo-random sequence l[i] = (i*p + q) % r + s for i < n."""
    return [(i * p + q) % r + s for i in range(n)]


def func_3ff90242573f42f7961a7171e7ff90fc(q, s, r, p, n):
    """Generate the sequence and return the last index used (n - 1).

    The original returned the comprehension variable ``i``, which leaked in
    Python 2 but raises NameError in Python 3; an explicit loop restores the
    intended value (and still raises a NameError subclass when n == 0).
    """
    l = []
    for i in range(n):
        l.append((i * p + q) % r + s)
    return i


def func_29cd2d7e7b874493984c77518a44467e(q, s, r, p, n):
    """Return the sum of the generated sequence."""
    return sum((i * p + q) % r + s for i in range(n))


def func_0624f3dd50434e2cb497ca1382445c24(l):
    """Return a fresh prefix-sum seed list [0] (sum(l) was dead code)."""
    return [0]


def func_bd0ddcb706654b92a5ea8f4e5bc047fc(l):
    """Return the total of ``l``."""
    return sum(l)


def func_4a321c77087c4f28ba611330523d875b(l):
    """Return the prefix-sum array of ``l``, starting with 0."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return cum


def func_66487c945fa14da3989a2fd33276a580(l):
    """Build the prefix sums (discarded) and return the last element of ``l``.

    Raises UnboundLocalError for empty ``l``, exactly like the original.
    """
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return x


def func_560be556345f4c1f95e0d49b38de689c(cum, l):
    """Extend ``cum`` in place with the running sums of ``l``; return 0.0."""
    for x in l:
        cum.append(cum[-1] + x)
    return 0.0


def func_6beeb6279a5b4bba931a9ce945d37685(cum, l):
    """Extend ``cum`` in place with running sums of ``l``; return the last x."""
    for x in l:
        cum.append(cum[-1] + x)
    return x
def func_752d606d26234a30baef351aa49c1600(vsota, s, cum, p, n):
    """Return the best normalised split score over every boundary b.

    ``cum`` is a prefix-sum array of length n + 1.  For each boundary b the
    prefix [0, b) is cut at the most balanced point (found by binary search);
    a cut's score is the sum of the two smallest of the three segment sums,
    divided by the grand total ``vsota``.  The ``s`` and ``p`` parameters are
    overwritten before use and only kept for signature compatibility.
    """
    best = 0.0
    for b in range(n - 1, -1, -1):
        lo, hi = 0, b
        # Binary search: find adjacent cuts lo/hi straddling the balance point
        # where the left part first exceeds the right part of [0, b).
        while lo + 1 < hi:
            mid = (lo + hi) // 2
            if cum[mid] > cum[b] - cum[mid]:
                hi = mid
            else:
                lo = mid
        # Score both bracketing cuts and keep the better outcome.
        for cut in (lo, hi):
            parts = sorted((cum[cut], cum[b] - cum[cut], cum[n] - cum[b]))
            best = max(best, (parts[0] + parts[1]) / vsota)
    return best
def _scan_state_j(vsota, cum, n):
    """Run the full best-split scan over every boundary b and return the final
    value of each local in a dict.

    Factored out of the nine structurally identical generated wrappers below,
    which differ only in which final local they return.  Keys for ``amid``/
    ``left``/``right`` exist only if the inner search loop ever ran (requires
    some b >= 2), mirroring the originals' unbound-local behaviour.
    """
    best = 0.0
    state = {}
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al, ar = 0, b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
            state["amid"], state["left"], state["right"] = amid, left, right
        s = sorted([cum[al], cum[b] - cum[al], cum[n] - cum[b]])
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = sorted([cum[ar], cum[b] - cum[ar], cum[n] - cum[b]])
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        state.update(b=b, rem=rem, al=al, ar=ar, s=s, p=p)
    state["best"] = best
    return state


# Wrappers keep the original generated names and signatures.  The ``s`` and
# ``p`` parameters are unused: the originals overwrote them before reading.

def func_46309d3bdcd9470ba98995fca301be00(vsota, s, cum, p, n):
    """Return the last midpoint probed across the whole scan."""
    return _scan_state_j(vsota, cum, n)["amid"]


def func_5ed8723e583345258856782fd7da1978(vsota, s, cum, p, n):
    """Return the final ``rem`` (b == 0 leaves rem == 0)."""
    return _scan_state_j(vsota, cum, n)["rem"]


def func_5d63b4fb7d664cc49142e0998f170afd(vsota, s, cum, p, n):
    """Return the final upper search bound ``ar``."""
    return _scan_state_j(vsota, cum, n)["ar"]


def func_f8f0e6cb0d6b4bbfaea9cedad7d1bda7(vsota, s, cum, p, n):
    """Return the final lower search bound ``al``."""
    return _scan_state_j(vsota, cum, n)["al"]


def func_f7e4d5602b3741998613ede464f28ab5(vsota, s, cum, p, n):
    """Return the last score computed (for boundary b == 0)."""
    return _scan_state_j(vsota, cum, n)["p"]


def func_613581661f2d4340b8de4437d31481ea(vsota, s, cum, p, n):
    """Return the last right-half sum examined across the scan."""
    return _scan_state_j(vsota, cum, n)["right"]


def func_d0915ef86c984e4090bf793b45f9acd1(vsota, s, cum, p, n):
    """Return the last left-half sum examined across the scan."""
    return _scan_state_j(vsota, cum, n)["left"]


def func_75c4851d6ef24739927f324392d652ec(vsota, s, cum, p, n):
    """Return the final boundary value ``b`` (always 0 for n >= 1)."""
    return _scan_state_j(vsota, cum, n)["b"]


def func_1e2d6a871baf4c2ea6ced5cdc248666b(vsota, s, cum, p, n):
    """Return the last sorted segment-sum triple."""
    return _scan_state_j(vsota, cum, n)["s"]
def func_ae0914f069f142c7b2b03298c239f783(vsota, s, cum, p, n):
    """Return the best normalised split score over every boundary b.

    For each b the prefix-sum array ``cum`` is binary-searched for the most
    balanced cut of [0, b); a cut's score is the sum of the two smallest of
    the three segment sums divided by ``vsota``.

    Bug fixed: ``best`` was read by ``best = max(best, p)`` before ever being
    assigned, raising UnboundLocalError on the first iteration; it is now
    initialised to 0.0, matching the complete variant of this routine earlier
    in the file.  The ``s`` and ``p`` parameters are unused (overwritten
    before reading) and kept only for signature compatibility.
    """
    best = 0.0
    for b in range(n - 1, -1, -1):
        al, ar = 0, b
        while al + 1 < ar:
            amid = (al + ar) // 2
            if cum[amid] > cum[b] - cum[amid]:
                ar = amid
            else:
                al = amid
        # Score both bracketing cuts and keep the better outcome.
        for cut in (al, ar):
            parts = sorted([cum[cut], cum[b] - cum[cut], cum[n] - cum[b]])
            best = max(best, (parts[0] + parts[1]) / vsota)
    return best
# Small generated helpers.  Dead locals are removed and the Python-2
# loop-variable leak (``return i`` after a list comprehension, a NameError
# under Python 3) is repaired by materialising the loop explicitly.

def func_563511baef8548b2bc009687b233dab0(q, s, r, p, n):
    """Return the pseudo-random sequence l[i] = (i*p + q) % r + s for i < n."""
    return [(i * p + q) % r + s for i in range(n)]


def func_887746103a7e4ed7a31065bacb287a6b(q, s, r, p, n):
    """Return a fresh prefix-sum seed list [0] (l and vsota were dead code)."""
    return [0]


def func_5bf961990e2641ada95df6e3c3623b07(q, s, r, p, n):
    """Generate the sequence and return the last index used (n - 1).

    The original returned the comprehension variable ``i``, which leaked in
    Python 2 but raises NameError in Python 3; an explicit loop restores the
    intended value (and still raises a NameError subclass when n == 0).
    """
    l = []
    for i in range(n):
        l.append((i * p + q) % r + s)
    return i


def func_a96fb315bf2541e5b21f16714c5ee556(q, s, r, p, n):
    """Return the sum of the generated sequence."""
    return sum((i * p + q) % r + s for i in range(n))


def func_684b933c52eb404b88eee1fdf8486a71(l):
    """Build the prefix sums (discarded) and return the last element of ``l``.

    Raises UnboundLocalError for empty ``l``, exactly like the original.
    """
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return x


def func_64dbb86ea41b48c890fd0574ae1446f1(l):
    """Return the total of ``l``."""
    return sum(l)


def func_4e27dddb3cca4a19b8eb49332bf6eeb9(l):
    """Return the prefix-sum array of ``l``, starting with 0."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return cum


def func_98844474a18b485fb6e4fd1189ae0848(l):
    """Return the prefix-sum array of ``l`` (``best`` was dead code)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return cum


def func_8e0b1bd0a70949ffb3cdb6d3804a49bc(l):
    """Build the prefix sums (discarded) and return the 0.0 score seed."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return 0.0


def func_1533683bfba64c4ebdb3a89f9b818002(l):
    """Build the prefix sums (discarded) and return the last element of ``l``.

    Raises UnboundLocalError for empty ``l``, exactly like the original.
    """
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return x
def _scan_state_m(vsota, cum, l, n):
    """Extend ``cum`` with the running sums of ``l`` (mutating the caller's
    list, as the originals did), run the full best-split scan, and return the
    final value of each local in a dict.

    Factored out of the five structurally identical generated wrappers below,
    which differ only in which final local they return.  The ``x`` key exists
    only when ``l`` is non-empty; ``amid``/``left``/``right`` only if the
    inner search loop ever ran — mirroring the originals' unbound locals.
    """
    state = {}
    for x in l:
        cum.append(cum[-1] + x)
        state["x"] = x
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al, ar = 0, b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
            state["amid"], state["left"], state["right"] = amid, left, right
        s = sorted([cum[al], cum[b] - cum[al], cum[n] - cum[b]])
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = sorted([cum[ar], cum[b] - cum[ar], cum[n] - cum[b]])
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        state.update(b=b, rem=rem, al=al, ar=ar, s=s, p=p)
    state["best"] = best
    return state


# Wrappers keep the original generated names and signatures.  The ``s`` and
# ``p`` parameters are unused: the originals overwrote them before reading.

def func_f45f6912fe8844d28bb958444abd4adb(vsota, s, cum, p, l, n):
    """Return the last midpoint probed across the whole scan."""
    return _scan_state_m(vsota, cum, l, n)["amid"]


def func_f524a4b3ef904760b9f5c51ffca9bd2d(vsota, s, cum, p, l, n):
    """Return the last right-half sum examined across the scan."""
    return _scan_state_m(vsota, cum, l, n)["right"]


def func_e3820101c8a3435a8844966b4c6d15af(vsota, s, cum, p, l, n):
    """Return the final boundary value ``b`` (always 0 for n >= 1)."""
    return _scan_state_m(vsota, cum, l, n)["b"]


def func_87284661e92546ea8778a91e60dda22f(vsota, s, cum, p, l, n):
    """Return the last element of ``l`` consumed while extending ``cum``."""
    return _scan_state_m(vsota, cum, l, n)["x"]


def func_5e483f554af94a01a4fd3c7596764f86(vsota, s, cum, p, l, n):
    """Return the last sorted segment-sum triple."""
    return _scan_state_m(vsota, cum, l, n)["s"]
def func_6b1d929c1d4c49428cc72c07f3c0805c(vsota, s, cum, p, l, n):
for x in l:
cum.append(cum[-1] + x)
best = 0.0
for b in range(n - 1, -1, -1):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return best
def func_9dc70f1eca4e4d16a0f246b5238b88ee(vsota, s, cum, p, l, n):
for x in l:
cum.append(cum[-1] + x)
best = 0.0
for b in range(n - 1, -1, -1):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return left
def func_7fb33953af6c4b16833eaa83d3e31d93(vsota, s, cum, p, l, n):
for x in l:
cum.append(cum[-1] + x)
best = 0.0
for b in range(n - 1, -1, -1):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return p
def func_04a10d693a68472fb689bac29aa7d5b8(vsota, s, cum, p, l, n):
for x in l:
cum.append(cum[-1] + x)
best = 0.0
for b in range(n - 1, -1, -1):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return rem
def func_32341e35154c4a1a9e47afa69eb296c4(vsota, s, cum, p, l, n):
for x in l:
cum.append(cum[-1] + x)
best = 0.0
for b in range(n - 1, -1, -1):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return al
def func_e1053851cd564eba965630e6aaddd864(vsota, s, cum, p, l, n):
for x in l:
cum.append(cum[-1] + x)
best = 0.0
for b in range(n - 1, -1, -1):
rem = vsota - (cum[n] - cum[b])
al = 0
ar = b
while al + 1 < ar:
amid = (al + ar) // 2
left = cum[amid]
right = cum[b] - cum[amid]
if left > right:
ar = amid
else:
al = amid
s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
s.sort()
p = (s[0] + s[1]) / vsota
best = max(best, p)
return ar
def func_b61f3ddd91754bb7b2f81ab401b5bcc8(vsota, s, cum, p, n):
    """Core ternary-split search over a precomputed prefix-sum list.

    For every right split ``b`` a binary search locates the left split that
    balances the first two segments; both bracketing candidates are scored
    as (sum of the two smallest segments) / vsota, and the maximum score
    seen is returned.  ``cum`` must already hold n + 1 prefix sums.
    """
    best = 0.0
    for b in range(n - 1, -1, -1):
        lo, hi = 0, b
        while lo + 1 < hi:
            mid = (lo + hi) // 2
            if cum[mid] > cum[b] - cum[mid]:
                hi = mid
            else:
                lo = mid
        for a in (lo, hi):
            three = sorted((cum[a], cum[b] - cum[a], cum[n] - cum[b]))
            best = max(best, (three[0] + three[1]) / vsota)
    return best
# --- Short variants: build the sequence and/or its prefix sums, then return
# one of the intermediates.  Several return leaked loop variables (noted).
def func_1c9ff699e56540f09d1f8b14f0f5b2fe(q, s, r, p, n):
    """Build the sequence a_i = (i*p + q) % r + s and its prefix sums; return the sequence ``l``."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return l
def func_6380437294e14950867c975ec1bd65e8(q, s, r, p, n):
    """Build the sequence and its prefix sums; return the prefix-sum list ``cum``."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return cum
def func_b59996bcec58446a8422bd281b371f95(q, s, r, p, n):
    """Build the sequence and its prefix sums; return leaked ``x`` -- the last element (unbound when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return x
def func_07357508ba4a48528d852927f71e3857(q, s, r, p, n):
    """Build the sequence and its prefix sums; return ``vsota`` -- the sequence total."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return vsota
def func_df0c7d54f81a4e4bac81bd5d279339c5(q, s, r, p, n):
    """Build the sequence and its prefix sums; return ``i`` leaked from the
    list comprehension -- Python 2 only (NameError under Python 3)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    return i
def func_6ca246f5f6c64f9a810c87cd47efe514(l):
    """Prefix sums of *l*; return the list ``cum`` (sum and best are discarded)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return cum
def func_21f77a2097554e7a89f21e47cf37f796(l):
    """Prefix sums of *l* (discarded); return ``vsota`` = sum(l)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return vsota
def func_501c9d54cd14420481de1285888dd06d(l):
    """Prefix sums of *l*; return leaked ``x`` -- the last element (unbound when l is empty)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return x
def func_36c99378eb3046cab95f323bbd0b0d9f(l):
    """Prefix sums of *l* (discarded); always return the initial score 0.0."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return best
# --- Same ternary-split scan as above, but these variants build the
# prefix-sum list ``cum`` locally from [0] instead of mutating an argument.
# Each differs only in which local it returns.
def func_5fe6ec58c9a3457aa32d8bcc596e2caf(vsota, s, p, l, n):
    """Scan; returns ``al`` from the last iteration (unbound when n == 0)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return al
def func_4f0f9f40aa9f4e209e9655de05a5a0ac(vsota, s, p, l, n):
    """Scan; returns ``s`` -- the last sorted segment triple (the argument when n == 0)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return s
def func_858315ae603b461fa80eac973e88ea4a(vsota, s, p, l, n):
    """Scan; returns ``ar`` from the last iteration (unbound when n == 0)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return ar
def func_2d009adb4f7c42f6ab0fc58226ebf8d4(vsota, s, p, l, n):
    """Scan; returns leaked ``x`` -- the last element of ``l`` (unbound when l is empty)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return x
def func_edc7d6e9a50a4ac0ad03073b4a988617(vsota, s, p, l, n):
    """Scan; returns leaked ``b`` (0 when n >= 1, unbound when n == 0)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return b
def func_8234eb8cdcc44e4c83a06d4bd0bb55db(vsota, s, p, l, n):
    """Scan; returns last ``left`` (unbound if the inner search never runs)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return right
def func_2ff4926043114d9ea0f523c1090d3d47(vsota, s, p, l, n):
    """Scan; returns last ``right`` (unbound if the inner search never runs)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return right
def func_eb26f37d503e4d908bd621b1725ba2a8(vsota, s, p, l, n):
    """Scan; returns last ``amid`` (unbound if the inner search never runs)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return amid
def func_17ae2312ec66476eb36abb126f7be05e(vsota, s, p, l, n):
    """Scan; returns ``best`` -- the maximal two-smallest-segments score."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return best
def func_1da24c71949d44589b711a1f19f63d21(vsota, s, p, l, n):
    """Scan; returns ``p`` -- the last score computed (the argument when n == 0)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return p
def func_4c47c580d15d4c4997907ea46c029232(vsota, s, p, l, n):
    """Scan; returns last ``rem`` -- the suffix remainder at b == 0 (unbound when n == 0)."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return rem
def func_a860946e9a67458ab47cbcda217a175d(vsota, s, p, l, n):
    """Scan; returns the locally-built prefix-sum list ``cum``."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return cum
def func_2bb384990d66440ab353ce1a0071c44b(vsota, s, cum, p, l, n):
    """Extend *cum* IN PLACE with the prefix sums of *l*, then return the
    best (two smallest of three segments) / vsota score over all split
    points, using a binary search for the left split at each right split."""
    for item in l:
        cum.append(cum[-1] + item)
    best = 0.0
    for b in range(n - 1, -1, -1):
        lo, hi = 0, b
        while lo + 1 < hi:
            mid = (lo + hi) // 2
            if cum[mid] > cum[b] - cum[mid]:
                hi = mid
            else:
                lo = mid
        for a in (lo, hi):
            three = sorted((cum[a], cum[b] - cum[a], cum[n] - cum[b]))
            best = max(best, (three[0] + three[1]) / vsota)
    return best
# --- Short variants: generate the sequence, its total, prefix sums and the
# initial best score, then return one of those intermediates.
def func_6e9b8ca81b7a46ef8b4885f229934f65(q, s, r, p, n):
    """Build sequence a_i = (i*p + q) % r + s; return the prefix-sum list ``cum``."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return cum
def func_0cf66f4d89354bbe8913856332912b42(q, s, r, p, n):
    """Build the sequence; return leaked ``x`` -- its last element (unbound when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return x
def func_f06ad2fcd44946cb88861e6387f71d62(q, s, r, p, n):
    """Build the sequence (discarded); always return the initial score 0.0."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return best
def func_684b93d1375a465f8a1213977c72f315(q, s, r, p, n):
    """Build the sequence; return ``vsota`` -- its total."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return vsota
def func_dc81d0657a7d46a683f062b677154e3b(q, s, r, p, n):
    """Build the sequence; return the sequence ``l`` itself."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return l
def func_0d037852158f4795b544cde8c7056985(q, s, r, p, n):
    """Build the sequence; return ``i`` leaked from the list comprehension --
    Python 2 only (NameError under Python 3)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    return i
# --- Same ternary-split scan; these variants also compute ``vsota`` = sum(l)
# locally (the final one, func_2c580d2b..., takes it precomputed).  Each
# differs only in which local it returns.
def func_84d225a5b42449a5a80ff935d36ffdd6(s, p, l, n):
    """Scan; returns last ``amid`` (unbound if the inner search never runs)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return amid
def func_9397bec11f1341f38eb269ad1da335e5(s, p, l, n):
    """Scan; returns leaked ``b`` (0 when n >= 1, unbound when n == 0)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return b
def func_c96f57307551439da21c26a1683fe198(s, p, l, n):
    """Scan; returns last ``right`` (unbound if the inner search never runs)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return right
def func_1d3b1342d13f42d6b9b901abdd0a7530(s, p, l, n):
    """Scan; returns ``vsota`` -- the total of ``l``."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return vsota
def func_26575656ebf14862aa1839e9dda37c82(s, p, l, n):
    """Scan; returns ``ar`` from the last iteration (unbound when n == 0)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return ar
def func_1266ab18c1db474990deb208ed7f9a9c(s, p, l, n):
    """Scan; returns leaked ``x`` -- the last element of ``l`` (unbound when l is empty)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return x
def func_02202c35bb6b40538d4bb960bc782654(s, p, l, n):
    """Scan; returns ``p`` -- the last score computed (the argument when n == 0)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return p
def func_aaa8d029cc3242f1b0409937dffc4896(s, p, l, n):
    """Scan; returns ``s`` -- the last sorted segment triple (the argument when n == 0)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return s
def func_792cf28b171a4726bf058f483d97a142(s, p, l, n):
    """Scan; returns last ``rem`` -- the suffix remainder at b == 0 (unbound when n == 0)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return rem
def func_737593e0dc0b40f78082cc9c9015d346(s, p, l, n):
    """Scan; returns last ``left`` (unbound if the inner search never runs)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return left
def func_c9c455c7a5974a90be0871c8ccb83ae6(s, p, l, n):
    """Scan; returns ``al`` from the last iteration (unbound when n == 0)."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return al
def func_f85911c133084020b6586df2b835ef50(s, p, l, n):
    """Scan; returns the prefix-sum list ``cum``."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return cum
def func_3a89b16e862544a4841161072a1e0170(s, p, l, n):
    """Scan; returns ``best`` -- the maximal two-smallest-segments score."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return best
def func_2c580d2b330e43ec8541e4bfaaab8be0(vsota, s, p, l, n):
    """Scan with ``vsota`` supplied by the caller; returns ``best``."""
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return best
# --- Full-pipeline variants: generate the sequence a_i = (i*p + q) % r + s,
# then run the same ternary-split scan; each differs only in which local it
# returns.  func_511de39f... additionally depends on Python 2
# list-comprehension scope leakage (``return i``).
def func_f337d088cd454430b6c8999e23b4b594(q, s, r, p, n):
    """Generate + scan; returns ``ar`` from the last iteration (unbound when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return ar
def func_ac146c24394445eb9c0aa6864b9eda60(q, s, r, p, n):
    """Generate + scan; returns leaked ``b`` (0 when n >= 1, unbound when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return b
def func_8df637b61aad482d935f9900d42736da(q, s, r, p, n):
    """Generate + scan; returns last ``right`` (unbound if the inner search never runs)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return right
def func_f3d3034e7ad040cf990b672b4cc9daf2(q, s, r, p, n):
    """Generate + scan; returns the prefix-sum list ``cum``."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return cum
def func_0aab367d05ef46b390e35c8884accce0(q, s, r, p, n):
    """Generate + scan; returns leaked ``x`` -- the last sequence element (unbound when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return x
def func_211a1aaca4cc464a948e7eb4e80baa9a(q, s, r, p, n):
    """Generate + scan; returns last ``rem`` -- the suffix remainder at b == 0 (unbound when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return rem
def func_756fd32fd67d444e8e2612ac70c7d8fa(q, s, r, p, n):
    """Generate + scan; returns ``al`` from the last iteration (unbound when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return al
def func_a055967bb6b4433db847ced1069154e3(q, s, r, p, n):
    """Generate + scan; returns ``best`` -- the maximal two-smallest-segments score."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return best
def func_5a9c0c9ed38744f183f344262b3432a3(q, s, r, p, n):
    """Generate + scan; returns last ``amid`` (unbound if the inner search never runs)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return amid
def func_dc58a11994434084b00a1ee96606a8ac(q, s, r, p, n):
    """Generate + scan; returns ``s`` -- the last sorted segment triple (the argument when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return s
def func_511de39f78054dca83eea08e84433180(q, s, r, p, n):
    """Generate + scan; returns ``i`` leaked from the list comprehension --
    Python 2 only (NameError under Python 3)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return i
def func_ce020307352b48219d259a4b96a7545f(q, s, r, p, n):
    """Generate + scan; returns ``p`` -- the last score computed (the argument when n == 0)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return p
def func_b2f81494903341e58d7c1006e6833bda(q, s, r, p, n):
    """Generate + scan; returns last ``left`` (unbound if the inner search never runs)."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return left
def func_f896245351e64ee69b87c46ea56272f1(q, s, r, p, n):
    """Generate + scan; returns ``vsota`` -- the sequence total."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return vsota
def func_43933f774c194a439eeb302f07298b77(q, s, r, p, n):
    """Generate + scan; returns the generated sequence ``l``."""
    l = [((i * p + q) % r + s) for i in range(n)]
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return l
def func_e00277bf12a44fa5a8c73dce8346a802(s, p, l, n):
    """Scan over a caller-supplied ``l``; returns ``best``."""
    vsota = sum(l)
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        rem = vsota - (cum[n] - cum[b])
        al = 0
        ar = b
        while al + 1 < ar:
            amid = (al + ar) // 2
            left = cum[amid]
            right = cum[b] - cum[amid]
            if left > right:
                ar = amid
            else:
                al = amid
        s = [cum[al], cum[b] - cum[al], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
        s = [cum[ar], cum[b] - cum[ar], cum[n] - cum[b]]
        s.sort()
        p = (s[0] + s[1]) / vsota
        best = max(best, p)
    return best
def func_e5c313ffdfcd45c89fb28993b5373cd0(q, s, r, p, n):
    """Generate ((i*p + q) % r + s) for i in [0, n) and return the best split.

    The list is cut into three contiguous segments; for each pair of cut
    points the two smallest segment sums a <= b are taken, and the maximum
    of (a + b) / total over all cuts is returned.

    Fixes vs. original: removed the dead local ``rem`` (assigned, never
    read) and stopped shadowing the parameters ``s`` and ``p`` inside the
    scan loop.

    Raises ZeroDivisionError if the generated sequence sums to zero.
    """
    l = [(i * p + q) % r + s for i in range(n)]
    vsota = sum(l)
    # cum[k] is the sum of the first k elements of l (prefix sums).
    cum = [0]
    for x in l:
        cum.append(cum[-1] + x)
    best = 0.0
    for b in range(n - 1, -1, -1):
        # Binary-search the first split a in [0, b] where the left segment
        # sum exceeds the middle one; the optimum lies at al or ar.
        al, ar = 0, b
        while al + 1 < ar:
            amid = (al + ar) // 2
            if cum[amid] > cum[b] - cum[amid]:
                ar = amid
            else:
                al = amid
        for a in (al, ar):
            parts = sorted([cum[a], cum[b] - cum[a], cum[n] - cum[b]])
            best = max(best, (parts[0] + parts[1]) / vsota)
    return best
# NOTE(review): the driver functions below are machine-sliced variants of a
# single Google Code Jam (Y14R5P1, problem A) main routine.  They use the
# Python 2 `print` statement and call a module-level solve() defined
# elsewhere in this file; kept byte-identical, documentation only.
# Opens the contest input file and reads the test-case count; returns the
# still-open file handle (the caller is responsible for closing it).
def func_2754417876f34e4888131d1e4de4ab0f():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    return infile
# Same as above, but returns the parsed test-case count `t`; the file
# handle is leaked (never closed).
def func_feeae1e307cf4d48844505f667b8e4f4():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    return t
# Variants that read `t` test cases from an already-open `infile`, print
# "Case #k: <answer>" for each via solve(), and differ only in which local
# of the final iteration they return.  The file handle is left open.
# Variant: returns `q` of the last case.
def func_77b540ea10214a669af5607c8f3ed026(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return q
# Variant: returns `s` of the last case.
def func_f8aaa9e4d3f34eff8f9a263dd8ebcf42(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return s
# Variant: returns `n` of the last case.
def func_ad89a1d8f4ee48b48cf1e3be5f055b43(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return n
# Variant: returns `p` of the last case.
def func_e684277e6bb4403197442599baf48644(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return p
# Variant: returns `r` of the last case.
def func_432bfa5e35124dbe81e72a8bde323ad3(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return r
# Variant: returns the case count `t`.
def func_b9e64e96758649399a5fccbd8a17f12d(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return t
# Variant: returns the last `test_case` index (== t when t >= 1).
def func_a90b8dfc7842451dbad28fba02ae7bef(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return test_case
# Variants that take the case count `t` and an open `infile`, print one
# "Case #k: <answer>" per case via solve(), close the file, and differ only
# in the returned local.
# Variant: returns `p` of the last case.
def func_8cfeb1066b6a4197ab79a36ef168ed4a(t, infile):
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return p
# Variant: returns `r` of the last case.
def func_b622543ba1bb4f919edd3f5fe922ab64(t, infile):
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return r
# Variant: returns `s` of the last case.
def func_695108182dab45ba905b344e2f5d57d4(t, infile):
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return s
# Variant: returns `n` of the last case.
def func_b322de7e33bc4db29f3a471ffd916521(t, infile):
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return n
# Variant: returns `q` of the last case.
def func_0598737997e84fae9046f073daac82cf(t, infile):
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return q
# Variant: returns the last `test_case` index.
def func_5e014fa9c24f4ae8b4b05abeb90c2341(t, infile):
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return test_case
# Self-contained variants: open the input file themselves, process all
# cases via solve(), and return one local.  None of these close the file
# handle (it is leaked).
# Variant: returns `p` of the last case.
def func_cf71ee69a9b64638a6691741ed6ebd24():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return p
# Variant: returns the last `test_case` index.
def func_70dc737b8b5145b3b01154a6dfa3aa07():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return test_case
# Variant: returns the still-open file handle.
def func_190fdd4ecbdd47baafd23722f95a5fe6():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return infile
# Variant: returns `r` of the last case.
def func_34e3a905d6d04c769e8cf0ac707111e4():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return r
# Variant: returns the case count `t`.
def func_c290727c71df43a2a0ce026de3b58beb():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return t
# Variant: returns `q` of the last case.
def func_564eef9ebdb04b159222d90e1012e5dd():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return q
# Variant: returns `s` of the last case.
def func_8c3a0eec9e0e4b479fc6382277275a09():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return s
# Variant: returns `n` of the last case.
def func_0b01a635b02246898a2423067da936ca():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    return n
# Variants that take an open `infile`, read the case count themselves,
# process every case via solve(), close the file, and return one local.
# Variant: returns `n` of the last case.
def func_13e80f6d868e436fae5eec0f94b6434f(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return n
# Variant: returns `r` of the last case.
def func_77f5fd74c4ed45ad81f6faf26ae32ff9(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return r
# Variant: returns `q` of the last case.
def func_3c981cdf7cd5445d9158a49eaa8d463e(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return q
# Variant: returns the last `test_case` index.
def func_c83f575793f044d583a314f1231997e2(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return test_case
# Variant: returns `s` of the last case.
def func_4d5e6cd9e6e04672b445eaf373b22132(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return s
# Variant: returns `p` of the last case.
def func_9d0a6484cf6d4fe09d0d42e352c0cd2f(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return p
# Variant: returns the case count `t`.
def func_11b4851670a14ef5a1a33c042d64d0e7(infile):
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return t
# Fully self-contained variants: open the input file, process all cases via
# solve(), close the file, and return one local.
# Variant: returns `r` of the last case.
def func_d479679646d2428491995dc5a62be8f6():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return r
# Variant: returns `s` of the last case.
def func_a42a0f84b50943fcb23b7df2318193bb():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return s
# Variant: returns `q` of the last case.
def func_25fc652dcd9642dea4e56e1d1e970b4f():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return q
# Variant: returns `p` of the last case.
def func_c5390140321c45c99bebce638377cde4():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return p
# Variant: returns the last `test_case` index.
def func_3ec859608fda4033853cc5fd967ae4b1():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return test_case
# Variant: returns `n` of the last case.
def func_0972881919434aadb563e88cc2ccd898():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return infile
# Variant: returns the already-CLOSED file handle — callers must not read it.
def func_f39848f4299a42988543fc2dac21efd3():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return infile
# Variant: returns the case count `t`.
def func_d169ce73cd164cd08dd3f68f80735eb6():
    infile = open('codejam/test_files/Y14R5P1/A.in')
    t = int(infile.readline())
    for test_case in range(1, t + 1):
        n, p, q, r, s = map(int, infile.readline().strip().split())
        print 'Case #{0}: {1}'.format(test_case, solve(n, p, q, r, s))
    infile.close()
    return t
| 25.435581
| 86
| 0.456302
| 30,406
| 202,162
| 3.014504
| 0.018417
| 0.083745
| 0.070566
| 0.063278
| 0.824905
| 0.824327
| 0.823967
| 0.823945
| 0.823727
| 0.822156
| 0
| 0.098253
| 0.37371
| 202,162
| 7,947
| 87
| 25.438782
| 0.625683
| 0
| 0
| 0.928756
| 0
| 0
| 0.005585
| 0.003092
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.000287
| null | null | 0.005171
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b149c646a47259375c64c54a82dad39404509967
| 16,506
|
py
|
Python
|
xenonpy/mdl/model.py
|
mori0711/XenonPy
|
e36ca0ea112b45ee629cd980c88e80cd6c96c514
|
[
"BSD-3-Clause"
] | 93
|
2018-02-11T23:43:47.000Z
|
2022-03-11T02:40:11.000Z
|
xenonpy/mdl/model.py
|
mori0711/XenonPy
|
e36ca0ea112b45ee629cd980c88e80cd6c96c514
|
[
"BSD-3-Clause"
] | 192
|
2018-04-20T04:32:12.000Z
|
2022-03-24T05:59:18.000Z
|
xenonpy/mdl/model.py
|
mori0711/XenonPy
|
e36ca0ea112b45ee629cd980c88e80cd6c96c514
|
[
"BSD-3-Clause"
] | 51
|
2018-01-18T08:08:55.000Z
|
2022-03-01T05:52:22.000Z
|
# Copyright (c) 2021. yoshida-lab. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import pandas as pd
from xenonpy.mdl.base import BaseQuery
class QueryModelDetailsWith(BaseQuery):
    """GraphQL query builder: filter model details by modelset/property/
    descriptor/method/lang and boolean flags (regression, transferred,
    deprecated, succeed)."""
    # Fields shared by both Regression and Classification results.
    common = [
        'id',
        'transferred',
        'succeed',
        'isRegression',
        'deprecated',
        'modelset',
        'method',
        'property',
        'descriptor',
        'lang',
    ]
    # Metrics available only on Classification results.
    classification = [
        'accuracy',
        'precision',
        'recall',
        'f1',
        'sensitivity',
        'prevalence',
        'specificity',
        'ppv',
        'npv',
    ]
    # Metrics available only on Regression results.
    regression = [
        'meanAbsError',
        'maxAbsError',
        'meanSquareError',
        'rootMeanSquareError',
        'r2',
        'pValue',
        'spearmanCorr',
        'pearsonCorr',
    ]
    queryable = common + classification + regression
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the GraphQL document; 'id' is always force-included, and each
        requested field is routed to the Regression and/or Classification
        inline fragment it belongs to."""
        reg, cls = [], []
        if 'id' not in query_vars:
            query_vars = query_vars + ('id',)
        for var in query_vars:
            if var in self.common:
                reg.append(var)
                cls.append(var)
            elif var in self.regression:
                reg.append(var)
            elif var in self.classification:
                cls.append(var)
        return f'''
        query (
            $modelset_has: [String!]
            $property_has: [String!]
            $descriptor_has: [String!]
            $method_has: [String!]
            $lang_has: [String!]
            $regression: Boolean
            $transferred: Boolean
            $deprecated: Boolean
            $succeed: Boolean
        ) {{
            queryModelDetailsWith(
                modelset_has: $modelset_has
                property_has: $property_has
                descriptor_has: $descriptor_has
                method_has: $method_has
                lang_has: $lang_has
                regression: $regression
                transferred: $transferred
                deprecated: $deprecated
                succeed: $succeed
            ) {{
                ...on Regression {{
                    {' '.join(reg)}
                }}
                ...on Classification {{
                    {' '.join(cls)}
                }}
            }}
        }}
        '''
class QueryModelDetails(BaseQuery):
    """GraphQL query builder: fetch model details matching an explicit list
    of query strings."""
    # Fields shared by both Regression and Classification results.
    common = [
        'id',
        'transferred',
        'succeed',
        'isRegression',
        'deprecated',
        'modelset',
        'method',
        'property',
        'descriptor',
        'lang',
    ]
    # Metrics available only on Classification results.
    classification = [
        'accuracy',
        'precision',
        'recall',
        'f1',
        'sensitivity',
        'prevalence',
        'specificity',
        'ppv',
        'npv',
    ]
    # Metrics available only on Regression results.
    regression = [
        'meanAbsError',
        'maxAbsError',
        'meanSquareError',
        'rootMeanSquareError',
        'r2',
        'pValue',
        'spearmanCorr',
        'pearsonCorr',
    ]
    queryable = common + classification + regression
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the GraphQL document; 'id' is always force-included, and each
        requested field is routed to the matching inline fragment."""
        reg, cls = [], []
        if 'id' not in query_vars:
            query_vars = query_vars + ('id',)
        for var in query_vars:
            if var in self.common:
                reg.append(var)
                cls.append(var)
            elif var in self.regression:
                reg.append(var)
            elif var in self.classification:
                cls.append(var)
        return f'''
        query ($query: [String!]!) {{
            queryModelDetails(query: $query) {{
                ...on Regression {{
                    {' '.join(reg)}
                }}
                ...on Classification {{
                    {' '.join(cls)}
                }}
            }}
        }}
        '''
class GetModelUrls(BaseQuery):
    """GraphQL query builder: resolve download URLs for a batch of model ids."""
    queryable = [
        'id',
        'etag',
        'url',
    ]
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the query; requested fields are interpolated verbatim."""
        return f'''
        query ($ids: [Int!]!) {{
            getModelUrls(ids: $ids) {{
                {' '.join(query_vars)}
            }}
        }}
        '''
class GetModelUrl(BaseQuery):
    """GraphQL query builder: resolve the download URL for a single model id."""
    queryable = [
        'id',
        'etag',
        'url',
    ]
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the query; requested fields are interpolated verbatim."""
        return f'''
        query ($id: Int!) {{
            getModelUrl(id: $id) {{
                {' '.join(query_vars)}
            }}
        }}
        '''
class GetModelDetails(BaseQuery):
    """GraphQL query builder: fetch details for a batch of model ids.

    Unlike the Query* variants, 'id' is NOT force-included here — callers
    must request it explicitly if they need it.

    Fix vs. original: removed a stray dead ``pass`` statement that trailed
    the class body.
    """
    # Fields shared by both Regression and Classification results.
    common = [
        'id',
        'transferred',
        'succeed',
        'isRegression',
        'deprecated',
        'modelset',
        'method',
        'property',
        'descriptor',
        'lang',
    ]
    # Metrics available only on Classification results.
    classification = [
        'accuracy',
        'precision',
        'recall',
        'f1',
        'sensitivity',
        'prevalence',
        'specificity',
        'ppv',
        'npv',
    ]
    # Metrics available only on Regression results.
    regression = [
        'meanAbsError',
        'maxAbsError',
        'meanSquareError',
        'rootMeanSquareError',
        'r2',
        'pValue',
        'spearmanCorr',
        'pearsonCorr',
    ]
    queryable = common + classification + regression
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the query; each requested field is routed to the Regression
        and/or Classification inline fragment it belongs to."""
        reg, cls = [], []
        for var in query_vars:
            if var in self.common:
                reg.append(var)
                cls.append(var)
            elif var in self.regression:
                reg.append(var)
            elif var in self.classification:
                cls.append(var)
        return f'''
        query ($ids: [Int!]!) {{
            getModelDetails(ids: $ids) {{
                ...on Regression {{
                    {' '.join(reg)}
                }}
                ...on Classification {{
                    {' '.join(cls)}
                }}
            }}
        }}
        '''
class GetModelDetail(BaseQuery):
    """GraphQL query builder: fetch details for a single model id.

    Unlike the Query* variants, 'id' is NOT force-included here — callers
    must request it explicitly if they need it.

    Fix vs. original: removed a stray dead ``pass`` statement that trailed
    the class body.
    """
    # Fields shared by both Regression and Classification results.
    common = [
        'id',
        'transferred',
        'succeed',
        'isRegression',
        'deprecated',
        'modelset',
        'method',
        'property',
        'descriptor',
        'lang',
    ]
    # Metrics available only on Classification results.
    classification = [
        'accuracy',
        'precision',
        'recall',
        'f1',
        'sensitivity',
        'prevalence',
        'specificity',
        'ppv',
        'npv',
    ]
    # Metrics available only on Regression results.
    regression = [
        'meanAbsError',
        'maxAbsError',
        'meanSquareError',
        'rootMeanSquareError',
        'r2',
        'pValue',
        'spearmanCorr',
        'pearsonCorr',
    ]
    queryable = common + classification + regression
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the query; each requested field is routed to the Regression
        and/or Classification inline fragment it belongs to."""
        reg, cls = [], []
        for var in query_vars:
            if var in self.common:
                reg.append(var)
                cls.append(var)
            elif var in self.regression:
                reg.append(var)
            elif var in self.classification:
                cls.append(var)
        return f'''
        query ($id: Int!) {{
            getModelDetail(id: $id) {{
                ...on Regression {{
                    {' '.join(reg)}
                }}
                ...on Classification {{
                    {' '.join(cls)}
                }}
            }}
        }}
        '''
class GetTrainingInfo(BaseQuery):
    """GraphQL query builder: fetch per-epoch training info for a model id;
    the raw response is post-processed into a pandas DataFrame."""
    queryable = []
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    @staticmethod
    def _post(ret, return_json):
        # NOTE(review): `return_json` is accepted for interface parity but
        # ignored — the result is always converted to a DataFrame.
        return pd.DataFrame(ret)
    def gql(self, *query_vars: str):
        # The scalar result carries the whole payload; `query_vars` is unused.
        return f'''
        query ($id: Int!) {{
            getTrainingInfo(id: $id)
        }}
        '''
class GetTrainingEnv(BaseQuery):
    """GraphQL query builder: fetch the recorded training environment for a
    model id; the response is returned as raw JSON."""
    queryable = []
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
        # Keep the payload as JSON instead of converting it.
        self._return_json = True
    def gql(self, *query_vars: str):
        # The scalar result carries the whole payload; `query_vars` is unused.
        return f'''
        query ($id: Int!) {{
            getTrainingEnv(id: $id)
        }}
        '''
class GetSupplementary(BaseQuery):
    """GraphQL query builder: fetch supplementary data for a model id; the
    response is returned as raw JSON."""
    queryable = []
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
        # Keep the payload as JSON instead of converting it.
        self._return_json = True
    def gql(self, *query_vars: str):
        # The scalar result carries the whole payload; `query_vars` is unused.
        return f'''
        query ($id: Int!) {{
            getSupplementary(id: $id)
        }}
        '''
class ListModelsWithProperty(BaseQuery):
    """GraphQL query builder: list models trained for a named property."""
    queryable = [
        'id',
        'transferred',
        'succeed',
        'isRegression',
        'deprecated',
        'modelset',
        'method',
        'property',
        'descriptor',
        'lang'
    ]
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the query; requested fields are interpolated verbatim."""
        return f'''
        query ($name: String!) {{
            listModelsWithProperty(name: $name) {{
                {' '.join(query_vars)}
            }}
        }}
        '''
class ListModelsWithModelset(BaseQuery):
    """GraphQL query builder: list models belonging to a named modelset."""
    queryable = [
        'id',
        'transferred',
        'succeed',
        'isRegression',
        'deprecated',
        'modelset',
        'method',
        'property',
        'descriptor',
        'lang'
    ]
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the query; requested fields are interpolated verbatim."""
        return f'''
        query ($name: String!) {{
            listModelsWithModelset(name: $name) {{
                {' '.join(query_vars)}
            }}
        }}
        '''
class ListModelsWithMethod(BaseQuery):
    """GraphQL query builder: list models trained with a named method."""
    queryable = [
        'id',
        'transferred',
        'succeed',
        'isRegression',
        'deprecated',
        'modelset',
        'method',
        'property',
        'descriptor',
        'lang'
    ]
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the query; requested fields are interpolated verbatim."""
        return f'''
        query ($name: String!) {{
            listModelsWithMethod(name: $name) {{
                {' '.join(query_vars)}
            }}
        }}
        '''
class ListModelsWithDescriptor(BaseQuery):
    """GraphQL query builder: list models using a named descriptor."""
    queryable = [
        'id',
        'transferred',
        'succeed',
        'isRegression',
        'deprecated',
        'modelset',
        'method',
        'property',
        'descriptor',
        'lang'
    ]
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the query; requested fields are interpolated verbatim."""
        return f'''
        query ($name: String!) {{
            listModelsWithDescriptor(name: $name) {{
                {' '.join(query_vars)}
            }}
        }}
        '''
class UploadModel(BaseQuery):
    """GraphQL mutation builder: upload a model (plus optional training env,
    training info, and supplementary JSON) into a modelset."""
    queryable = [
        'id',
        'etag',
        'path'
    ]
    def __init__(self, variables, *, api_key: str = 'anonymous.user.key',
                 endpoint: str = 'http://xenon.ism.ac.jp/api'):
        """
        Access to XenonPy.MDL library.
        Parameters
        ----------
        api_key
            Not implement yet.
        """
        super().__init__(variables=variables, api_key=api_key, endpoint=endpoint)
    def gql(self, *query_vars: str):
        """Build the mutation; requested return fields are interpolated
        verbatim."""
        return f"""
        mutation(
            $id: Int!
            $describe: UploadModelInput!
            $model: Upload!
            $training_env: Json
            $training_info: Json
            $supplementary: Json
        ) {{
            uploadModel(
                modelsetId: $id
                model: $model
                describe: $describe
                training_env: $training_env
                training_info: $training_info
                supplementary: $supplementary
            ) {{
                {' '.join(query_vars)}
            }}
        }}
        """
| 25.009091
| 81
| 0.451048
| 1,324
| 16,506
| 5.452417
| 0.109517
| 0.046544
| 0.05818
| 0.038787
| 0.808422
| 0.806067
| 0.791661
| 0.791661
| 0.791661
| 0.77864
| 0
| 0.001254
| 0.42009
| 16,506
| 659
| 82
| 25.047041
| 0.752925
| 0.082637
| 0
| 0.811563
| 0
| 0
| 0.425459
| 0.010997
| 0
| 0
| 0
| 0
| 0
| 1
| 0.062099
| false
| 0.004283
| 0.004283
| 0.023555
| 0.184154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1a854b32a606c071f650be42f138cd5c6b1db76
| 23,198
|
py
|
Python
|
packages/syft/src/syft/proto/grid/messages/setup_messages_pb2.py
|
callezenwaka/PySyft
|
2545c302441cfe727ec095c4f9aa136bff02be32
|
[
"Apache-1.1"
] | 1
|
2021-09-14T10:56:43.000Z
|
2021-09-14T10:56:43.000Z
|
packages/syft/src/syft/proto/grid/messages/setup_messages_pb2.py
|
callezenwaka/PySyft
|
2545c302441cfe727ec095c4f9aa136bff02be32
|
[
"Apache-1.1"
] | 2
|
2021-04-02T10:12:44.000Z
|
2021-04-02T10:12:50.000Z
|
packages/syft/src/syft/proto/grid/messages/setup_messages_pb2.py
|
callezenwaka/PySyft
|
2545c302441cfe727ec095c4f9aa136bff02be32
|
[
"Apache-1.1"
] | 1
|
2021-08-19T12:23:01.000Z
|
2021-08-19T12:23:01.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/grid/messages/setup_messages.proto
"""Generated protocol buffer code."""
# third party
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# syft absolute
from syft.proto.core.common import (
common_object_pb2 as proto_dot_core_dot_common_dot_common__object__pb2,
)
from syft.proto.core.io import address_pb2 as proto_dot_core_dot_io_dot_address__pb2
# NOTE(review): machine-generated by protoc (file header says "DO NOT EDIT!").
# File-level descriptor for proto/grid/messages/setup_messages.proto; the
# serialized_pb blob is the wire-encoded FileDescriptorProto and must not be
# edited by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
    name="proto/grid/messages/setup_messages.proto",
    package="syft.grid.messages",
    syntax="proto3",
    serialized_options=None,
    create_key=_descriptor._internal_create_key,
    serialized_pb=b'\n(proto/grid/messages/setup_messages.proto\x12\x12syft.grid.messages\x1a%proto/core/common/common_object.proto\x1a\x1bproto/core/io/address.proto"\xd7\x01\n\x19\x43reateInitialSetUpMessage\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12&\n\x07\x61\x64\x64ress\x18\x02 \x01(\x0b\x32\x15.syft.core.io.Address\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05\x65mail\x18\x04 \x01(\t\x12\x10\n\x08password\x18\x05 \x01(\t\x12\x13\n\x0b\x64omain_name\x18\x06 \x01(\t\x12\'\n\x08reply_to\x18\x07 \x01(\x0b\x32\x15.syft.core.io.Address"\x9a\x01\n\x0fGetSetUpMessage\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12&\n\x07\x61\x64\x64ress\x18\x02 \x01(\x0b\x32\x15.syft.core.io.Address\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\x12\'\n\x08reply_to\x18\x04 \x01(\x0b\x32\x15.syft.core.io.Address"\x87\x01\n\x10GetSetUpResponse\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x13\n\x0bstatus_code\x18\x02 \x01(\x05\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\x12&\n\x07\x61\x64\x64ress\x18\x04 \x01(\x0b\x32\x15.syft.core.io.Address"\x9d\x01\n\x12UpdateSetupMessage\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12&\n\x07\x61\x64\x64ress\x18\x02 \x01(\x0b\x32\x15.syft.core.io.Address\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\x12\'\n\x08reply_to\x18\x04 \x01(\x0b\x32\x15.syft.core.io.Address"\x8a\x01\n\x13UpdateSetupResponse\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x13\n\x0bstatus_code\x18\x02 \x01(\x05\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\x12&\n\x07\x61\x64\x64ress\x18\x04 \x01(\x0b\x32\x15.syft.core.io.Addressb\x06proto3',
    dependencies=[
        proto_dot_core_dot_common_dot_common__object__pb2.DESCRIPTOR,
        proto_dot_core_dot_io_dot_address__pb2.DESCRIPTOR,
    ],
)
# NOTE(review): machine-generated by protoc — do not hand-edit; regenerate
# from setup_messages.proto instead.  Descriptor for the
# CreateInitialSetUpMessage proto message (msg_id, address, name, email,
# password, domain_name, reply_to).
_CREATEINITIALSETUPMESSAGE = _descriptor.Descriptor(
    name="CreateInitialSetUpMessage",
    full_name="syft.grid.messages.CreateInitialSetUpMessage",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.CreateInitialSetUpMessage.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.CreateInitialSetUpMessage.address",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="name",
            full_name="syft.grid.messages.CreateInitialSetUpMessage.name",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="email",
            full_name="syft.grid.messages.CreateInitialSetUpMessage.email",
            index=3,
            number=4,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="password",
            full_name="syft.grid.messages.CreateInitialSetUpMessage.password",
            index=4,
            number=5,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="domain_name",
            full_name="syft.grid.messages.CreateInitialSetUpMessage.domain_name",
            index=5,
            number=6,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="reply_to",
            full_name="syft.grid.messages.CreateInitialSetUpMessage.reply_to",
            index=6,
            number=7,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=133,
    serialized_end=348,
)
# NOTE(review): machine-generated by protoc — do not hand-edit; regenerate
# from setup_messages.proto instead.  Descriptor for the GetSetUpMessage
# proto message (msg_id, address, content, reply_to).
_GETSETUPMESSAGE = _descriptor.Descriptor(
    name="GetSetUpMessage",
    full_name="syft.grid.messages.GetSetUpMessage",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.GetSetUpMessage.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.GetSetUpMessage.address",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="content",
            full_name="syft.grid.messages.GetSetUpMessage.content",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="reply_to",
            full_name="syft.grid.messages.GetSetUpMessage.reply_to",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=351,
    serialized_end=505,
)
# NOTE(review): machine-generated by protoc — do not hand-edit; regenerate
# from setup_messages.proto instead.  Descriptor for the GetSetUpResponse
# proto message (msg_id, status_code, content, address).
_GETSETUPRESPONSE = _descriptor.Descriptor(
    name="GetSetUpResponse",
    full_name="syft.grid.messages.GetSetUpResponse",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.GetSetUpResponse.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="status_code",
            full_name="syft.grid.messages.GetSetUpResponse.status_code",
            index=1,
            number=2,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="content",
            full_name="syft.grid.messages.GetSetUpResponse.content",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.GetSetUpResponse.address",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=508,
    serialized_end=643,
)
# Auto-generated protobuf Descriptor for syft.grid.messages.UpdateSetupMessage.
# Fields: msg_id (message -> common.UID), address (message -> io.Address),
# content (string), reply_to (message -> io.Address); message types are wired
# after all descriptors are declared. Produced by protoc -- do not edit by hand.
_UPDATESETUPMESSAGE = _descriptor.Descriptor(
    name="UpdateSetupMessage",
    full_name="syft.grid.messages.UpdateSetupMessage",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.UpdateSetupMessage.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.UpdateSetupMessage.address",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        # content -- string, proto3 default ""
        _descriptor.FieldDescriptor(
            name="content",
            full_name="syft.grid.messages.UpdateSetupMessage.content",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="reply_to",
            full_name="syft.grid.messages.UpdateSetupMessage.reply_to",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    # byte offsets of this message inside the serialized FileDescriptorProto
    serialized_start=646,
    serialized_end=803,
)
# Auto-generated protobuf Descriptor for syft.grid.messages.UpdateSetupResponse.
# Same field layout as GetSetUpResponse: msg_id (message -> common.UID),
# status_code (int32), content (string), address (message -> io.Address).
# Produced by protoc -- do not edit by hand.
_UPDATESETUPRESPONSE = _descriptor.Descriptor(
    name="UpdateSetupResponse",
    full_name="syft.grid.messages.UpdateSetupResponse",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="msg_id",
            full_name="syft.grid.messages.UpdateSetupResponse.msg_id",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        # status_code -- scalar int32, proto3 default 0
        _descriptor.FieldDescriptor(
            name="status_code",
            full_name="syft.grid.messages.UpdateSetupResponse.status_code",
            index=1,
            number=2,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        # content -- string, proto3 default ""
        _descriptor.FieldDescriptor(
            name="content",
            full_name="syft.grid.messages.UpdateSetupResponse.content",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="address",
            full_name="syft.grid.messages.UpdateSetupResponse.address",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    # byte offsets of this message inside the serialized FileDescriptorProto
    serialized_start=806,
    serialized_end=944,
)
# --- cross-descriptor wiring ------------------------------------------------
# Resolve the message-typed fields now that every descriptor object exists:
# each msg_id field is a common.UID message; address/reply_to fields are
# io.Address messages. Auto-generated by protoc -- do not edit by hand.
_CREATEINITIALSETUPMESSAGE.fields_by_name["msg_id"].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_CREATEINITIALSETUPMESSAGE.fields_by_name["address"].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_CREATEINITIALSETUPMESSAGE.fields_by_name["reply_to"].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_GETSETUPMESSAGE.fields_by_name["msg_id"].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_GETSETUPMESSAGE.fields_by_name["address"].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_GETSETUPMESSAGE.fields_by_name["reply_to"].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_GETSETUPRESPONSE.fields_by_name["msg_id"].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_GETSETUPRESPONSE.fields_by_name["address"].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_UPDATESETUPMESSAGE.fields_by_name["msg_id"].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_UPDATESETUPMESSAGE.fields_by_name["address"].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_UPDATESETUPMESSAGE.fields_by_name["reply_to"].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS
_UPDATESETUPRESPONSE.fields_by_name["msg_id"].message_type = proto_dot_core_dot_common_dot_common__object__pb2._UID
_UPDATESETUPRESPONSE.fields_by_name["address"].message_type = proto_dot_core_dot_io_dot_address__pb2._ADDRESS

# Register every message descriptor on the file descriptor, then register the
# file with the default symbol database.
DESCRIPTOR.message_types_by_name["CreateInitialSetUpMessage"] = _CREATEINITIALSETUPMESSAGE
DESCRIPTOR.message_types_by_name["GetSetUpMessage"] = _GETSETUPMESSAGE
DESCRIPTOR.message_types_by_name["GetSetUpResponse"] = _GETSETUPRESPONSE
DESCRIPTOR.message_types_by_name["UpdateSetupMessage"] = _UPDATESETUPMESSAGE
DESCRIPTOR.message_types_by_name["UpdateSetupResponse"] = _UPDATESETUPRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# --- concrete message classes -----------------------------------------------
# Build the usable Python message classes from the descriptors above via the
# reflection metaclass, and register each with the symbol database.
CreateInitialSetUpMessage = _reflection.GeneratedProtocolMessageType(
    "CreateInitialSetUpMessage",
    (_message.Message,),
    {
        "DESCRIPTOR": _CREATEINITIALSETUPMESSAGE,
        "__module__": "proto.grid.messages.setup_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.CreateInitialSetUpMessage)
    },
)
_sym_db.RegisterMessage(CreateInitialSetUpMessage)
GetSetUpMessage = _reflection.GeneratedProtocolMessageType(
    "GetSetUpMessage",
    (_message.Message,),
    {
        "DESCRIPTOR": _GETSETUPMESSAGE,
        "__module__": "proto.grid.messages.setup_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.GetSetUpMessage)
    },
)
_sym_db.RegisterMessage(GetSetUpMessage)
GetSetUpResponse = _reflection.GeneratedProtocolMessageType(
    "GetSetUpResponse",
    (_message.Message,),
    {
        "DESCRIPTOR": _GETSETUPRESPONSE,
        "__module__": "proto.grid.messages.setup_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.GetSetUpResponse)
    },
)
_sym_db.RegisterMessage(GetSetUpResponse)
UpdateSetupMessage = _reflection.GeneratedProtocolMessageType(
    "UpdateSetupMessage",
    (_message.Message,),
    {
        "DESCRIPTOR": _UPDATESETUPMESSAGE,
        "__module__": "proto.grid.messages.setup_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.UpdateSetupMessage)
    },
)
_sym_db.RegisterMessage(UpdateSetupMessage)
UpdateSetupResponse = _reflection.GeneratedProtocolMessageType(
    "UpdateSetupResponse",
    (_message.Message,),
    {
        "DESCRIPTOR": _UPDATESETUPRESPONSE,
        "__module__": "proto.grid.messages.setup_messages_pb2"
        # @@protoc_insertion_point(class_scope:syft.grid.messages.UpdateSetupResponse)
    },
)
_sym_db.RegisterMessage(UpdateSetupResponse)
# @@protoc_insertion_point(module_scope)
| 33.767103
| 1,639
| 0.620872
| 2,428
| 23,198
| 5.582372
| 0.071664
| 0.043677
| 0.065885
| 0.057769
| 0.80242
| 0.790173
| 0.733142
| 0.72082
| 0.718681
| 0.707024
| 0
| 0.036077
| 0.283085
| 23,198
| 686
| 1,640
| 33.816327
| 0.778907
| 0.027675
| 0
| 0.810769
| 1
| 0.004615
| 0.144461
| 0.114291
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.004615
| 0.009231
| 0
| 0.009231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
492b654e8e7935fc915b06c1a6be65b9fa99b431
| 57,578
|
py
|
Python
|
tests/unit/KeysLookupTests.py
|
risk-frontiers/OasisIntegration
|
ddcc86263d4022d3e3f9a0f3a5860476bdf164b1
|
[
"Condor-1.1",
"RSA-MD",
"Naumen",
"Xnet",
"X11",
"MS-PL"
] | 1
|
2020-09-25T08:56:23.000Z
|
2020-09-25T08:56:23.000Z
|
tests/unit/KeysLookupTests.py
|
risk-frontiers/OasisIntegration
|
ddcc86263d4022d3e3f9a0f3a5860476bdf164b1
|
[
"Condor-1.1",
"RSA-MD",
"Naumen",
"Xnet",
"X11",
"MS-PL"
] | 2
|
2020-08-12T16:04:36.000Z
|
2020-08-14T01:20:13.000Z
|
tests/unit/KeysLookupTests.py
|
risk-frontiers/OasisIntegration
|
ddcc86263d4022d3e3f9a0f3a5860476bdf164b1
|
[
"Condor-1.1",
"RSA-MD",
"Naumen",
"Xnet",
"X11",
"MS-PL"
] | null | null | null |
import unittest
import copy
from parameterized import parameterized
import itertools
from datetime import date
from oasislmf.utils.coverages import COVERAGE_TYPES
from complex_model import HailAUSKeysLookup
from complex_model.Common import *
from complex_model.utils import to_bool
from tests.unit.RFBaseTest import RFBaseTestCase
# Coverage-type groupings used by the test parameterizations below.
OED_COVERAGES = [COVERAGE_TYPES["buildings"], COVERAGE_TYPES["contents"], COVERAGE_TYPES["other"], COVERAGE_TYPES["bi"]]
OED_STRUCTURE_COVERAGES = [COVERAGE_TYPES["buildings"], COVERAGE_TYPES["contents"], COVERAGE_TYPES["bi"]]
OED_MOTOR_COVERAGES = [COVERAGE_TYPES["buildings"]]
OED_NON_BI_COVERAGES = [COVERAGE_TYPES["buildings"], COVERAGE_TYPES["contents"]]
OED_BUILDING_CONTENTS_COVERAGES = [COVERAGE_TYPES["buildings"], COVERAGE_TYPES["contents"]]

# OCCUPANCY CODES
# Expand each category's list of {"min": .., "max": ..} ranges into a flat
# list of individual codes, and keep one representative code per category
# (the first range's minimum).
DEFAULT_OCCUPANCY_CODES = {}
OCCUPANCY_CODES = {}
for key in OED_OCCUPANCY_CODE:
    OCCUPANCY_CODES[key] = [y for x in OED_OCCUPANCY_CODE[key] for y in range(x["min"], x["max"] + 1)]
    DEFAULT_OCCUPANCY_CODES[key] = OED_OCCUPANCY_CODE[key][0]['min']
# BUGFIX: iterate a tuple, not a set literal -- set iteration order is
# arbitrary, which made the order of this list nondeterministic across runs.
DEFAULT_SUPPORTED_OCCUPANCY_CODES = [DEFAULT_OCCUPANCY_CODES[key] for key in ("residential", "commercial", "industrial")]

# CONSTRUCTION CODES (same expansion as occupancy codes above)
DEFAULT_CONSTRUCTION_CODES = {}
CONSTRUCTION_CODES = {}
for key in OED_CONSTRUCTION_CODE:
    CONSTRUCTION_CODES[key] = [y for x in OED_CONSTRUCTION_CODE[key] for y in range(x["min"], x["max"] + 1)]
    DEFAULT_CONSTRUCTION_CODES[key] = OED_CONSTRUCTION_CODE[key][0]['min']

# [coverage, occupancy] pairs that the lookup accepts / rejects.
OK_COVERAGES_COMMERCIAL_INDUSTRIAL = [[coverage, oc] for coverage, oc
                                      in itertools.product(OED_STRUCTURE_COVERAGES,
                                                           [DEFAULT_OCCUPANCY_CODES["commercial"],
                                                            DEFAULT_OCCUPANCY_CODES["industrial"]])]
OK_NON_BI_COVERAGES_RESIDENTIAL = [[coverage, oc] for coverage, oc
                                   in itertools.product(OED_BUILDING_CONTENTS_COVERAGES,
                                                        [DEFAULT_OCCUPANCY_CODES["residential"]])]
OK_COVERAGES_OCCUPANCY_COMBINATION = OK_COVERAGES_COMMERCIAL_INDUSTRIAL + OK_NON_BI_COVERAGES_RESIDENTIAL
FAIL_COVERAGES_OCCUPANCY_COMBINATION = [[COVERAGE_TYPES["bi"], DEFAULT_OCCUPANCY_CODES["residential"]]]
# CONSISTENCY: these two groups were element-wise identical copies of the
# OK_* lists above, built by repeating the comprehensions verbatim; build
# them as independent copies instead.
NON_MOTOR_COMMERCIAL_INDUSTRIAL = [list(pair) for pair in OK_COVERAGES_COMMERCIAL_INDUSTRIAL]
BUILDING_CONTENTS_RESIDENTIAL = [list(pair) for pair in OK_NON_BI_COVERAGES_RESIDENTIAL]
OK_NON_MOTOR_COVERAGES_OCCUPANCY_COMBINATION = NON_MOTOR_COMMERCIAL_INDUSTRIAL + BUILDING_CONTENTS_RESIDENTIAL

# Raw values treated as on/off for the SMV flag (presumably fed to to_bool --
# TODO confirm against the tests that consume these).
SMV_ON = [1, 2, '1', 'true', 'True', 'TRUE']
SMV_OFF = [0, '0', 'false', 'False', 'FALSE']
class OccupancyCodeTests(RFBaseTestCase):
    """Validate that the line-of-business lookup maps OED occupancy codes onto
    the lob ids described in the oasis integration documentation
    (residential=1, commercial=2, industrial=3; anything else is error 230).
    """

    @staticmethod
    def _lookup():
        # Fresh lookup instance for every test case.
        return HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")

    @parameterized.expand([[cc] for cc in OCCUPANCY_CODES["unsupported"]])
    def test_get_lob_id_unsupported_occupancy(self, cc):
        self.assertRaisesWithErrorCode(230, self._lookup()._get_lob_id, {"occupancycode": cc})

    @parameterized.expand([[cc] for cc in OCCUPANCY_CODES["residential"]])
    def test_get_lob_id_residential(self, cc):
        self.assertEqual(1, self._lookup()._get_lob_id({"occupancycode": cc}))

    @parameterized.expand([[cc] for cc in OCCUPANCY_CODES["commercial"]])
    def test_get_lob_id_commercial(self, cc):
        self.assertEqual(2, self._lookup()._get_lob_id({"occupancycode": cc}))

    @parameterized.expand([[cc] for cc in OCCUPANCY_CODES["industrial"]])
    def test_get_lob_id_industrial(self, cc):
        self.assertEqual(3, self._lookup()._get_lob_id({"occupancycode": cc}))
class ConstructionCodeTests(RFBaseTestCase):
    """Checks that OED construction codes are classified as motor / non-motor
    exposure by HailAUSKeysLookup._is_motor.
    """

    @staticmethod
    def _loc_with_construction(cc):
        """Minimal valid location record carrying the construction code under test."""
        return {'locperilscovered': 'AA1', 'loc_id': 1,
                'latitude': -33.8688, 'longitude': 151.2093,
                'postalcode': 2000, 'constructioncode': cc}

    @parameterized.expand([[cc] for cc in CONSTRUCTION_CODES["motor"]])
    def test_is_motor_true(self, cc):
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        self.assertTrue(lookup._is_motor(self._loc_with_construction(cc)))

    @parameterized.expand([[cc] for cc in CONSTRUCTION_CODES["structure"]])
    def test_is_motor_false(self, cc):
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        self.assertFalse(lookup._is_motor(self._loc_with_construction(cc)))

    @parameterized.expand([[cc] for cc in CONSTRUCTION_CODES["unsupported"]])
    def test_unsupported_construction_codes(self, cc):
        # Unsupported codes fall into the non-motor branch.
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        # IDIOM FIX: assertFalse(x) instead of assertTrue(not x) -- same check,
        # clearer intent and failure message.
        self.assertFalse(lookup._is_motor(self._loc_with_construction(cc)))
class OEDGeogSchemeTests(RFBaseTestCase):
    """Validate that the GeogScheme columns are used as described in the
    specification included in the oasis integration documentation.
    """

    @staticmethod
    def _lookup():
        # One fresh lookup per test case.
        return HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")

    def test_gnaf_geogscheme(self):
        # A GNAF geog name populates address_id/address_type.
        exposure = self._lookup()._add_geog_name({}, "GNAF", "GANSW123456789")
        self.assertEqual("GANSW123456789", exposure["address_id"])
        self.assertEqual(EnumAddressType.GNAF.value, exposure["address_type"])

    def test_pc4_geogscheme(self):
        # A PC4 geog name populates med_id/med_type at postcode resolution.
        exposure = self._lookup()._add_geog_name({}, "PC4", 2000)
        self.assertEqual(2000, exposure["med_id"])
        self.assertEqual(EnumResolution.Postcode.value, exposure["med_type"])

    def test_ica_bound_geogscheme(self):
        # ICA zone values outside 1..49 are ignored.
        lookup = self._lookup()
        self.assertNotIn('lrg_id', lookup._add_geog_name({}, "ICA", 0))
        self.assertNotIn('lrg_id', lookup._add_geog_name({}, "ICA", 50))

    @parameterized.expand([[c] for c in range(1, 50)])
    def test_ica_geogscheme(self, c):
        exposure = self._lookup()._add_geog_name({}, "ICA", c)
        self.assertEqual(c, exposure["lrg_id"])
        self.assertEqual(EnumResolution.IcaZone.value, exposure["lrg_type"])

    def test_cro_bound_geogscheme(self):
        # CRO zone values outside 1..49 are ignored.
        lookup = self._lookup()
        self.assertNotIn('zone_id', lookup._add_geog_name({}, "CRO", 0))
        self.assertNotIn('zone_id', lookup._add_geog_name({}, "CRO", 50))

    @parameterized.expand([[c] for c in range(1, 50)])
    def test_cro_geogscheme(self, c):
        exposure = self._lookup()._add_geog_name({}, "CRO", c)
        self.assertEqual(c, exposure["zone_id"])
        self.assertEqual(EnumResolution.Cresta.value, exposure["zone_type"])
class CreateUniExposureTests(RFBaseTestCase):
"""This test ensures that create_uni_exposure method behaves as expected
1. required field are reported as failed lookup
2. unsupported peril
3. cascading geolocation
"""
@parameterized.expand([[coverage] for coverage in OED_COVERAGES])
def test_required_fields(self, coverage):
    """Missing mandatory location fields surface as failed lookups."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    # both 'locperilscovered' and 'loc_id' missing -> 101
    self.assertRaisesWithErrorCode(101, keys_lookup.create_uni_exposure, {}, coverage["id"])
    # 'loc_id' missing -> 102
    self.assertRaisesWithErrorCode(102, keys_lookup.create_uni_exposure,
                                   {'locperilscovered': 'AA1'}, coverage["id"])
    # 'locperilscovered' missing -> 101
    self.assertRaisesWithErrorCode(101, keys_lookup.create_uni_exposure,
                                   {'loc_id': 'loc1'}, coverage["id"])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_unsupported_values(self, coverage, oc):
    """Bad peril/occupancy values are rejected; odd year-built values degrade gracefully."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    base = {'locperilscovered': 'AA1', 'loc_id': 1,
            'latitude': -33.8688, 'longitude': 151.2093,
            'postalcode': 2000, 'occupancycode': oc}
    # unknown peril code -> 122
    self.assertRaisesWithErrorCode(122, keys_lookup.create_uni_exposure,
                                   {**base, 'locperilscovered': 'QQ1'}, coverage['id'])
    # occupancy code outside every supported range -> 230
    self.assertRaisesWithErrorCode(230, keys_lookup.create_uni_exposure,
                                   {**base, 'occupancycode': 0}, coverage['id'])
    # non-numeric occupancy code -> 123
    self.assertRaisesWithErrorCode(123, keys_lookup.create_uni_exposure,
                                   {**base, 'occupancycode': 'a'}, coverage['id'])
    # year built: 0 is kept as-is, a non-numeric value is dropped from props,
    # and a real year is kept.
    exposure = keys_lookup.create_uni_exposure({**base, "yearbuilt": 0}, coverage['id'])
    self.assertEqual(0, exposure["props"]["YearBuilt"])
    exposure = keys_lookup.create_uni_exposure({**base, "yearbuilt": 'a'}, coverage['id'])
    self.assertNotIn('YearBuilt', exposure["props"])
    exposure = keys_lookup.create_uni_exposure({**base, "yearbuilt": 2001}, coverage['id'])
    self.assertEqual(2001, exposure["props"]["YearBuilt"])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_level(self, coverage, oc):
    """A supported GNAF id alone geolocates at address resolution."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    keys_lookup._supported_gnaf = ['GANSW123456789']
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': "GNAF", 'geogname1': "GANSW123456789", 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual("GANSW123456789", exposure['address_id'])
    self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
    self.assertEqual(EnumResolution.Address.value, exposure['best_res'])
    # no other resolution data may be populated
    for absent in ('latitude', 'longitude', 'med_id', 'zone_id', 'lrg_id', 'state'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_level_fail(self, coverage, oc):
    """BI cover for a residential occupancy is rejected with error 151."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    keys_lookup._supported_gnaf = ['GANSW123456789']
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': "GNAF", 'geogname1': "GANSW123456789", 'occupancycode': oc}
    self.assertRaisesWithErrorCode(151, keys_lookup.create_uni_exposure, location, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_state(self, coverage, oc):
    """GNAF id plus area code resolves at address level and keeps the state."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    keys_lookup._supported_gnaf = ['GANSW123456789']
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': "GNAF", 'geogname1': "GANSW123456789",
                'areacode': 'nsw', 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual("GANSW123456789", exposure['address_id'])
    self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
    self.assertEqual(EnumResolution.Address.value, exposure['best_res'])
    # state comparison is case-insensitive
    self.assertEqual('nsw', exposure['state'].lower())
    for absent in ('latitude', 'longitude', 'med_id', 'zone_id', 'lrg_id'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_state_fail(self, coverage, oc):
    """BI cover for a residential occupancy is rejected even with a state code."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    keys_lookup._supported_gnaf = ['GANSW123456789']
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': "GNAF", 'geogname1': "GANSW123456789",
                'areacode': 'nsw', 'occupancycode': oc}
    self.assertRaisesWithErrorCode(151, keys_lookup.create_uni_exposure, location, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_level(self, coverage, oc):
    """Coordinates alone geolocate at lat/long resolution."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    lat, lon = -33.8688, 151.2093
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'latitude': lat, 'longitude': lon, 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual(lat, exposure['latitude'])
    self.assertEqual(lon, exposure['longitude'])
    self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
    for absent in ('address_id', 'med_id', 'zone_id', 'lrg_id', 'state'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_level_fail(self, coverage, oc):
    """BI cover for a residential occupancy is rejected at lat/long level."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'latitude': -33.8688, 'longitude': 151.2093, 'occupancycode': oc}
    self.assertRaisesWithErrorCode(151, keys_lookup.create_uni_exposure, location, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon(self, coverage, oc):
    """With both a GNAF id and coordinates, both are kept and best_res is lat/long."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    keys_lookup._supported_gnaf = ['GANSW123456789']
    lat, lon = -33.8688, 151.2093
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': 'GNAF', 'geogname1': 'GANSW123456789',
                'latitude': lat, 'longitude': lon, 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual(lat, exposure['latitude'])
    self.assertEqual(lon, exposure['longitude'])
    self.assertEqual('GANSW123456789', exposure['address_id'])
    self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
    self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
    for absent in ('med_id', 'zone_id', 'lrg_id'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_fail(self, coverage, oc):
    """BI cover for a residential occupancy is rejected with address + coordinates."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': 'GNAF', 'geogname1': 'GANSW123456789',
                'latitude': -33.8688, 'longitude': 151.2093, 'occupancycode': oc}
    self.assertRaisesWithErrorCode(151, keys_lookup.create_uni_exposure, location, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_postcode_level(self, coverage, oc):
    """A postal code alone geolocates at postcode resolution."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'postalcode': 2000, 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual(2000, exposure['med_id'])
    self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
    self.assertEqual(EnumResolution.Postcode.value, exposure['best_res'])
    for absent in ('address_id', 'latitude', 'longitude', 'zone_id', 'lrg_id'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_postcode_level_fail(self, coverage, oc):
    """BI cover for a residential occupancy is rejected at postcode level."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'postalcode': 2000, 'occupancycode': oc}
    self.assertRaisesWithErrorCode(151, keys_lookup.create_uni_exposure, location, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_pc4_level(self, coverage, oc):
    """A PC4 geog scheme entry behaves like a plain postal code."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': "PC4", "geogname1": 2000, 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual(2000, exposure['med_id'])
    self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
    self.assertEqual(EnumResolution.Postcode.value, exposure['best_res'])
    for absent in ('address_id', 'latitude', 'longitude', 'zone_id', 'lrg_id'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_pc4_level_fail(self, coverage, oc):
    """BI cover for a residential occupancy is rejected at PC4 level."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': "PC4", "geogname1": 2000, 'occupancycode': oc}
    self.assertRaisesWithErrorCode(151, keys_lookup.create_uni_exposure, location, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_postcode(self, coverage, oc):
    """GNAF id plus postcode: both kept, best_res stays at address level."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    keys_lookup._supported_gnaf = ['GANSW123456789']
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': 'GNAF', 'geogname1': 'GANSW123456789',
                'postalcode': 2000, 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual('GANSW123456789', exposure['address_id'])
    self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
    self.assertEqual(2000, exposure['med_id'])
    self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
    self.assertEqual(EnumResolution.Address.value, exposure['best_res'])
    for absent in ('latitude', 'longitude', 'zone_id', 'lrg_id'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_postcode_fail(self, coverage, oc):
    """BI cover for a residential occupancy is rejected with address + postcode."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'geogscheme1': 'GNAF', 'geogname1': 'GANSW123456789',
                'postalcode': 2000, 'occupancycode': oc}
    self.assertRaisesWithErrorCode(151, keys_lookup.create_uni_exposure, location, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_postcode(self, coverage, oc):
    """Coordinates plus postcode: both kept, best_res is lat/long."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    lat, lon = -33.8688, 151.2093
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'latitude': lat, 'longitude': lon,
                'postalcode': 2000, 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual(lat, exposure['latitude'])
    self.assertEqual(lon, exposure['longitude'])
    self.assertEqual(2000, exposure['med_id'])
    self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
    self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
    for absent in ('address_id', 'zone_id', 'lrg_id'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_postcode_fail(self, coverage, oc):
    """BI cover for a residential occupancy is rejected with coordinates + postcode."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'latitude': -33.8688, 'longitude': 151.2093,
                'postalcode': 2000, 'occupancycode': oc}
    self.assertRaisesWithErrorCode(151, keys_lookup.create_uni_exposure, location, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_pc4(self, coverage, oc):
    """Coordinates plus a PC4 entry: both kept, best_res is lat/long."""
    keys_lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
    lat, lon = -33.8688, 151.2093
    location = {'locperilscovered': 'AA1', 'loc_id': 1,
                'latitude': lat, 'longitude': lon,
                'geogscheme1': 'PC4', 'geogname1': 2000, 'occupancycode': oc}
    exposure = keys_lookup.create_uni_exposure(location, coverage['id'])
    self.assertEqual(lat, exposure['latitude'])
    self.assertEqual(lon, exposure['longitude'])
    self.assertEqual(2000, exposure['med_id'])
    self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
    self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
    for absent in ('address_id', 'zone_id', 'lrg_id'):
        self.assertIsNone(exposure.get(absent))
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_pc4_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'PC4', 'geogname1': 2000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_postcode_pc4(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'PC4', 'geogname1': 2000,
'postalcode': 4000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(latitude, exposure['latitude'])
self.assertEqual(longitude, exposure['longitude'])
self.assertEqual(4000, exposure['med_id'])
self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
self.assertFalse('address_id' in exposure and exposure['address_id'] is not None)
self.assertFalse('zone_id' in exposure and exposure['zone_id'] is not None)
self.assertFalse('lrg_id' in exposure and exposure['lrg_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_postcode_pc4_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'PC4', 'geogname1': 2000,
'postalcode': 4000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_postcode_pc4(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
lookup._supported_gnaf = ['GANSW123456789']
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme2': 'GNAF', 'geogname2': 'GANSW123456789',
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'PC4', 'geogname1': 2000,
'postalcode': 4000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(latitude, exposure['latitude'])
self.assertEqual(longitude, exposure['longitude'])
self.assertEqual('GANSW123456789', exposure['address_id'])
self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
self.assertEqual(4000, exposure['med_id'])
self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
self.assertFalse('zone_id' in exposure and exposure['zone_id'] is not None)
self.assertFalse('lrg_id' in exposure and exposure['lrg_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_postcode_pc4_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme2': 'GNAF', 'geogname2': 'GANSW123456789',
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'PC4', 'geogname1': 2000,
'postalcode': 4000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_cresta_level(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'CRO', 'geogname1': 49, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(49, exposure['zone_id'])
self.assertEqual(EnumResolution.Cresta.value, exposure['zone_type'])
self.assertEqual(EnumResolution.Cresta.value, exposure['best_res'])
self.assertFalse('address_id' in exposure and exposure['address_id'] is not None)
self.assertFalse('latitude' in exposure and exposure['latitude'] is not None)
self.assertFalse('longitude' in exposure and exposure['longitude'] is not None)
self.assertFalse('med_id' in exposure and exposure['med_id'] is not None)
self.assertFalse('lrg_id' in exposure and exposure['lrg_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_cresta_level_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'CRO', 'geogname1': 49, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_cresta(self, coverage, occupancy_code):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
lookup._supported_gnaf = ['GANSW123456789']
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'GNAF', 'geogname1': 'GANSW123456789',
'geogscheme2': 'CRO', 'geogname2': 49,
"occupancycode": occupancy_code}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual('GANSW123456789', exposure['address_id'])
self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
self.assertEqual(49, exposure['zone_id'])
self.assertEqual(EnumResolution.Cresta.value, exposure['zone_type'])
self.assertEqual(EnumResolution.Address.value, exposure['best_res'])
self.assertFalse('latitude' in exposure and exposure['latitude'] is not None)
self.assertFalse('longitude' in exposure and exposure['longitude'] is not None)
self.assertFalse('med_id' in exposure and exposure['med_id'] is not None)
self.assertFalse('lrg_id' in exposure and exposure['lrg_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_cresta_fail(self, coverage, occupancy_code):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'GNAF', 'geogname1': 'GANSW123456789',
'geogscheme2': 'CRO', 'geogname2': 49,
"occupancycode": occupancy_code}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_cresta(self, coverage, occupancy_code):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'CRO', 'geogname1': 49,
"occupancycode": occupancy_code}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(latitude, exposure['latitude'])
self.assertEqual(longitude, exposure['longitude'])
self.assertEqual(49, exposure['zone_id'])
self.assertEqual(EnumResolution.Cresta.value, exposure['zone_type'])
self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
self.assertFalse('address_id' in exposure and exposure['address_id'] is not None)
self.assertFalse('med_id' in exposure and exposure['med_id'] is not None)
self.assertFalse('lrg_id' in exposure and exposure['lrg_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_cresta_fail(self, coverage, occupancy_code):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'CRO', 'geogname1': 49,
"occupancycode": occupancy_code}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_postcode_cresta(self, coverage, occupancy_code):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'CRO', 'geogname1': 49,
'postalcode': 4000,
"occupancycode": occupancy_code}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(49, exposure['zone_id'])
self.assertEqual(EnumResolution.Cresta.value, exposure['zone_type'])
self.assertEqual(4000, exposure['med_id'])
self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
self.assertEqual(EnumResolution.Postcode.value, exposure['best_res'])
self.assertFalse('address_id' in exposure and exposure['address_id'] is not None)
self.assertFalse('latitude' in exposure and exposure['latitude'] is not None)
self.assertFalse('longitude' in exposure and exposure['longitude'] is not None)
self.assertFalse('lrg_id' in exposure and exposure['lrg_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_postcode_cresta_fail(self, coverage, occupancy_code):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'CRO', 'geogname1': 49,
'postalcode': 4000,
"occupancycode": occupancy_code}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_postcode_cresta(self, coverage, occupancy_code):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
lookup._supported_gnaf = ['GANSW123456789']
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'CRO', 'geogname1': 49,
'geogscheme2': 'GNAF', 'geogname2': 'GANSW123456789',
'postalcode': 4000,
"occupancycode": occupancy_code}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(latitude, exposure['latitude'])
self.assertEqual(longitude, exposure['longitude'])
self.assertEqual('GANSW123456789', exposure['address_id'])
self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
self.assertEqual(49, exposure['zone_id'])
self.assertEqual(EnumResolution.Cresta.value, exposure['zone_type'])
self.assertEqual(4000, exposure['med_id'])
self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
self.assertFalse('lrg_id' in exposure and exposure['lrg_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_postcode_cresta_fail(self, coverage, occupancy_code):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'CRO', 'geogname1': 49,
'geogscheme2': 'GNAF', 'geogname2': 'GANSW123456789',
'postalcode': 4000,
"occupancycode": occupancy_code}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_ica_zone_level(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'ICA', 'geogname1': 49, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(49, exposure['lrg_id'])
self.assertEqual(EnumResolution.IcaZone.value, exposure['lrg_type'])
self.assertEqual(EnumResolution.IcaZone.value, exposure['best_res'])
self.assertFalse('address_id' in exposure and exposure['address_id'] is not None)
self.assertFalse('latitude' in exposure and exposure['latitude'] is not None)
self.assertFalse('longitude' in exposure and exposure['longitude'] is not None)
self.assertFalse('med_id' in exposure and exposure['med_id'] is not None)
self.assertFalse('zone_id' in exposure and exposure['zone_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_ica_zone_level_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'ICA', 'geogname1': 49, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_ica_zone_cresta_level(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'ICA', 'geogname1': 49,
'geogscheme2': 'CRO', 'geogname2': 2, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(49, exposure['lrg_id'])
self.assertEqual(EnumResolution.IcaZone.value, exposure['lrg_type'])
self.assertEqual(2, exposure['zone_id'])
self.assertEqual(EnumResolution.Cresta.value, exposure['zone_type'])
self.assertEqual(EnumResolution.Cresta.value, exposure['best_res'])
self.assertFalse('address_id' in exposure and exposure['address_id'] is not None)
self.assertFalse('latitude' in exposure and exposure['latitude'] is not None)
self.assertFalse('longitude' in exposure and exposure['longitude'] is not None)
self.assertFalse('med_id' in exposure and exposure['med_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_ica_zone_cresta_level_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'ICA', 'geogname1': 49,
'geogscheme2': 'CRO', 'geogname2': 2, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_ica_zone(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
lookup._supported_gnaf = ['GANSW123456789']
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'GNAF', 'geogname1': 'GANSW123456789',
'geogscheme2': 'ICA', 'geogname2': 49, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual('GANSW123456789', exposure['address_id'])
self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
self.assertEqual(49, exposure['lrg_id'])
self.assertEqual(EnumResolution.IcaZone.value, exposure['lrg_type'])
self.assertEqual(EnumResolution.Address.value, exposure['best_res'])
self.assertFalse('latitude' in exposure and exposure['latitude'] is not None)
self.assertFalse('longitude' in exposure and exposure['longitude'] is not None)
self.assertFalse('med_id' in exposure and exposure['med_id'] is not None)
self.assertFalse('zone_id' in exposure and exposure['zone_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_ica_zone_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'GNAF', 'geogname1': 'GANSW123456789',
'geogscheme2': 'ICA', 'geogname2': 49, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_ica_zone(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'ICA', 'geogname1': 49, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(latitude, exposure['latitude'])
self.assertEqual(longitude, exposure['longitude'])
self.assertEqual(49, exposure['lrg_id'])
self.assertEqual(EnumResolution.IcaZone.value, exposure['lrg_type'])
self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
self.assertFalse('address_id' in exposure and exposure['address_id'] is not None)
self.assertFalse('zone_id' in exposure and exposure['zone_id'] is not None)
self.assertFalse('med_id' in exposure and exposure['med_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_latlon_ica_zone_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'ICA', 'geogname1': 49, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_postcode_ica_zone(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'ICA', 'geogname1': 49,
'postalcode': 4000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(49, exposure['lrg_id'])
self.assertEqual(EnumResolution.IcaZone.value, exposure['lrg_type'])
self.assertEqual(4000, exposure['med_id'])
self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
self.assertEqual(EnumResolution.Postcode.value, exposure['best_res'])
self.assertFalse('address_id' in exposure and exposure['address_id'] is not None)
self.assertFalse('latitude' in exposure and exposure['latitude'] is not None)
self.assertFalse('longitude' in exposure and exposure['longitude'] is not None)
self.assertFalse('zone_id' in exposure and exposure['zone_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_postcode_ica_zone_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'geogscheme1': 'ICA', 'geogname1': 49,
'postalcode': 4000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_postcode_ica_zone(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
lookup._supported_gnaf = ['GANSW123456789']
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'ICA', 'geogname1': 49,
'geogscheme2': 'GNAF', 'geogname2': 'GANSW123456789',
'postalcode': 4000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(latitude, exposure['latitude'])
self.assertEqual(longitude, exposure['longitude'])
self.assertEqual('GANSW123456789', exposure['address_id'])
self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
self.assertEqual(49, exposure['lrg_id'])
self.assertEqual(EnumResolution.IcaZone.value, exposure['lrg_type'])
self.assertEqual(4000, exposure['med_id'])
self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
self.assertFalse('zone_id' in exposure and exposure['zone_id'] is not None)
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_postcode_ica_zone_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'ICA', 'geogname1': 49,
'geogscheme2': 'GNAF', 'geogname2': 'GANSW123456789',
'postalcode': 4000, 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand(OK_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_postcode_cresta_ica_zone_state(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
lookup._supported_gnaf = ['GANSW123456789']
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'ICA', 'geogname1': 49,
'geogscheme2': 'GNAF', 'geogname2': 'GANSW123456789',
'geogscheme3': 'CRO', 'geogname3': 2,
'postalcode': 4000, 'areacode': 'nsw', 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
exposure = lookup.create_uni_exposure(loc, coverage['id'])
self.assertEqual(latitude, exposure['latitude'])
self.assertEqual(longitude, exposure['longitude'])
self.assertEqual('GANSW123456789', exposure['address_id'])
self.assertEqual(EnumAddressType.GNAF.value, exposure['address_type'])
self.assertEqual(49, exposure['lrg_id'])
self.assertEqual(EnumResolution.IcaZone.value, exposure['lrg_type'])
self.assertEqual(2, exposure['zone_id'])
self.assertEqual(EnumResolution.Cresta.value, exposure['zone_type'])
self.assertEqual(4000, exposure['med_id'])
self.assertEqual(EnumResolution.Postcode.value, exposure['med_type'])
self.assertEqual(EnumResolution.LatLong.value, exposure['best_res'])
self.assertEqual('nsw', exposure['state'].lower())
@parameterized.expand(FAIL_COVERAGES_OCCUPANCY_COMBINATION)
def test_address_latlon_postcode_cresta_ica_zone_state_fail(self, coverage, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
latitude = -33.8688
longitude = 151.2093
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'latitude': latitude, 'longitude': longitude,
'geogscheme1': 'ICA', 'geogname1': 49,
'geogscheme2': 'GNAF', 'geogname2': 'GANSW123456789',
'geogscheme3': 'CRO', 'geogname3': 2,
'postalcode': 4000, 'areacode': 'nsw', 'occupancycode': oc}
loc = copy.deepcopy(default_loc)
self.assertRaisesWithErrorCode(151, lookup.create_uni_exposure, loc, coverage['id'])
@parameterized.expand([[cc, oc] for cc in CONSTRUCTION_CODES["motor"] for oc in DEFAULT_SUPPORTED_OCCUPANCY_CODES])
def test_motor_exposure(self, cc, oc):
lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
default_loc = {'locperilscovered': 'AA1', 'loc_id': 1,
'postalcode': 2000}
loc = copy.deepcopy(default_loc)
loc.update({'constructioncode': cc, 'occupancycode': oc})
exposure = lookup.create_uni_exposure(loc, COVERAGE_TYPES['buildings']['id'])
self.assertEqual(EnumCover.Motor.value, exposure['cover_id'])
class CreateUniExposureOEDFieldsTests(RFBaseTestCase):
    """Checks that OED input fields (YearBuilt, StaticMotorVehicle) land in exposure props."""

    @parameterized.expand([[year] for year in range(date.today().year + 1)])
    def test_year_built_values(self, year):
        """Each year from 0 up to the current year is copied into props['YearBuilt']."""
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        loc = {'locperilscovered': 'AA1', 'loc_id': 1,
               'latitude': -33.8688, 'longitude': 151.2093,
               'postalcode': 2000, 'yearbuilt': year}
        exposure = lookup.create_uni_exposure(loc, COVERAGE_TYPES['buildings']['id'])
        self.assertEqual(year, exposure["props"]["YearBuilt"])

    def test_static_motor_default(self):
        """Without a staticmotorvehicle field, no StaticMotor prop is emitted."""
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        loc = {'locperilscovered': 'AA1', 'loc_id': 1,
               'latitude': -33.8688, 'longitude': 151.2093,
               'postalcode': 2000}
        exposure = lookup.create_uni_exposure(loc, COVERAGE_TYPES['buildings']['id'])
        self.assertNotIn("StaticMotor", exposure["props"])

    @parameterized.expand([[smv] for smv in SMV_ON])
    def test_static_motor_on(self, smv):
        """Every truthy staticmotorvehicle spelling yields StaticMotor == True."""
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        loc = {'locperilscovered': 'AA1', 'loc_id': 1,
               'latitude': -33.8688, 'longitude': 151.2093,
               'postalcode': 2000, 'staticmotorvehicle': smv}
        exposure = lookup.create_uni_exposure(loc, COVERAGE_TYPES['buildings']['id'])
        self.assertEqual(True, exposure["props"]["StaticMotor"])

    @parameterized.expand([[smv] for smv in SMV_OFF])
    def test_static_motor_off(self, smv):
        """Every falsy staticmotorvehicle spelling yields StaticMotor == False."""
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        loc = {'locperilscovered': 'AA1', 'loc_id': 1,
               'latitude': -33.8688, 'longitude': 151.2093,
               'postalcode': 2000, 'staticmotorvehicle': smv}
        exposure = lookup.create_uni_exposure(loc, COVERAGE_TYPES['buildings']['id'])
        self.assertEqual(False, exposure["props"]["StaticMotor"])

    @parameterized.expand([[smv] for smv in ['a', None]])
    def test_static_motor_bad_values(self, smv):
        """Unparseable staticmotorvehicle values are ignored: no StaticMotor prop."""
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        loc = {'locperilscovered': 'AA1', 'loc_id': 1,
               'latitude': -33.8688, 'longitude': 151.2093,
               'postalcode': 2000, 'staticmotorvehicle': smv}
        exposure = lookup.create_uni_exposure(loc, COVERAGE_TYPES['buildings']['id'])
        self.assertNotIn("StaticMotor", exposure["props"])

    @parameterized.expand([[smv, cc] for cc in CONSTRUCTION_CODES["motor"] for smv in SMV_OFF + SMV_ON])
    def test_static_motor_marine(self, smv, cc):
        """Marine construction codes force StaticMotor True; other motor codes follow the flag."""
        lookup = HailAUSKeysLookup(keys_data_directory=None, model_name="hailAus")
        loc = {'locperilscovered': 'AA1', 'loc_id': 1,
               'latitude': -33.8688, 'longitude': 151.2093,
               'postalcode': 2000, 'constructioncode': cc, "staticmotorvehicle": smv}
        exposure = lookup.create_uni_exposure(loc, COVERAGE_TYPES['buildings']['id'])
        # the same loc object is re-inspected to decide whether it is motor_marine
        if lookup._check_in_group(loc, 'motor_marine', lookup._codes_mapping["construction"]):
            self.assertTrue(exposure["props"]["StaticMotor"])
        else:
            self.assertEqual(to_bool(smv), exposure["props"]["StaticMotor"])
# Allow running this test module directly (e.g. `python this_file.py`).
if __name__ == '__main__':
    unittest.main()
| 53.660764
| 121
| 0.668016
| 6,221
| 57,578
| 5.966565
| 0.034239
| 0.055364
| 0.028719
| 0.046393
| 0.919096
| 0.91166
| 0.902931
| 0.890457
| 0.876583
| 0.862897
| 0
| 0.032581
| 0.21213
| 57,578
| 1,072
| 122
| 53.710821
| 0.785645
| 0.015874
| 0
| 0.796296
| 0
| 0
| 0.153684
| 0
| 0
| 0
| 0
| 0
| 0.300926
| 1
| 0.078704
| false
| 0
| 0.011574
| 0
| 0.096065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
493d7cc545169b4eff317db6ddd80a036f471852
| 105
|
py
|
Python
|
leo/strategy/__init__.py
|
Leonardo-YXH/easytrader
|
2216f2c45ad333afc13ab90ed4bfbc39708c31f5
|
[
"MIT"
] | null | null | null |
leo/strategy/__init__.py
|
Leonardo-YXH/easytrader
|
2216f2c45ad333afc13ab90ed4bfbc39708c31f5
|
[
"MIT"
] | null | null | null |
leo/strategy/__init__.py
|
Leonardo-YXH/easytrader
|
2216f2c45ad333afc13ab90ed4bfbc39708c31f5
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
"""
@author: leonardo
@created time: 2020-09-19
@last modified time:2020-09-19
"""
| 17.5
| 30
| 0.638095
| 16
| 105
| 4.1875
| 0.75
| 0.238806
| 0.298507
| 0.358209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184783
| 0.12381
| 105
| 6
| 31
| 17.5
| 0.543478
| 0.914286
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4966830b8d503ada91c95735629955df7eb9ef30
| 64,066
|
py
|
Python
|
phonopy/phonon/character_table.py
|
ttadano/phonopy
|
8c03955b2636b22b86e9324f5afcfa36396fa988
|
[
"BSD-3-Clause"
] | 1
|
2021-07-10T20:15:31.000Z
|
2021-07-10T20:15:31.000Z
|
phonopy/phonon/character_table.py
|
ttadano/phonopy
|
8c03955b2636b22b86e9324f5afcfa36396fa988
|
[
"BSD-3-Clause"
] | null | null | null |
phonopy/phonon/character_table.py
|
ttadano/phonopy
|
8c03955b2636b22b86e9324f5afcfa36396fa988
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (C) 2020 Atsushi Togo
# All rights reserved.
#
# This file is part of phonopy.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the phonopy project nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# from Wikipedia http://en.wikipedia.org/wiki/List_of_character_tables_for_chemically_important_3D_point_groups
character_table = {
# C1 (1)
'1':
[{'rotation_list': ('E'),
'character_table': {'A' : ( 1 )},
'mapping_table': {'E': ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),)}}],
# Ci (2)
'-1':
[{'rotation_list': ('E', 'i'),
'character_table': {'Ag' : ( 1, 1 ),
'Au' : ( 1,-1 )},
'mapping_table': {'E': ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'i': (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),)}}],
# C2 (3)
'2':
[{'rotation_list': ('E', 'C2'),
'character_table': {'A' : (1, 1),
'B' : (1, -1)},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C2': (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),)}}],
# C1v = Cs = C1h (4)
'm':
[{'rotation_list': ('E', 'sgh'),
'character_table': {'A\'' : ( 1, 1 ),
'A\'\'': ( 1,-1 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'sgh': ((( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),)}}],
# C2h (5)
'2/m':
[{'rotation_list': ('E', 'C2', 'i', 'sgh'),
'character_table': {'Ag': ( 1, 1, 1, 1 ),
'Bg': ( 1,-1, 1,-1 ),
'Au': ( 1, 1,-1,-1 ),
'Bu': ( 1,-1,-1, 1 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'sgh': ((( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),)}}],
# D2 (6)
'222':
[{'rotation_list': ('E', 'C2', 'C2x', 'C2y'),
'character_table': {'A' : ( 1, 1, 1, 1 ),
'B1': ( 1, 1,-1,-1 ),
'B2': ( 1,-1,-1, 1 ),
'B3': ( 1,-1, 1,-1 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'C2y': (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'C2x': ((( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),)}}],
# C2v (7)
'mm2':
[{'rotation_list': ('E', 'C2', 'sgvxz', 'sgvyz'),
'character_table': {'A1': ( 1, 1, 1, 1 ),
'A2': ( 1, 1,-1,-1 ),
'B1': ( 1,-1, 1,-1 ),
'B2': ( 1,-1,-1, 1 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'sgvxz': ((( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'sgvyz': (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),)}}],
# D2h (8)
'mmm':
[{'rotation_list':
('E', 'C2', 'C2x', 'C2y', 'i', 'sgxy', 'sgxz', 'sgyz'),
'character_table': {'Ag' : ( 1, 1, 1, 1, 1, 1, 1, 1 ),
'B1g': ( 1, 1,-1,-1, 1, 1,-1,-1 ),
'B2g': ( 1,-1,-1, 1, 1,-1, 1,-1 ),
'B3g': ( 1,-1, 1,-1, 1,-1,-1, 1 ),
'Au' : ( 1, 1, 1, 1,-1,-1,-1,-1 ),
'B1u': ( 1, 1,-1,-1,-1,-1, 1, 1 ),
'B2u': ( 1,-1,-1, 1,-1, 1,-1, 1 ),
'B3u': ( 1,-1, 1,-1,-1, 1, 1,-1 ) },
'mapping_table': { 'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'C2y' : (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'C2x' : ((( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'sgxy': ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'sgxz': ((( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'sgyz': (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),)}}],
# C4 (9)
'4':
[{'rotation_list': ('E', 'C4', 'C2'),
'character_table': {'A': ( 1, 1, 1 ),
'B': ( 1,-1, 1 ),
'E': ( 2, 0,-2 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C4': ((( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2': (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),)}}],
# S4 (10)
'-4':
[{'rotation_list': ('E', 'S4', 'C2'),
'character_table': {'A': ( 1, 1, 1 ),
'B': ( 1,-1, 1 ),
'E': ( 2, 0,-2 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C2': (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'S4': ((( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),)}}],
# C4h (11)
'4/m':
[{'rotation_list': ('E', 'C4', 'C2', 'i', 'S4', 'sgh'),
'character_table': {'Ag': ( 1, 1, 1, 1, 1, 1 ),
'Bg': ( 1,-1, 1, 1,-1, 1 ),
'Eg': ( 2, 0,-2, 2, 0,-2 ),
'Au': ( 1, 1, 1,-1,-1,-1 ),
'Bu': ( 1,-1, 1,-1, 1,-1 ),
'Eu': ( 2, 0,-2,-2, 0, 2 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C4' : ((( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'S4' : ((( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, -1 ),),
(( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),),
'sgh': ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),)}}],
# D4 (12)
'422':
[{'rotation_list': ('E', 'C4', 'C2', 'C2\'', 'C2\'\''),
'character_table': {'A1': ( 1, 1, 1, 1, 1 ),
'A2': ( 1, 1, 1,-1,-1 ),
'B1': ( 1,-1, 1, 1,-1 ),
'B2': ( 1,-1, 1,-1, 1 ),
'E' : ( 2, 0,-2, 0, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C4' : ((( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'C2\'' : ((( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'C2\'\'': ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),)}}],
# C4v (13)
'4mm':
[{'rotation_list': ('E', 'C4', 'C2', 'sgv', 'sgd'),
'character_table': {'A1': ( 1, 1, 1, 1, 1 ),
'A2': ( 1, 1, 1,-1,-1 ),
'B1': ( 1,-1, 1, 1,-1 ),
'B2': ( 1,-1, 1,-1, 1 ),
'E' : ( 2, 0,-2, 0, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C4' : ((( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'sgv': (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'sgd': ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),)}}],
# D2d (14)
'-42m':
[{'rotation_list': ('E', 'S4', 'C2z', 'C2\'', 'sgd'),
'character_table': {'A1': ( 1, 1, 1, 1, 1 ),
'A2': ( 1, 1, 1,-1,-1 ),
'B1': ( 1,-1, 1, 1,-1 ),
'B2': ( 1,-1, 1,-1, 1 ),
'E': ( 2, 0,-2, 0, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'S4' : ((( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),),
'C2z' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'C2\'': (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),
(( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'sgd': ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),)}},
{'rotation_list': ('E', 'S4', 'C2z', 'C2\'\'', 'sgv'),
'character_table': {'A1': ( 1, 1, 1, 1, 1 ),
'A2': ( 1, 1, 1,-1,-1 ),
'B1': ( 1,-1, 1, 1,-1 ),
'B2': ( 1,-1, 1,-1, 1 ),
'E': ( 2, 0,-2, 0, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'S4' : ((( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),),
'C2z' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'C2\'\'': ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 ) ),
(( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),),
'sgv' : (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),)}}],
# D4h (15)
'4/mmm':
[{'rotation_list': ('E', 'C4', 'C2', 'C2\'', 'C2\'\'', 'i',
'S4', 'sgh', 'sgv', 'sgd'),
'character_table': {'A1g': ( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ),
'A2g': ( 1, 1, 1,-1,-1, 1, 1, 1,-1,-1 ),
'B1g': ( 1,-1, 1, 1,-1, 1,-1, 1, 1,-1 ),
'B2g': ( 1,-1, 1,-1, 1, 1,-1, 1,-1, 1 ),
'Eg' : ( 2, 0,-2, 0, 0, 2, 0,-2, 0, 0 ),
'A1u': ( 1, 1, 1, 1, 1,-1,-1,-1,-1,-1 ),
'A2u': ( 1, 1, 1,-1,-1,-1,-1,-1, 1, 1 ),
'B1u': ( 1,-1, 1, 1,-1,-1, 1,-1,-1, 1 ),
'B2u': ( 1,-1, 1,-1, 1,-1, 1,-1, 1,-1 ),
'Eu' : ( 2, 0,-2, 0, 0,-2, 0, 2, 0, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C4' : ((( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 ) ),
( ( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'C2\'' : ((( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'C2\'\'': ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 ) ),
(( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'S4' : ((( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),),
'sgh' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'sgv' : (((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'sgd' : ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),)}}],
# C3 (16)
'3':
[{'rotation_list': ('E', 'C3'),
'character_table': {'A': ( 1, 1 ),
'E': ( 2,-1 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C3': ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),)}}],
# S6 (17)
'-3':
[{'rotation_list': ('E', 'C3', 'i', 'S6'),
'character_table': {'Ag': ( 1, 1, 1, 1 ),
'Eg': ( 2,-1, 2,-1 ),
'Au': ( 1, 1,-1,-1 ),
'Eu': ( 2,-1,-2, 1 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C3': ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'S6': ((( 0, 1, 0 ),
(-1, 1, 0 ),
( 0, 0,-1 )),
(( 1,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )))}}],
# D3 (18)
'32':
[{'rotation_list': ('E', 'C3', 'C2\''),
'character_table': {'A1': ( 1, 1, 1 ),
'A2': ( 1, 1,-1 ),
'E' : ( 2,-1, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2\'': ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),
(( 1,-1, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
(-1, 1, 0 ),
( 0, 0,-1 )),)}}],
# C3v (19)
'3m':
[{'rotation_list': ('E', 'C3', 'sgv'),
'character_table': {'A1': (1, 1, 1),
'A2': (1, 1,-1),
'E' : (2,-1, 0)},
'mapping_table': {'E' : ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0, 1)),),
'C3' : ((( 0,-1, 0),
( 1,-1, 0),
( 0, 0, 1)),
((-1, 1, 0),
(-1, 0, 0),
( 0, 0, 1)),),
'sgv' : ((( 0,-1, 0),
(-1, 0, 0),
( 0, 0, 1)),
((-1, 1, 0),
( 0, 1, 0),
( 0, 0, 1)),
(( 1, 0, 0),
( 1,-1, 0),
( 0, 0, 1)),)}}],
# D3d (20)
'-3m':
[{'rotation_list': ('E', 'C3', 'C2\'', 'i', 'S6', 'sgd'),
'character_table': {'A1g': ( 1, 1, 1, 1, 1, 1 ),
'A2g': ( 1, 1,-1, 1, 1,-1 ),
'Eg' : ( 2,-1, 0, 2,-1, 0 ),
'A1u': ( 1, 1, 1,-1,-1,-1 ),
'A2u': ( 1, 1,-1,-1,-1, 1 ),
'Eu' : ( 2,-1, 0,-2, 1, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2\'': ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),
(( 1,-1, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
(-1, 1, 0 ),
( 0, 0,-1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'S6' : ((( 0, 1, 0 ),
(-1, 1, 0 ),
( 0, 0,-1 )),
(( 1,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),),
'sgd' : ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),)}},
{'rotation_list': ('E', 'C3', 'C2\'', 'i', 'S6', 'sgd'),
'character_table': {'A1g': ( 1, 1, 1, 1, 1, 1 ),
'A2g': ( 1, 1,-1, 1, 1,-1 ),
'Eg' : ( 2,-1, 0, 2,-1, 0 ),
'A1u': ( 1, 1, 1,-1,-1,-1 ),
'A2u': ( 1, 1,-1,-1,-1, 1 ),
'Eu' : ( 2,-1, 0,-2, 1, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2\'': ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
((-1, 1, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),
(( 1, 0, 0 ),
( 1,-1, 0 ),
( 0, 0,-1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'S6' : ((( 0, 1, 0 ),
(-1, 1, 0 ),
( 0, 0,-1 )),
(( 1,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),),
'sgd' : ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 1,-1, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),
((-1, 0, 0 ),
(-1, 1, 0 ),
( 0, 0, 1 )),)}}],
# C6 (21)
'6':
[{'rotation_list': ('E', 'C6', 'C3', 'C2'),
'character_table': {'A' : ( 1, 1, 1, 1),
'B' : ( 1,-1, 1,-1),
'E1': ( 2, 1,-1,-2),
'E2': ( 2,-1,-1, 2)},
'mapping_table': {'E' : ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0, 1)),),
'C6' : ((( 1,-1, 0),
( 1, 0, 0),
( 0, 0, 1)),
(( 0, 1, 0),
(-1, 1, 0),
( 0, 0, 1)),),
'C3' : ((( 0,-1, 0),
( 1,-1, 0),
( 0, 0, 1)),
((-1, 1, 0),
(-1, 0, 0),
( 0, 0, 1)),),
'C2' : (((-1, 0, 0),
( 0,-1, 0),
( 0, 0, 1)),)}}],
# C3h (22)
'-6':
[{'rotation_list': ('E', 'C3', 'sgh', 'S3'),
'character_table': {'A\'' : ( 1, 1, 1, 1),
'E\'' : ( 2,-1, 2,-1),
'A\'\'' : ( 1, 1,-1,-1),
'E\'\'' : ( 2,-1,-2, 1)},
'mapping_table': {'E' : ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0, 1)),),
'C3' : ((( 0,-1, 0),
( 1,-1, 0),
( 0, 0, 1)),
((-1, 1, 0),
(-1, 0, 0),
( 0, 0, 1)),),
'sgh' : ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0,-1)),),
'S3' : (((-1, 1, 0),
(-1, 0, 0),
( 0, 0,-1)),
(( 0,-1, 0),
( 1,-1, 0),
( 0, 0,-1)),)}}],
# C6h (23)
'6/m':
[{'rotation_list': ('E', 'C6', 'C3', 'C2', 'i', 'S3', 'S6', 'sgh'),
'character_table': {'Ag': ( 1, 1, 1, 1, 1, 1, 1, 1 ),
'Bg': ( 1,-1, 1,-1, 1,-1, 1,-1 ),
'E1g': ( 2, 1,-1,-2, 2, 1,-1,-2 ),
'E2g': ( 2,-1,-1, 2, 2,-1,-1, 2 ),
'Au': ( 1, 1, 1, 1,-1,-1,-1,-1 ),
'Bu': ( 1,-1, 1,-1,-1, 1,-1, 1 ),
'E1u': ( 2, 1,-1,-2,-2,-1, 1, 2 ),
'E2u': ( 2,-1,-1, 2,-2, 1, 1,-2 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C6' : ((( 1,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
(-1, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'S3' : (((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0,-1 )),),
'S6' : ((( 0, 1, 0 ),
(-1, 1, 0 ),
( 0, 0,-1 )),
(( 1,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),),
'sgh' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),)}}],
# D6 (24)
'622':
[{'rotation_list': ('E', 'C6', 'C3', 'C2', 'C2\'', 'C2\'\''),
'character_table': {'A1' : ( 1, 1, 1, 1, 1, 1),
'A2' : ( 1, 1, 1, 1,-1,-1),
'B1' : ( 1,-1, 1,-1, 1,-1),
'B2' : ( 1,-1, 1,-1,-1, 1),
'E1' : ( 2, 1,-1,-2, 0, 0),
'E2' : ( 2,-1,-1, 2, 0, 0)},
'mapping_table': { 'E' : ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0, 1)),),
'C6' : ((( 1,-1, 0),
( 1, 0, 0),
( 0, 0, 1)),
(( 0, 1, 0),
(-1, 1, 0),
( 0, 0, 1)),),
'C3' : ((( 0,-1, 0),
( 1,-1, 0),
( 0, 0, 1)),
((-1, 1, 0),
(-1, 0, 0),
( 0, 0, 1)),),
'C2' : (((-1, 0, 0),
( 0,-1, 0),
( 0, 0, 1)),),
'C2\'' : ((( 0, 1, 0),
( 1, 0, 0),
( 0, 0,-1)),
(( 1,-1, 0),
( 0,-1, 0),
( 0, 0,-1)),
((-1, 0, 0),
(-1, 1, 0),
( 0, 0,-1)),),
'C2\'\'' : ((( 0,-1, 0),
(-1, 0, 0),
( 0, 0,-1)),
((-1, 1, 0),
( 0, 1, 0),
( 0, 0,-1)),
(( 1, 0, 0),
( 1,-1, 0),
( 0, 0,-1)),)}}],
# C6v (25)
'6mm':
[{'rotation_list': ('E', 'C6', 'C3', 'C2', 'sgv', 'sgd'),
'character_table': {'A1': ( 1, 1, 1, 1, 1, 1 ),
'A2': ( 1, 1, 1, 1,-1,-1 ),
'B1': ( 1,-1, 1,-1, 1,-1 ),
'B2': ( 1,-1, 1,-1,-1, 1 ),
'E1': ( 2, 1,-1,-2, 0, 0 ),
'E2': ( 2,-1,-1, 2, 0, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C6' : ((( 1,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
(-1, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'sgv': ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),),
'sgd': ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 1,-1, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),
((-1, 0, 0 ),
(-1, 1, 0 ),
( 0, 0, 1 )),)}}],
# D3h (26)
'-6m2':
[{'rotation_list': ('E', 'C3', 'C\'2', 'sgh', 'S3', 'sgv'),
'character_table': {'A1\'' : ( 1, 1, 1, 1, 1, 1 ),
'A2\'' : ( 1, 1,-1, 1, 1,-1 ),
'E\'' : ( 2,-1, 0, 2,-1, 0 ),
'A1\'\'': ( 1, 1, 1,-1,-1,-1 ),
'A2\'\'': ( 1, 1,-1,-1,-1, 1 ),
'E\'\'' : ( 2,-1, 0,-2, 1, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C\'2': ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 1, 0, 0 ),
( 1,-1, 0 ),
( 0, 0,-1 )),
((-1, 1, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'sgh' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'S3' : ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0,-1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),),
'sgv' : ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),)}}],
# D6h (27)
'6/mmm':
[{'rotation_list': ('E', 'C6', 'C3', 'C2', 'C2\'', 'C2\'\'',
'i', 'S3', 'S6', 'sgh', 'sgd', 'sgv'),
'character_table': {'A1g': ( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ),
'A2g': ( 1, 1, 1, 1,-1,-1, 1, 1, 1, 1,-1,-1 ),
'B1g': ( 1,-1, 1,-1, 1,-1, 1,-1, 1,-1, 1,-1 ),
'B2g': ( 1,-1, 1,-1,-1, 1, 1,-1, 1,-1,-1, 1 ),
'E1g': ( 2, 1,-1,-2, 0, 0, 2, 1,-1,-2, 0, 0 ),
'E2g': ( 2,-1,-1, 2, 0, 0, 2,-1,-1, 2, 0, 0 ),
'A1u': ( 1, 1, 1, 1, 1, 1,-1,-1,-1,-1,-1,-1 ),
'A2u': ( 1, 1, 1, 1,-1,-1,-1,-1,-1,-1, 1, 1 ),
'B1u': ( 1,-1, 1,-1, 1,-1,-1, 1,-1, 1,-1, 1 ),
'B2u': ( 1,-1, 1,-1,-1, 1,-1, 1,-1, 1, 1,-1 ),
'E1u': ( 2, 1,-1,-2, 0, 0,-2,-1, 1, 2, 0, 0 ),
'E2u': ( 2,-1,-1, 2, 0, 0,-2, 1, 1,-2, 0, 0 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C6' : ((( 1,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
(-1, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'C2\'' : ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
((-1, 1, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),
(( 1, 0, 0 ),
( 1,-1, 0 ),
( 0, 0,-1 )),),
'C2\'\'': ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),
(( 1,-1, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
(-1, 1, 0 ),
( 0, 0,-1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'S3' : (((-1, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
( 1,-1, 0 ),
( 0, 0,-1 )),),
'S6' : ((( 0, 1, 0 ),
(-1, 1, 0 ),
( 0, 0,-1 )),
(( 1,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),),
'sgh' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'sgd' : ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 1,-1, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),
((-1, 0, 0 ),
(-1, 1, 0 ),
( 0, 0, 1 )),),
'sgv' : ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),
((-1, 1, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 1,-1, 0 ),
( 0, 0, 1 )),)}}],
# T (28)
'23':
[{'rotation_list': ('E', 'C3', 'C2'),
'character_table': {'A' : (1, 1, 1),
'E' : (2,-1, 2),
'T' : (3, 0,-1)},
'mapping_table': {'E' : ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0, 1)),),
'C3' : ((( 0, 0, 1),
( 1, 0, 0),
( 0, 1, 0)),
(( 0, 0, 1),
(-1, 0, 0),
( 0,-1, 0)),
(( 0, 0,-1),
(-1, 0, 0),
( 0, 1, 0)),
(( 0, 0,-1),
( 1, 0, 0),
( 0,-1, 0)),
(( 0, 1, 0),
( 0, 0, 1),
( 1, 0, 0)),
(( 0,-1, 0),
( 0, 0, 1),
(-1, 0, 0)),
(( 0, 1, 0),
( 0, 0,-1),
(-1, 0, 0)),
(( 0,-1, 0),
( 0, 0,-1),
( 1, 0, 0)),),
'C2' : (((-1, 0, 0),
( 0,-1, 0),
( 0, 0, 1)),
((-1, 0, 0),
( 0, 1, 0),
( 0, 0,-1)),
(( 1, 0, 0),
( 0,-1, 0),
( 0, 0,-1)),)}}],
# Th (29)
'm-3':
[{'rotation_list': ('E', 'C3', 'C2', 'i', 'S6', 'sgh'),
'character_table': {'Ag' : ( 1, 1, 1, 1, 1, 1),
'Au' : ( 1, 1, 1,-1,-1,-1),
'Eg' : ( 2,-1, 2, 2,-1, 2),
'Eu' : ( 2,-1, 2,-2, 1,-2),
'Tg' : ( 3, 0,-1, 3, 0,-1),
'Tu' : ( 3, 0,-1,-3, 0, 1)},
'mapping_table': {'E' : ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0, 1)),),
'C3' : ((( 0, 0, 1),
( 1, 0, 0),
( 0, 1, 0)),
(( 0, 0, 1),
(-1, 0, 0),
( 0,-1, 0)),
(( 0, 0,-1),
(-1, 0, 0),
( 0, 1, 0)),
(( 0, 0,-1),
( 1, 0, 0),
( 0,-1, 0)),
(( 0, 1, 0),
( 0, 0, 1),
( 1, 0, 0)),
(( 0,-1, 0),
( 0, 0, 1),
(-1, 0, 0)),
(( 0, 1, 0),
( 0, 0,-1),
(-1, 0, 0)),
(( 0,-1, 0),
( 0, 0,-1),
( 1, 0, 0)),),
'C2' : (((-1, 0, 0),
( 0,-1, 0),
( 0, 0, 1)),
((-1, 0, 0),
( 0, 1, 0),
( 0, 0,-1)),
(( 1, 0, 0),
( 0,-1, 0),
( 0, 0,-1)),),
'i' : (((-1, 0, 0),
( 0,-1, 0),
( 0, 0,-1)),),
'S6' : ((( 0, 0,-1),
(-1, 0, 0),
( 0,-1, 0)),
(( 0, 0,-1),
( 1, 0, 0),
( 0, 1, 0)),
(( 0, 0, 1),
( 1, 0, 0),
( 0,-1, 0)),
(( 0, 0, 1),
(-1, 0, 0),
( 0, 1, 0)),
(( 0,-1, 0),
( 0, 0,-1),
(-1, 0, 0)),
(( 0, 1, 0),
( 0, 0,-1),
( 1, 0, 0)),
(( 0,-1, 0),
( 0, 0, 1),
( 1, 0, 0)),
(( 0, 1, 0),
( 0, 0, 1),
(-1, 0, 0)),),
'sgh': ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0,-1)),
(( 1, 0, 0),
( 0,-1, 0),
( 0, 0, 1)),
((-1, 0, 0),
( 0, 1, 0),
( 0, 0, 1)),)}}],
# O (30)
'432':
[{'rotation_list': ('E', 'C4', 'C2', 'C3', 'C2\''),
'character_table': {'A1' : ( 1, 1, 1, 1, 1),
'A2' : ( 1,-1, 1, 1,-1),
'E' : ( 2, 0, 2,-1, 0),
'T1' : ( 3, 1,-1, 0,-1),
'T2' : ( 3,-1,-1, 0, 1)},
'mapping_table': {'E' : ((( 1, 0, 0),
( 0, 1, 0),
( 0, 0, 1)),),
'C4' : ((( 0, 1, 0),
(-1, 0, 0),
( 0, 0, 1)),
(( 0,-1, 0),
( 1, 0, 0),
( 0, 0, 1)),
(( 1, 0, 0),
( 0, 0, 1),
( 0,-1, 0)),
(( 1, 0, 0),
( 0, 0,-1),
( 0, 1, 0)),
(( 0, 0, 1),
( 0, 1, 0),
(-1, 0, 0)),
(( 0, 0,-1),
( 0, 1, 0),
( 1, 0, 0)),),
'C2' : (((-1, 0, 0),
( 0,-1, 0),
( 0, 0, 1)),
((-1, 0, 0),
( 0, 1, 0),
( 0, 0,-1)),
(( 1, 0, 0),
( 0,-1, 0),
( 0, 0,-1)),),
'C3' : ((( 0, 0, 1),
( 1, 0, 0),
( 0, 1, 0)),
(( 0, 0, 1),
(-1, 0, 0),
( 0,-1, 0)),
(( 0, 0,-1),
(-1, 0, 0),
( 0, 1, 0)),
(( 0, 0,-1),
( 1, 0, 0),
( 0,-1, 0)),
(( 0, 1, 0),
( 0, 0, 1),
( 1, 0, 0)),
(( 0,-1, 0),
( 0, 0, 1),
(-1, 0, 0)),
(( 0, 1, 0),
( 0, 0,-1),
(-1, 0, 0)),
(( 0,-1, 0),
( 0, 0,-1),
( 1, 0, 0)),),
'C2\'': ((( 0,-1, 0),
(-1, 0, 0),
( 0, 0,-1)),
(( 0, 1, 0),
( 1, 0, 0),
( 0, 0,-1)),
((-1, 0, 0),
( 0, 0, 1),
( 0, 1, 0)),
((-1, 0, 0),
( 0, 0,-1),
( 0,-1, 0)),
(( 0, 0, 1),
( 0,-1, 0),
( 1, 0, 0)),
(( 0, 0,-1),
( 0,-1, 0),
(-1, 0, 0)),)}}],
# Td (31)
'-43m':
[{'rotation_list': ('E', 'C3', 'C2', 'S4', 'sgd'),
'character_table': {'A1': ( 1, 1, 1, 1, 1 ),
'A2': ( 1, 1, 1,-1,-1 ),
'E' : ( 2,-1, 2, 0, 0 ),
'T1': ( 3, 0,-1, 1,-1 ),
'T2': ( 3, 0,-1,-1, 1 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0, 0, 1 ),
( 1, 0, 0 ),
( 0, 1, 0 )),
(( 0, 1, 0 ),
( 0, 0, 1 ),
( 1, 0, 0 )),
(( 0, 0,-1 ),
( 1, 0, 0 ),
( 0,-1, 0 )),
(( 0,-1, 0 ),
( 0, 0,-1 ),
( 1, 0, 0 )),
(( 0, 0,-1 ),
(-1, 0, 0 ),
( 0, 1, 0 )),
(( 0, 1, 0 ),
( 0, 0,-1 ),
(-1, 0, 0 )),
(( 0, 0, 1 ),
(-1, 0, 0 ),
( 0,-1, 0 )),
(( 0,-1, 0 ),
( 0, 0, 1 ),
(-1, 0, 0 )),),
'C2' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'S4' : ((( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 0, 1 ),
( 0,-1, 0 )),
((-1, 0, 0 ),
( 0, 0,-1 ),
( 0, 1, 0 )),
(( 0, 0,-1 ),
( 0,-1, 0 ),
( 1, 0, 0 )),
(( 0, 0, 1 ),
( 0,-1, 0 ),
(-1, 0, 0 )),),
'sgd': ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0, 0,-1 ),
( 0,-1, 0 )),
(( 0, 0,-1 ),
( 0, 1, 0 ),
(-1, 0, 0 )),
(( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0, 0, 1 ),
( 0, 1, 0 )),
(( 0, 0, 1 ),
( 0, 1, 0 ),
( 1, 0, 0 )),)}}],
# Oh (32)
'm-3m':
[{'rotation_list': ('E', 'C3', 'C2', 'C4', 'C4^2', 'i',
'S4', 'S6', 'sgh', 'sgd'),
'character_table': {'A1g': ( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ),
'A2g': ( 1, 1,-1,-1, 1, 1,-1, 1, 1,-1 ),
'Eg' : ( 2,-1, 0, 0, 2, 2, 0,-1, 2, 0 ),
'T1g': ( 3, 0,-1, 1,-1, 3, 1, 0,-1,-1 ),
'T2g': ( 3, 0, 1,-1,-1, 3,-1, 0,-1, 1 ),
'A1u': ( 1, 1, 1, 1, 1,-1,-1,-1,-1,-1 ),
'A2u': ( 1, 1,-1,-1, 1,-1, 1,-1,-1, 1 ),
'Eu' : ( 2,-1, 0, 0, 2,-2, 0, 1,-2, 0 ),
'T1u': ( 3, 0,-1, 1,-1,-3,-1, 0, 1, 1 ),
'T2u': ( 3, 0, 1,-1,-1,-3, 1, 0, 1,-1 )},
'mapping_table': {'E' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),),
'C3' : ((( 0, 0, 1 ),
( 1, 0, 0 ),
( 0, 1, 0 )),
(( 0, 1, 0 ),
( 0, 0, 1 ),
( 1, 0, 0 )),
(( 0, 0,-1 ),
( 1, 0, 0 ),
( 0,-1, 0 )),
(( 0,-1, 0 ),
( 0, 0,-1 ),
( 1, 0, 0 )),
(( 0, 0,-1 ),
(-1, 0, 0 ),
( 0, 1, 0 )),
(( 0, 1, 0 ),
( 0, 0,-1 ),
(-1, 0, 0 )),
(( 0, 0, 1 ),
(-1, 0, 0 ),
( 0,-1, 0 )),
(( 0,-1, 0 ),
( 0, 0, 1 ),
(-1, 0, 0 )),),
'C2' : ((( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 0, 1 ),
( 0, 1, 0 )),
(( 0, 0, 1 ),
( 0,-1, 0 ),
( 1, 0, 0 )),
(( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 0,-1 ),
( 0,-1, 0 )),
(( 0, 0,-1 ),
( 0,-1, 0 ),
(-1, 0, 0 )),),
'C4' : ((( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0, 0,-1 ),
( 0, 1, 0 )),
(( 1, 0, 0 ),
( 0, 0, 1 ),
( 0,-1, 0 )),
(( 0, 0, 1 ),
( 0, 1, 0 ),
(-1, 0, 0 )),
(( 0, 0,-1 ),
( 0, 1, 0 ),
( 1, 0, 0 )),),
'C4^2': (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),),
'i' : (((-1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0,-1 )),),
'S4' : ((( 0, 1, 0 ),
(-1, 0, 0 ),
( 0, 0,-1 )),
(( 0,-1, 0 ),
( 1, 0, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 0, 1 ),
( 0,-1, 0 )),
((-1, 0, 0 ),
( 0, 0,-1 ),
( 0, 1, 0 )),
(( 0, 0,-1 ),
( 0,-1, 0 ),
( 1, 0, 0 )),
(( 0, 0, 1 ),
( 0,-1, 0 ),
(-1, 0, 0 )),),
'S6' : ((( 0, 0,-1 ),
(-1, 0, 0 ),
( 0,-1, 0 )),
(( 0,-1, 0 ),
( 0, 0,-1 ),
(-1, 0, 0 )),
(( 0, 0, 1 ),
(-1, 0, 0 ),
( 0, 1, 0 )),
(( 0, 1, 0 ),
( 0, 0, 1 ),
(-1, 0, 0 )),
(( 0, 0, 1 ),
( 1, 0, 0 ),
( 0,-1, 0 )),
(( 0,-1, 0 ),
( 0, 0, 1 ),
( 1, 0, 0 )),
(( 0, 0,-1 ),
( 1, 0, 0 ),
( 0, 1, 0 )),
(( 0, 1, 0 ),
( 0, 0,-1 ),
( 1, 0, 0 )),),
'sgh' : ((( 1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0,-1 )),
((-1, 0, 0 ),
( 0, 1, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0,-1, 0 ),
( 0, 0, 1 )),),
'sgd' : ((( 0,-1, 0 ),
(-1, 0, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0, 0,-1 ),
( 0,-1, 0 )),
(( 0, 0,-1 ),
( 0, 1, 0 ),
(-1, 0, 0 )),
(( 0, 1, 0 ),
( 1, 0, 0 ),
( 0, 0, 1 )),
(( 1, 0, 0 ),
( 0, 0, 1 ),
( 0, 1, 0 )),
(( 0, 0, 1 ),
( 0, 1, 0 ),
( 1, 0, 0 )),)}}],
}
| 45.958393
| 111
| 0.127462
| 5,264
| 64,066
| 1.530015
| 0.051672
| 0.302458
| 0.232059
| 0.249317
| 0.800844
| 0.764092
| 0.741122
| 0.720139
| 0.701515
| 0.690837
| 0
| 0.236649
| 0.695736
| 64,066
| 1,393
| 112
| 45.991386
| 0.176525
| 0.029594
| 0
| 0.864969
| 0
| 0.009259
| 0.040868
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
49887d93423373702a76fa054bed6d25a9670a47
| 3,108
|
py
|
Python
|
plots_results.py
|
valko073/rl_insurance
|
74fe7dd328358152f2aca8b2fe55d2539e8838d9
|
[
"MIT"
] | null | null | null |
plots_results.py
|
valko073/rl_insurance
|
74fe7dd328358152f2aca8b2fe55d2539e8838d9
|
[
"MIT"
] | null | null | null |
plots_results.py
|
valko073/rl_insurance
|
74fe7dd328358152f2aca8b2fe55d2539e8838d9
|
[
"MIT"
] | null | null | null |
import neptune
from config import EnvConfig
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import pickle

# %matplotlib qt  -- IPython magic; it is a SyntaxError in a plain .py module.
# Run it manually in IPython/Jupyter if an interactive Qt backend is wanted.

plt.rcParams.update({'font.size': 22})

comet_cfg = EnvConfig()
session = neptune.Session(api_token=comet_cfg.neptune_token)
project = session.get_project(project_qualified_name=comet_cfg.neptune_project_name)


def plot_insurance_experiment(project, experiment_id):
    """Fetch one Neptune experiment and plot price vs. usage for all insurances.

    Creates one figure with three stacked subplots (one per insurance agent).
    Average insurance price is drawn on the left y-axis and total insurance
    usage on a secondary right y-axis of the same subplot.

    Parameters:
        project: a neptune Project handle to query experiments from.
        experiment_id: experiment identifier string, e.g. 'GRID-293'.

    Returns:
        The DataFrame of numeric channel values fetched for the experiment.
    """
    exp = project.get_experiments(experiment_id)[0]
    vals = exp.get_numeric_channels_values(
        'avg_insurance_cost_0', 'num_insured_0',
        'avg_insurance_cost_1', 'num_insured_1',
        'avg_insurance_cost_2', 'num_insured_2')

    plt.figure()
    for i in range(3):
        ax_left = plt.subplot(3, 1, i + 1)
        vals['avg_insurance_cost_%d' % i].plot(ax=ax_left, legend=False,
                                               label='Insurance price')
        if i == 1:
            # Only the middle panel carries axis labels (original layout).
            plt.ylabel('Average insurance price over episode')
        # pandas returns the right-hand axis when secondary_y=True; capture it
        # so the legend entries of both axes can be merged below.
        ax_right = vals['num_insured_%d' % i].plot(secondary_y=True,
                                                   label='Insurance usage')
        if i == 1:
            plt.ylabel('Total insurance usage over episode')
        plt.title('Insurance %d' % (i + 1))
        if i == 0:
            # BUG FIX: the original read `ax.get_legend_handles_labels()` where
            # `ax` was never defined (NameError); use the captured secondary
            # axis instead so both legends merge into one box.
            h1, l1 = ax_left.get_legend_handles_labels()
            h2, l2 = ax_right.get_legend_handles_labels()
            plt.legend(h1 + h2, l1 + l2, loc=5)
    return vals


# Plot both experiments that the original script handled with duplicated code.
for grid_id in ('GRID-293', 'GRID-502'):
    plot_insurance_experiment(project, grid_id)
| 38.85
| 101
| 0.761261
| 485
| 3,108
| 4.678351
| 0.164948
| 0.077567
| 0.084619
| 0.031732
| 0.857647
| 0.825915
| 0.825915
| 0.825915
| 0.825915
| 0.825915
| 0
| 0.034925
| 0.097169
| 3,108
| 80
| 102
| 38.85
| 0.773699
| 0.176319
| 0
| 0.75
| 0
| 0
| 0.310074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.125
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b8d1c9b26844eec160d219238a33dd9ecffd0c08
| 5,295
|
py
|
Python
|
tests/test_graphs.py
|
HTalarmo/neat-python
|
08f5093712deb1ccbdb31eda8fbc3de797ff50a8
|
[
"BSD-3-Clause"
] | 1,129
|
2015-10-01T20:06:37.000Z
|
2022-03-29T22:53:03.000Z
|
tests/test_graphs.py
|
HTalarmo/neat-python
|
08f5093712deb1ccbdb31eda8fbc3de797ff50a8
|
[
"BSD-3-Clause"
] | 225
|
2015-10-01T17:21:34.000Z
|
2022-03-07T13:29:50.000Z
|
tests/test_graphs.py
|
HTalarmo/neat-python
|
08f5093712deb1ccbdb31eda8fbc3de797ff50a8
|
[
"BSD-3-Clause"
] | 561
|
2015-11-14T23:44:52.000Z
|
2022-03-31T17:51:30.000Z
|
import random
from neat.graphs import creates_cycle, required_for_output, feed_forward_layers
def assert_almost_equal(x, y, tol):
    """Assert that *x* and *y* agree to within *tol* (absolute difference)."""
    delta = abs(x - y)
    assert delta < tol, "{!r} !~= {!r}".format(x, y)
def test_creates_cycle():
    """creates_cycle() flags connections that would close a directed cycle."""
    chain = [(0, 1), (1, 2), (2, 3)]
    # Any edge back toward node 0 (including a self-loop) closes a cycle...
    for src in (0, 1, 2, 3):
        assert creates_cycle(chain, (src, 0))
    # ...while forward edges out of node 0 do not.
    for dst in (1, 2, 3):
        assert not creates_cycle(chain, (0, dst))
    diamond = [(0, 2), (1, 3), (2, 3), (4, 2)]
    assert creates_cycle(diamond, (3, 4))
    assert not creates_cycle(diamond, (4, 3))
def test_required_for_output():
    """required_for_output() returns exactly the nodes needed for the outputs."""
    # (inputs, outputs, connections, expected required set)
    cases = [
        ([0, 1], [2], [(0, 2), (1, 2)], {2}),
        ([0, 1], [2], [(0, 3), (1, 4), (3, 2), (4, 2)], {2, 3, 4}),
        ([0, 1], [3], [(0, 2), (1, 2), (2, 3)], {2, 3}),
        ([0, 1], [4],
         [(0, 2), (1, 2), (1, 3), (2, 3), (2, 4), (3, 4)], {2, 3, 4}),
        ([0, 1], [4],
         [(0, 2), (1, 3), (2, 3), (3, 4), (4, 2)], {2, 3, 4}),
        # Node 5 is a dead end and must not be reported as required.
        ([0, 1], [4],
         [(0, 2), (1, 2), (1, 3), (2, 3), (2, 4), (3, 4), (2, 5)], {2, 3, 4}),
    ]
    for inputs, outputs, connections, expected in cases:
        assert required_for_output(inputs, outputs, connections) == expected
def test_fuzz_required():
    """Fuzz required_for_output(): every output must appear in the result."""
    for _ in range(1000):
        n_hidden = random.randint(10, 100)
        n_in = random.randint(1, 10)
        n_out = random.randint(1, 10)
        nodes = list({random.randint(0, 1000)
                      for _ in range(n_in + n_out + n_hidden)})
        random.shuffle(nodes)
        inputs = nodes[:n_in]
        outputs = nodes[n_in:n_in + n_out]
        connections = []
        for _ in range(n_hidden * 2):
            a = random.choice(nodes)
            b = random.choice(nodes)
            # Reject self-loops and edges that stay entirely within the
            # input set or entirely within the output set.
            rejected = (a == b
                        or (a in inputs and b in inputs)
                        or (a in outputs and b in outputs))
            if not rejected:
                connections.append((a, b))
        required = required_for_output(inputs, outputs, connections)
        assert all(o in required for o in outputs)
def test_feed_forward_layers():
    """feed_forward_layers() groups nodes into evaluation-order layers."""
    big = [(0, 4), (1, 4), (1, 5), (2, 5), (2, 6), (3, 6), (3, 7),
           (4, 8), (5, 8), (5, 9), (5, 10), (6, 10), (6, 7),
           (8, 11), (8, 12), (8, 9), (9, 10), (7, 10),
           (10, 12), (10, 13)]
    # Extra connections feeding dead-end nodes (14-16) must not alter layering.
    big_with_dead_ends = big + [(3, 14), (14, 15), (5, 16), (10, 16)]
    # (inputs, outputs, connections, expected layer list)
    cases = [
        ([0, 1], [2], [(0, 2), (1, 2)], [{2}]),
        ([0, 1], [3], [(0, 2), (1, 2), (2, 3)], [{2}, {3}]),
        ([0, 1], [4],
         [(0, 2), (1, 2), (1, 3), (2, 3), (2, 4), (3, 4)],
         [{2}, {3}, {4}]),
        ([0, 1, 2, 3], [11, 12, 13], big,
         [{4, 5, 6}, {8, 7}, {9, 11}, {10}, {12, 13}]),
        ([0, 1, 2, 3], [11, 12, 13], big_with_dead_ends,
         [{4, 5, 6}, {8, 7}, {9, 11}, {10}, {12, 13}]),
    ]
    for inputs, outputs, connections, expected in cases:
        assert feed_forward_layers(inputs, outputs, connections) == expected
def test_fuzz_feed_forward_layers():
    """Smoke test: feed_forward_layers must not raise on random graphs.

    No return-value assertions are made; the call itself is the check.
    """
    for _ in range(1000):
        n_hidden = random.randint(10, 100)
        n_in = random.randint(1, 10)
        n_out = random.randint(1, 10)
        nodes = list(set(random.randint(0, 1000) for _ in range(n_in + n_out + n_hidden)))
        random.shuffle(nodes)
        inputs = nodes[:n_in]
        outputs = nodes[n_in:n_in + n_out]
        connections = []
        for _ in range(n_hidden * 2):
            src = random.choice(nodes)
            dst = random.choice(nodes)
            # Same edge filter as test_fuzz_required: no self-loops, no
            # input->input or output->output edges.
            if src == dst:
                continue
            if (src in inputs and dst in inputs) or (src in outputs and dst in outputs):
                continue
            connections.append((src, dst))
        feed_forward_layers(inputs, outputs, connections)
if __name__ == '__main__':
    # Run the full local suite when executed directly as a script.
    for _case in (
        test_creates_cycle,
        test_required_for_output,
        test_fuzz_required,
        test_feed_forward_layers,
        test_fuzz_feed_forward_layers,
    ):
        _case()
| 32.888199
| 90
| 0.517658
| 771
| 5,295
| 3.417639
| 0.088197
| 0.018975
| 0.118406
| 0.125237
| 0.86186
| 0.829602
| 0.812903
| 0.783302
| 0.783302
| 0.725996
| 0
| 0.108544
| 0.297073
| 5,295
| 160
| 91
| 33.09375
| 0.599409
| 0
| 0
| 0.703125
| 0
| 0
| 0.003966
| 0
| 0
| 0
| 0
| 0
| 0.179688
| 1
| 0.046875
| false
| 0
| 0.015625
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7702db91fd736ccb3c2f32d2fa68420e779927a7
| 33,769
|
py
|
Python
|
custodian/qchem/tests/test_jobs.py
|
JaGeo/custodian
|
9e3bdcb51f6354282d8c12df95ff71b9338812bb
|
[
"MIT"
] | null | null | null |
custodian/qchem/tests/test_jobs.py
|
JaGeo/custodian
|
9e3bdcb51f6354282d8c12df95ff71b9338812bb
|
[
"MIT"
] | null | null | null |
custodian/qchem/tests/test_jobs.py
|
JaGeo/custodian
|
9e3bdcb51f6354282d8c12df95ff71b9338812bb
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import unicode_literals, division
import os
import shutil
from unittest import TestCase
try:
from unittest.mock import patch
except ImportError:
from mock import patch
import unittest
from custodian.qchem.jobs import QCJob
from pymatgen.io.qchem.inputs import QCInput
# Standard module authorship metadata for the custodian project.
__author__ = "Samuel Blau"
__copyright__ = "Copyright 2018, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Samuel Blau"
__email__ = "samblau1@gmail.com"
__status__ = "Alpha"
__date__ = "6/6/18"
__credits__ = "Shyam Dwaraknath"
# Directory holding the pre-recorded Q-Chem reference inputs/outputs used
# as fixtures by the tests below (three levels up from this test module).
test_dir = os.path.join(
os.path.dirname(__file__), "..", "..", "..", "test_files", "qchem", "new_test_files"
)
# Scratch working directory created/removed per test, and the original cwd
# to restore in tearDown.
scr_dir = os.path.join(test_dir, "scr")
cwd = os.getcwd()
class QCJobTest(TestCase):
    """Unit tests for QCJob: command-line construction, the backup copy made
    by setup(), and the environment variables it exports."""

    def test_defaults(self):
        with patch("custodian.qchem.jobs.shutil.copy") as copy_patch:
            job = QCJob(qchem_command="qchem", max_cores=32)
            self.assertEqual(job.current_command, " qchem -nt 32 mol.qin mol.qout")
            job.setup()
            # setup() backs up the input file as <input>.orig.
            first_copy = copy_patch.call_args_list[0][0]
            self.assertEqual(first_copy[0], "mol.qin")
            self.assertEqual(first_copy[1], "mol.qin.orig")
            # Scratch defaults to the cwd; thread counts follow max_cores.
            self.assertEqual(os.environ["QCSCRATCH"], os.getcwd())
            self.assertEqual(os.environ["QCTHREADS"], "32")
            self.assertEqual(os.environ["OMP_NUM_THREADS"], "32")

    def test_not_defaults(self):
        job = QCJob(
            qchem_command="qchem -slurm",
            multimode="mpi",
            input_file="different.qin",
            output_file="not_default.qout",
            max_cores=12,
            scratch_dir="/not/default/scratch/",
            backup=False,
        )
        # MPI mode uses -np instead of -nt.
        self.assertEqual(
            job.current_command, " qchem -slurm -np 12 different.qin not_default.qout"
        )
        job.setup()
        self.assertEqual(os.environ["QCSCRATCH"], "/not/default/scratch/")

    def test_save_scratch(self):
        with patch("custodian.qchem.jobs.shutil.copy") as copy_patch:
            job = QCJob(
                qchem_command="qchem -slurm",
                max_cores=32,
                scratch_dir=os.getcwd(),
                save_scratch=True,
                save_name="freq_scratch",
            )
            # save_name is appended to the command when save_scratch is set.
            self.assertEqual(
                job.current_command,
                " qchem -slurm -nt 32 mol.qin mol.qout freq_scratch",
            )
            job.setup()
            first_copy = copy_patch.call_args_list[0][0]
            self.assertEqual(first_copy[0], "mol.qin")
            self.assertEqual(first_copy[1], "mol.qin.orig")
            self.assertEqual(os.environ["QCSCRATCH"], os.getcwd())
            self.assertEqual(os.environ["QCTHREADS"], "32")
            self.assertEqual(os.environ["OMP_NUM_THREADS"], "32")
class OptFFTest(TestCase):
    """Unlinked opt/freq flattener over the FF_working fixtures: two opt/freq
    rounds (.opt_0 .. .freq_1), then exhaustion."""

    def setUp(self):
        # Stage the input plus all four pre-recorded outputs in a fresh
        # scratch directory and chdir into it.
        os.makedirs(scr_dir)
        shutil.copyfile(
            os.path.join(test_dir, "FF_working/test.qin"),
            os.path.join(scr_dir, "test.qin"),
        )
        for stage in ("opt_0", "freq_0", "opt_1", "freq_1"):
            out_name = "test.qout." + stage
            shutil.copyfile(
                os.path.join(test_dir, "FF_working", out_name),
                os.path.join(scr_dir, out_name),
            )
        os.chdir(scr_dir)

    def tearDown(self):
        os.chdir(cwd)
        shutil.rmtree(scr_dir)

    def test_OptFF(self):
        flattener = QCJob.opt_with_frequency_flattener(
            qchem_command="qchem",
            max_cores=32,
            input_file="test.qin",
            output_file="test.qout",
            linked=False,
        )
        for idx, suffix in enumerate((".opt_0", ".freq_0", ".opt_1", ".freq_1")):
            # Only the very first job backs up the input file.
            reference = QCJob(
                qchem_command="qchem",
                max_cores=32,
                multimode="openmp",
                input_file="test.qin",
                output_file="test.qout",
                suffix=suffix,
                backup=(idx == 0),
            ).as_dict()
            self.assertEqual(next(flattener).as_dict(), reference)
            if idx:
                # After each stage past the first, the rewritten working input
                # must match the stored reference input for that stage.
                self.assertEqual(
                    QCInput.from_file(
                        os.path.join(test_dir, "FF_working/test.qin" + suffix)
                    ).as_dict(),
                    QCInput.from_file(os.path.join(scr_dir, "test.qin")).as_dict(),
                )
        self.assertRaises(StopIteration, flattener.__next__)
class OptFFTest1(TestCase):
    """Flattener run (unlinked) on the 2620_complete fixtures: the very first
    optimization already converges, so only one job is yielded."""

    def setUp(self):
        os.makedirs(scr_dir)
        shutil.copyfile(
            os.path.join(test_dir, "2620_complete/mol.qin.orig"),
            os.path.join(scr_dir, "mol.qin"),
        )
        shutil.copyfile(
            os.path.join(test_dir, "2620_complete/mol.qout.opt_0"),
            os.path.join(scr_dir, "mol.qout.opt_0"),
        )
        os.chdir(scr_dir)

    def tearDown(self):
        os.chdir(cwd)
        shutil.rmtree(scr_dir)

    def test_OptFF(self):
        flattener = QCJob.opt_with_frequency_flattener(
            qchem_command="qchem -slurm",
            max_cores=32,
            input_file="mol.qin",
            output_file="mol.qout",
            linked=False,
        )
        reference = QCJob(
            qchem_command="qchem -slurm",
            max_cores=32,
            multimode="openmp",
            input_file="mol.qin",
            output_file="mol.qout",
            suffix=".opt_0",
            backup=True,
        ).as_dict()
        self.assertEqual(next(flattener).as_dict(), reference)
        # Converged after the first opt: no further jobs.
        self.assertRaises(StopIteration, flattener.__next__)
class OptFFTest2(TestCase):
    """Flattener run (unlinked) on the disconnected_but_converged fixtures:
    one opt plus one freq, then exhaustion."""

    def setUp(self):
        os.makedirs(scr_dir)
        shutil.copyfile(
            os.path.join(test_dir, "disconnected_but_converged/mol.qin.orig"),
            os.path.join(scr_dir, "mol.qin"),
        )
        for stage in ("opt_0", "freq_0"):
            out_name = "mol.qout." + stage
            shutil.copyfile(
                os.path.join(test_dir, "disconnected_but_converged", out_name),
                os.path.join(scr_dir, out_name),
            )
        os.chdir(scr_dir)

    def tearDown(self):
        os.chdir(cwd)
        shutil.rmtree(scr_dir)

    def test_OptFF(self):
        flattener = QCJob.opt_with_frequency_flattener(
            qchem_command="qchem -slurm",
            max_cores=32,
            input_file="mol.qin",
            output_file="mol.qout",
            linked=False,
        )
        for idx, suffix in enumerate((".opt_0", ".freq_0")):
            reference = QCJob(
                qchem_command="qchem -slurm",
                max_cores=32,
                multimode="openmp",
                input_file="mol.qin",
                output_file="mol.qout",
                suffix=suffix,
                backup=(idx == 0),
            ).as_dict()
            self.assertEqual(next(flattener).as_dict(), reference)
            if idx:
                # The rewritten working input must match the stored reference.
                self.assertEqual(
                    QCInput.from_file(
                        os.path.join(test_dir, "disconnected_but_converged/mol.qin" + suffix)
                    ).as_dict(),
                    QCInput.from_file(os.path.join(scr_dir, "mol.qin")).as_dict(),
                )
        self.assertRaises(StopIteration, flattener.__next__)
class OptFFTestSwitching(TestCase):
    """Flattener run (unlinked) on the FF_switching fixtures: three full
    opt/freq rounds (.opt_0 .. .freq_2), then exhaustion."""

    STAGES = ("opt_0", "freq_0", "opt_1", "freq_1", "opt_2", "freq_2")

    def setUp(self):
        os.makedirs(scr_dir)
        shutil.copyfile(
            os.path.join(test_dir, "FF_switching/mol.qin.orig"),
            os.path.join(scr_dir, "mol.qin"),
        )
        # Stage all six pre-recorded Q-Chem outputs.
        for stage in self.STAGES:
            out_name = "mol.qout." + stage
            shutil.copyfile(
                os.path.join(test_dir, "FF_switching", out_name),
                os.path.join(scr_dir, out_name),
            )
        os.chdir(scr_dir)

    def tearDown(self):
        os.chdir(cwd)
        shutil.rmtree(scr_dir)

    def test_OptFF(self):
        flattener = QCJob.opt_with_frequency_flattener(
            qchem_command="qchem -slurm",
            max_cores=32,
            input_file="mol.qin",
            output_file="mol.qout",
            linked=False,
        )
        for idx, stage in enumerate(self.STAGES):
            suffix = "." + stage
            reference = QCJob(
                qchem_command="qchem -slurm",
                max_cores=32,
                multimode="openmp",
                input_file="mol.qin",
                output_file="mol.qout",
                suffix=suffix,
                backup=(idx == 0),
            ).as_dict()
            self.assertEqual(next(flattener).as_dict(), reference)
            if idx:
                # After every stage past the first, the rewritten working
                # input must match the stored reference input for that stage.
                self.assertEqual(
                    QCInput.from_file(
                        os.path.join(test_dir, "FF_switching/mol.qin" + suffix)
                    ).as_dict(),
                    QCInput.from_file(os.path.join(scr_dir, "mol.qin")).as_dict(),
                )
        self.assertRaises(StopIteration, flattener.__next__)
class OptFFTest6004(TestCase):
    """Flattener run (unlinked) on the 6004_frag12 fixtures: three opt/freq
    rounds are checked.

    NOTE(review): unlike the sibling tests, this one does not assert what
    happens after .freq_2 (no StopIteration/Exception check) — possibly an
    oversight in the original; behavior preserved here.
    """

    STAGES = ("opt_0", "freq_0", "opt_1", "freq_1", "opt_2", "freq_2")

    def setUp(self):
        os.makedirs(scr_dir)
        shutil.copyfile(
            os.path.join(test_dir, "6004_frag12/mol.qin.orig"),
            os.path.join(scr_dir, "mol.qin"),
        )
        # Stage all six pre-recorded Q-Chem outputs.
        for stage in self.STAGES:
            out_name = "mol.qout." + stage
            shutil.copyfile(
                os.path.join(test_dir, "6004_frag12", out_name),
                os.path.join(scr_dir, out_name),
            )
        os.chdir(scr_dir)

    def tearDown(self):
        os.chdir(cwd)
        shutil.rmtree(scr_dir)

    def test_OptFF(self):
        flattener = QCJob.opt_with_frequency_flattener(
            qchem_command="qchem -slurm",
            max_cores=32,
            input_file="mol.qin",
            output_file="mol.qout",
            linked=False,
        )
        for idx, stage in enumerate(self.STAGES):
            suffix = "." + stage
            reference = QCJob(
                qchem_command="qchem -slurm",
                max_cores=32,
                multimode="openmp",
                input_file="mol.qin",
                output_file="mol.qout",
                suffix=suffix,
                backup=(idx == 0),
            ).as_dict()
            self.assertEqual(next(flattener).as_dict(), reference)
            if idx:
                # The rewritten working input must match the stored reference
                # input for this stage.
                self.assertEqual(
                    QCInput.from_file(
                        os.path.join(test_dir, "6004_frag12/mol.qin" + suffix)
                    ).as_dict(),
                    QCInput.from_file(os.path.join(scr_dir, "mol.qin")).as_dict(),
                )
class OptFFTest5952(TestCase):
    """Flattener run (unlinked) on the 5952_frag16 fixtures: three opt/freq
    rounds, then requesting another job must raise (non-convergence)."""

    STAGES = ("opt_0", "freq_0", "opt_1", "freq_1", "opt_2", "freq_2")

    def setUp(self):
        os.makedirs(scr_dir)
        shutil.copyfile(
            os.path.join(test_dir, "5952_frag16/mol.qin.orig"),
            os.path.join(scr_dir, "mol.qin"),
        )
        # Stage all six pre-recorded Q-Chem outputs.
        for stage in self.STAGES:
            out_name = "mol.qout." + stage
            shutil.copyfile(
                os.path.join(test_dir, "5952_frag16", out_name),
                os.path.join(scr_dir, out_name),
            )
        os.chdir(scr_dir)

    def tearDown(self):
        os.chdir(cwd)
        shutil.rmtree(scr_dir)

    def test_OptFF(self):
        flattener = QCJob.opt_with_frequency_flattener(
            qchem_command="qchem -slurm",
            max_cores=32,
            input_file="mol.qin",
            output_file="mol.qout",
            linked=False,
        )
        for idx, stage in enumerate(self.STAGES):
            suffix = "." + stage
            reference = QCJob(
                qchem_command="qchem -slurm",
                max_cores=32,
                multimode="openmp",
                input_file="mol.qin",
                output_file="mol.qout",
                suffix=suffix,
                backup=(idx == 0),
            ).as_dict()
            self.assertEqual(next(flattener).as_dict(), reference)
            if idx:
                # The rewritten working input must match the stored reference
                # input for this stage.
                self.assertEqual(
                    QCInput.from_file(
                        os.path.join(test_dir, "5952_frag16/mol.qin" + suffix)
                    ).as_dict(),
                    QCInput.from_file(os.path.join(scr_dir, "mol.qin")).as_dict(),
                )
        # Flattening did not converge: the generator raises on the next pull.
        self.assertRaises(Exception, flattener.__next__)
class OptFFTest5690(TestCase):
    """Flattener run (unlinked) on the 5690_frag18 fixtures: three opt/freq
    rounds, then requesting another job must raise (non-convergence)."""

    STAGES = ("opt_0", "freq_0", "opt_1", "freq_1", "opt_2", "freq_2")

    def setUp(self):
        os.makedirs(scr_dir)
        shutil.copyfile(
            os.path.join(test_dir, "5690_frag18/mol.qin.orig"),
            os.path.join(scr_dir, "mol.qin"),
        )
        # Stage all six pre-recorded Q-Chem outputs.
        for stage in self.STAGES:
            out_name = "mol.qout." + stage
            shutil.copyfile(
                os.path.join(test_dir, "5690_frag18", out_name),
                os.path.join(scr_dir, out_name),
            )
        os.chdir(scr_dir)

    def tearDown(self):
        os.chdir(cwd)
        shutil.rmtree(scr_dir)

    def test_OptFF(self):
        flattener = QCJob.opt_with_frequency_flattener(
            qchem_command="qchem -slurm",
            max_cores=32,
            input_file="mol.qin",
            output_file="mol.qout",
            linked=False,
        )
        for idx, stage in enumerate(self.STAGES):
            suffix = "." + stage
            reference = QCJob(
                qchem_command="qchem -slurm",
                max_cores=32,
                multimode="openmp",
                input_file="mol.qin",
                output_file="mol.qout",
                suffix=suffix,
                backup=(idx == 0),
            ).as_dict()
            self.assertEqual(next(flattener).as_dict(), reference)
            if idx:
                # The rewritten working input must match the stored reference
                # input for this stage.
                self.assertEqual(
                    QCInput.from_file(
                        os.path.join(test_dir, "5690_frag18/mol.qin" + suffix)
                    ).as_dict(),
                    QCInput.from_file(os.path.join(scr_dir, "mol.qin")).as_dict(),
                )
        # Flattening did not converge: the generator raises on the next pull.
        self.assertRaises(Exception, flattener.__next__)
class OptFF_small_neg_freq(TestCase):
    """Linked flattener run on the small_neg_freq fixtures: three opt/freq
    rounds where every job carries the shared 'chain_scratch' save-scratch
    settings, then exhaustion."""

    STAGES = ("opt_0", "freq_0", "opt_1", "freq_1", "opt_2", "freq_2")

    def setUp(self):
        os.makedirs(scr_dir)
        shutil.copyfile(
            os.path.join(test_dir, "small_neg_freq/mol.qin.orig"),
            os.path.join(scr_dir, "mol.qin"),
        )
        # Stage all six pre-recorded Q-Chem outputs.
        for stage in self.STAGES:
            out_name = "mol.qout." + stage
            shutil.copyfile(
                os.path.join(test_dir, "small_neg_freq", out_name),
                os.path.join(scr_dir, out_name),
            )
        os.chdir(scr_dir)

    def tearDown(self):
        os.chdir(cwd)
        shutil.rmtree(scr_dir)

    def test_OptFF(self):
        flattener = QCJob.opt_with_frequency_flattener(
            qchem_command="qchem -slurm",
            max_cores=32,
            input_file="mol.qin",
            output_file="mol.qout",
            linked=True,
        )
        for idx, stage in enumerate(self.STAGES):
            suffix = "." + stage
            # Linked mode chains jobs through a shared saved scratch dir.
            reference = QCJob(
                qchem_command="qchem -slurm",
                max_cores=32,
                multimode="openmp",
                input_file="mol.qin",
                output_file="mol.qout",
                suffix=suffix,
                scratch_dir=os.getcwd(),
                save_scratch=True,
                save_name="chain_scratch",
                backup=(idx == 0),
            ).as_dict()
            self.assertEqual(next(flattener).as_dict(), reference)
            if idx:
                # The rewritten working input must match the stored reference
                # input for this stage.
                self.assertEqual(
                    QCInput.from_file(
                        os.path.join(test_dir, "small_neg_freq/mol.qin" + suffix)
                    ).as_dict(),
                    QCInput.from_file(os.path.join(scr_dir, "mol.qin")).as_dict(),
                )
        self.assertRaises(StopIteration, flattener.__next__)
if __name__ == "__main__":
    # Delegate test discovery and execution to the unittest runner.
    unittest.main()
| 34.213779
| 88
| 0.541295
| 3,995
| 33,769
| 4.317647
| 0.038298
| 0.052525
| 0.086962
| 0.060873
| 0.950142
| 0.946374
| 0.937446
| 0.930605
| 0.922662
| 0.916575
| 0
| 0.022457
| 0.324854
| 33,769
| 986
| 89
| 34.248479
| 0.734111
| 0.000385
| 0
| 0.770213
| 0
| 0
| 0.152249
| 0.062481
| 0
| 0
| 0
| 0
| 0.092553
| 1
| 0.028723
| false
| 0
| 0.010638
| 0
| 0.048936
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7721027662d0030de71b50cc783a6fda14e84c80
| 7,477
|
py
|
Python
|
py/programs/img_1.py
|
fpfaffendorf/pLauncher
|
e9d77019442d6d7bea45716cfd9c32358854d148
|
[
"Apache-2.0"
] | 1
|
2018-04-16T15:58:15.000Z
|
2018-04-16T15:58:15.000Z
|
py/programs/img_1.py
|
fpfaffendorf/pLauncher
|
e9d77019442d6d7bea45716cfd9c32358854d148
|
[
"Apache-2.0"
] | null | null | null |
py/programs/img_1.py
|
fpfaffendorf/pLauncher
|
e9d77019442d6d7bea45716cfd9c32358854d148
|
[
"Apache-2.0"
] | null | null | null |
# -------------------------------------------------------------------------------------------------------------------------------------------------------------
from __future__ import print_function
from lib import meeus
import sys
import math
# -------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------------------------------------------------------------------------
# Main Program
# -------------------------------------------------------------------------------------------------------------------------------------------------------------
# Render the 1-bit bitmap stored in `img` to stdout as raw characters
# (one chr() per byte), optionally inverted.
#
# Usage:
#   img_1.py --help      print usage
#   img_1.py <negative>  negative: 1 = invert bytes, anything else = as-is
if len(sys.argv) == 2 and sys.argv[1] == "--help":
    print("IMG")
    print("Negative (1-Yes 0-No)")
else:
    # Raw bitmap payload; 1024 bytes, presumably 8 pixels per byte
    # (e.g. a 128x64 image) -- TODO confirm against the consumer.
    img = [
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x0F, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE,
        0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0xC0, 0xFF, 0xFF, 0x1F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF0, 0x0F, 0x80, 0x7F, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFC, 0x00,
        0x00, 0xF8, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x3F, 0x00, 0x00, 0xE0, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x0F, 0x00, 0x00, 0x80, 0x0F, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0x3B, 0xED,
        0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0xE0, 0xF3, 0xF4, 0x03, 0x00, 0x3C, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0xF0, 0x4F, 0x1F, 0x03, 0x00, 0x78, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF8, 0xFD, 0x8F,
        0x01, 0x00, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0xFC, 0xFD, 0x30, 0x00, 0x00, 0xFC, 0x01, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0x7F, 0x70, 0x01, 0x00, 0xBC, 0x03,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xF0,
        0x01, 0xC0, 0xB0, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x80, 0xFF, 0xE7, 0xF9, 0x03, 0xE0, 0xF1, 0x07, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x80, 0xFF, 0xFF, 0xFF, 0x03, 0xA0, 0xFD, 0x0F,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0xFF, 0x9F, 0x3F,
        0x03, 0x00, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0xC0, 0xFF, 0x1F, 0x0F, 0x00, 0x00, 0xFF, 0x1F, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0xE0, 0xFF, 0x5F, 0x06, 0x00, 0x60, 0x97, 0x1F,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE0, 0xFF, 0x7F, 0x03,
        0x00, 0xE0, 0xA1, 0x3B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0xF0, 0xFF, 0xFF, 0x01, 0x00, 0xE0, 0x20, 0x3D, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0xF0, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0E, 0x70,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0xFF, 0x7F, 0x00,
        0x00, 0xE0, 0x0F, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x38, 0xFD, 0x3F, 0x00, 0x00, 0xF0, 0xBF, 0x63, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x38, 0xFD, 0x20, 0x00, 0x00, 0xF0, 0xFF, 0xFF,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x7A, 0x00, 0x00,
        0x00, 0xF8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x18, 0x7A, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x18, 0x38, 0x00, 0x00, 0x00, 0xFC, 0xFF, 0xFF,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x70, 0x02, 0x00,
        0x00, 0xFC, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x1C, 0xE0, 0x07, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x0F, 0x00, 0x00, 0xFE, 0xFF, 0xFF,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x8C, 0x01,
        0x00, 0xFC, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x1C, 0x00, 0xF8, 0x1F, 0x00, 0xF8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x1C, 0x00, 0xC0, 0x3F, 0x00, 0xF8, 0xFE, 0xFF,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1C, 0x00, 0xC0, 0xFF,
        0x00, 0x00, 0xF0, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x18, 0x00, 0xE0, 0xFF, 0x00, 0x00, 0xF0, 0xFF, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0xF0, 0xFF, 0x01, 0x00, 0xF0, 0xFF,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0xF0, 0xFF,
        0x1F, 0x00, 0xF0, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x38, 0x00, 0xF0, 0xFF, 0x3F, 0x00, 0xE0, 0xFF, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x38, 0x00, 0xE0, 0xFF, 0x3F, 0x00, 0xE0, 0x7F,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x00, 0xC0, 0xFF,
        0x3F, 0x00, 0xE0, 0x7F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x70, 0x00, 0xC0, 0xFF, 0x1F, 0x00, 0xE0, 0x7F, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x70, 0x00, 0x80, 0xFF, 0x1F, 0x00, 0xE0, 0x7F,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0xFE,
        0x1F, 0x00, 0xE0, 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0xE0, 0x00, 0x00, 0xFE, 0x1F, 0x00, 0xE0, 0x3F, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0xE0, 0x00, 0x00, 0xFE, 0x03, 0x00, 0xE0, 0x1F,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0x01, 0x00, 0xFE,
        0x01, 0x00, 0xE0, 0x1F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x80, 0x03, 0x00, 0xFE, 0x03, 0x00, 0xC0, 0x0F, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x80, 0x03, 0x00, 0xFE, 0x03, 0x00, 0x80, 0x0F,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0xFE,
        0x01, 0x00, 0x80, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x0F, 0x00, 0xFE, 0x00, 0x00, 0x80, 0x03, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x7E, 0x00, 0x00, 0xC0, 0x01,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3C, 0x00, 0x1C,
        0x00, 0x00, 0xE0, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x78, 0x00, 0x1C, 0x00, 0x00, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0xF0, 0x00, 0x1C, 0x00, 0x00, 0x78, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE0, 0x01, 0x3C,
        0x00, 0x00, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0xC0, 0x07, 0x38, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1F, 0x30, 0x00, 0xC0, 0x07, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7E, 0x20,
        0x00, 0xF0, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0xF8, 0x03, 0x00, 0xFE, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE0, 0x7F, 0xF8, 0x3F, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0xFF,
        0xFF, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0xF8, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00
    ]
    # Bug fix: the original read sys.argv[1] unconditionally here, which
    # raised IndexError when the script was run with no arguments. Guard
    # on the argument count; the no-argument case now falls through to the
    # non-inverted output branch.
    if len(sys.argv) > 1 and sys.argv[1] == '1':
        # Negative requested: invert each byte before emitting it.
        for byte in img:
            print(chr(0xFF - byte), end='')
    else:
        for byte in img:
            print(chr(byte), end='')
| 65.587719
| 159
| 0.575364
| 1,082
| 7,477
| 3.971349
| 0.075786
| 1.046311
| 1.312544
| 1.496858
| 0.804748
| 0.790784
| 0.782872
| 0.782872
| 0.777286
| 0.761461
| 0
| 0.458714
| 0.194998
| 7,477
| 113
| 160
| 66.168142
| 0.255192
| 0.086131
| 0
| 0.058824
| 0
| 0
| 0.004541
| 0
| 0
| 0
| 0.600557
| 0
| 0
| 0
| null | null | 0
| 0.039216
| null | null | 0.04902
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
7722875eb3d303145946d452b9e441447b4fc27b
| 15,960
|
py
|
Python
|
compareSUB.py
|
ninja777/Utils
|
be3b6c0bca6c664b791b4703ad40e62cd4f21833
|
[
"MIT"
] | null | null | null |
compareSUB.py
|
ninja777/Utils
|
be3b6c0bca6c664b791b4703ad40e62cd4f21833
|
[
"MIT"
] | null | null | null |
compareSUB.py
|
ninja777/Utils
|
be3b6c0bca6c664b791b4703ad40e62cd4f21833
|
[
"MIT"
] | null | null | null |
import CompareEqlist as o
# Paths of the two subsumption reports being diffed (A = server16, B = server17).
file1 = '../server16_ifiles/results/OUTPUTMLS-SUBSUMP.txt'
file2 = '../server17_ifiles/results/OUTPUTMLS-SUBSUMP.txt'
# Parse both reports into equivalence-list structures; subSumptionClass comes
# from the project-local CompareEqlist module (imported above as `o`).
a = o.subSumptionClass(filename=file1)
a.parseFile()
b = o.subSumptionClass(filename=file2)
b.parseFile()
# Per-iteration match flags used (and reset) by the comparison loops below.
eqlistPresent = False
OperationPresent = False
OperandPresent = False
# When True, the loops below gate their reports through FilterOps.
applyFilter = True
# Summary counters, printed at the end of the script.
removedClasses = 0
addedClasses = 0
OperationsAdded = 0
OperationsRemoved = 0
replacedSMA = 0
SMAAdded = 0
SMARemoved = 0
replacedOps =0
# NOTE(review): allRead/allWrite/mix are never referenced in this chunk —
# confirm they are unused before removing.
allRead = True
allWrite = True
mix = True
def FilterOps(operations):
    """Classify a list of operation strings by access type.

    Each entry is expected to look like ``"<op>(...)"``; the text before the
    first ``'('`` is the operation name, and any name other than ``'write'``
    is counted as a read.

    Returns a ``(all_read, all_write, mixed)`` tuple of 0/1 flags, or
    ``(0, 0, 0)`` for an empty list.
    """
    if not operations:
        return 0, 0, 0
    # Count the write operations; everything else is treated as a read.
    write_total = sum(1 for entry in operations if entry.split('(')[0] == 'write')
    if write_total == len(operations):
        return 0, 1, 0
    if write_total == 0:
        return 1, 0, 0
    return 0, 0, 1
# --- Pass 1: walk every equivalence list of A and report lists that are
# --- missing from B, operands missing from B, or operand/operation mismatches.
# NOTE(review): leading indentation appears to have been stripped from this
# copy of the file, so the loop nesting below cannot be recovered from the
# text alone; the code is reproduced exactly as found. This is Python 2
# syntax (print statements).
#if the first element matches then all other operands and ops should be present...
for aeqlists in a.eqList:
# print aeqlists
for beqlists in b.eqList:
#check if eqlist in a is present in b
if aeqlists[0].operand == beqlists[0].operand:
#check if all operations match..
if aeqlists[0].operations == beqlists[0].operations:
#eqclass match.. check for others.
eqlistPresent = True
# check for new operand in eqlist.. or new operation in some operands..
for aeq in aeqlists:
for beq in beqlists:
if aeq.operand == beq.operand:
OperandPresent = True
if aeq.operations == beq.operations:
OperationPresent = True
if OperandPresent == True and OperationPresent == False:
# operand present but same ops not present..:
for beq in beqlists:
if aeq.operand == beq.operand:
if aeq.operations != beq.operations:
# The three branches below differ only in their FilterOps guard; each
# prints the same mismatch report and bumps SMAAdded.
if len(aeq.operations) < len(beq.operations):
if applyFilter:
addedops = list(set(aeq.operations) - set(beq.operations))
if FilterOps(aeqlists[0].operations) != FilterOps(addedops):
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "A : ", aeq.operations
print "B : ", beq.operations
SMAAdded = SMAAdded + 1
elif FilterOps(addedops)[2] == 1:
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "A : ", aeq.operations
print "B : ", beq.operations
SMAAdded = SMAAdded + 1
else:
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "A : ", aeq.operations
print "B : ", beq.operations
SMAAdded = SMAAdded + 1
# B's operand lost operations relative to A.
elif len(beq.operations) < len(aeq.operations):
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "A : ", aeq.operations
print "B : ", beq.operations
SMARemoved = SMARemoved + 1
# Same number of operations but different contents: counted as replaced.
elif len(aeq.operations) == len(beq.operations):
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "A : ", aeq.operations
print "B : ", beq.operations
replacedSMA = replacedSMA + 1
SMARemoved = SMARemoved + 1
SMAAdded = SMAAdded + 1
# print "B : ", beq.operations
OperationPresent = False
# Operand exists only in A's list: report it as removed from B.
if OperandPresent == False:
if applyFilter:
if FilterOps(aeqlists[0].operations) != FilterOps(aeq.operations):
OperationsRemoved = OperationsRemoved + 1
if len(aeqlists) <= len(beqlists):
replacedOps = replacedOps + 1
print "-----------"
print "New Operand in A: subsumed by: ", aeqlists[0].operand, aeqlists[0].operations
print aeq.operand, "--", aeq.operations
elif FilterOps(aeq.operations)[2] == 1:
OperationsRemoved = OperationsRemoved + 1
if len(aeqlists) <= len(beqlists):
replacedOps = replacedOps + 1
print "-----------"
print "New Operand in A: subsumed by: ", aeqlists[0].operand, aeqlists[0].operations
print aeq.operand, "--", aeq.operations
else:
OperationsRemoved = OperationsRemoved + 1
if len(aeqlists) <= len(beqlists):
replacedOps = replacedOps + 1
print "-----------"
print "New Operand in A: subsumed by: ", aeqlists[0].operand, aeqlists[0].operations
print aeq.operand, "--", aeq.operations
OperandPresent = False
# Whole equivalence list never matched in B: report the subsumption as
# present only in A.
if eqlistPresent == False:
if applyFilter:
if FilterOps(aeqlists[0].operations) != FilterOps(aeq.operations):
removedClasses = removedClasses + 1
print "-----------"
print "Subsumption only in A : "
for aeq in aeqlists:
print aeq.operand, "--", aeq.operations
elif FilterOps(aeq.operations)[2] == 1:
removedClasses = removedClasses + 1
print "-----------"
print "Subsumption only in A : "
for aeq in aeqlists:
print aeq.operand, "--", aeq.operations
else:
removedClasses = removedClasses + 1
print "-----------"
print "Subsumption only in A : "
for aeq in aeqlists:
print aeq.operand, "--", aeq.operations
eqlistPresent = False
# --- Pass 2: mirror image of pass 1 — walk B's equivalence lists and report
# --- additions relative to A — followed by the final summary report.
# NOTE(review): as in pass 1, leading indentation has been stripped from this
# copy; the code is reproduced exactly as found (Python 2 print syntax).
for aeqlists in b.eqList:
# print aeqlists
for beqlists in a.eqList:
# check if eqlist in a is present in b
if aeqlists[0].operand == beqlists[0].operand:
# check if all operations match..
if aeqlists[0].operations == beqlists[0].operations:
# eqclass match.. check for others.
eqlistPresent = True
#check for new operand in eqlist.. or new operation in some operands..
for aeq in aeqlists:
for beq in beqlists:
if aeq.operand == beq.operand:
OperandPresent = True
if aeq.operations == beq.operations:
OperationPresent = True
if OperandPresent == True and OperationPresent == False:
#operand present but same ops not present..:
for beq in beqlists:
if aeq.operand == beq.operand:
if aeq.operations != beq.operations:
# Note the flipped comparison (> instead of <) and swapped A/B labels
# relative to pass 1; the three guarded branches are otherwise identical.
if len(aeq.operations) > len(beq.operations):
if applyFilter:
addedops = list(set(aeq.operations) - set(beq.operations))
if FilterOps(aeqlists[0].operations) != FilterOps(addedops):
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "B : ", aeq.operations
print "A : ", beq.operations
SMAAdded = SMAAdded + 1
elif FilterOps(addedops)[2] == 1:
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "B : ", aeq.operations
print "A : ", beq.operations
SMAAdded = SMAAdded + 1
else:
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "B : ", aeq.operations
print "A : ", beq.operations
SMAAdded = SMAAdded + 1
elif len(beq.operations) > len(aeq.operations):
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "B : ", aeq.operations
print "A : ", beq.operations
SMARemoved = SMARemoved + 1
elif len(aeq.operations) == len(beq.operations):
print "------------"
print "Mismatch operations for operand : ", aeq.operand, " in Subsumption of: ", \
aeqlists[0].operand, aeqlists[0].operations
print "B : ", aeq.operations
print "A : ", beq.operations
replacedSMA = replacedSMA + 1
SMARemoved = SMARemoved + 1
SMAAdded = SMAAdded + 1
#print "B : ", beq.operations
OperationPresent = False
# Operand exists only in B's list: report it as added relative to A.
if OperandPresent == False:
if applyFilter:
if FilterOps(aeqlists[0].operations) != FilterOps(aeq.operations):
OperationsAdded = OperationsAdded +1
if len(aeqlists) <= len(beqlists):
replacedOps = replacedOps + 1
print "-----------"
print "New Operand in B: subsumed by: ", aeqlists[0].operand, aeqlists[0].operations
print aeq.operand, "--", aeq.operations
elif FilterOps(aeq.operations)[2] == 1:
OperationsAdded = OperationsAdded + 1
if len(aeqlists) <= len(beqlists):
replacedOps = replacedOps + 1
print "-----------"
print "New Operand in B: subsumed by: ", aeqlists[0].operand, aeqlists[0].operations
print aeq.operand, "--", aeq.operations
else:
OperationsAdded = OperationsAdded + 1
if len(aeqlists) <= len(beqlists):
replacedOps = replacedOps + 1
print "-----------"
print "New Operand in B: subsumed by: ", aeqlists[0].operand, aeqlists[0].operations
print aeq.operand, "--", aeq.operations
OperandPresent = False
# Whole equivalence list never matched in A: present only in B.
if eqlistPresent == False:
if applyFilter:
if FilterOps(aeqlists[0].operations) != FilterOps(aeq.operations):
addedClasses = addedClasses+1
print "-----------"
print "Subsumption only in B : "
for aeq in aeqlists:
print aeq.operand, "--", aeq.operations
elif FilterOps(aeq.operations)[2] == 1:
addedClasses = addedClasses + 1
print "-----------"
print "Subsumption only in B : "
for aeq in aeqlists:
print aeq.operand, "--", aeq.operations
else:
addedClasses = addedClasses + 1
print "-----------"
print "Subsumption only in B : "
for aeq in aeqlists:
print aeq.operand, "--", aeq.operations
eqlistPresent = False
# Final summary of every counter accumulated by both passes.
print "Filtered :--------- "
print " "
print "----"
print "Removed Subsumptions : ", removedClasses
print "Removed Operations: ", OperationsRemoved
print "Removed SMA : ",SMARemoved
print "----"
print "Added Subsumptions : ", addedClasses
print "Added Operations: ", OperationsAdded
print "Added SMA : ",SMAAdded
print " "
print "SMAs added as well as removed : ",replacedSMA
print "Operations added as well as removed : ", replacedOps
# for op1 in aeqlists[0].operations:
# for op2 in beqlists[0].operations:
# if op1 == op2:
# print beqlists
# for aop in aeqlists:
# for bop in beqlists:
# if aop.operand == bop.operand:
# print "-------------"
# print aop.operand
# print aop.operations
# print bop.operations
#print a.eqList[4][0]
| 51.318328
| 130
| 0.416165
| 1,205
| 15,960
| 5.510373
| 0.088797
| 0.058283
| 0.071536
| 0.057831
| 0.832831
| 0.817169
| 0.80753
| 0.80753
| 0.80753
| 0.80753
| 0
| 0.016302
| 0.492669
| 15,960
| 311
| 131
| 51.318328
| 0.803754
| 0.060276
| 0
| 0.799213
| 0
| 0
| 0.103527
| 0.006412
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.003937
| null | null | 0.350394
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6248786a0da5b3972865c7c5478dc4c189520700
| 7,483
|
py
|
Python
|
tests/terraform/checks/resource/azure/test_IoTNoPublicNetworkAccess.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | 4,013
|
2019-12-09T13:16:54.000Z
|
2022-03-31T14:31:01.000Z
|
tests/terraform/checks/resource/azure/test_IoTNoPublicNetworkAccess.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | 1,258
|
2019-12-17T09:55:51.000Z
|
2022-03-31T19:17:17.000Z
|
tests/terraform/checks/resource/azure/test_IoTNoPublicNetworkAccess.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | 638
|
2019-12-19T08:57:38.000Z
|
2022-03-30T21:38:37.000Z
|
import unittest
import hcl2
from checkov.terraform.checks.resource.azure.IoTNoPublicNetworkAccess import check
from checkov.common.models.enums import CheckResult
# Unit tests for the checkov Azure check "IoTNoPublicNetworkAccess": an
# azurerm_iothub resource should not enable public network access.
# NOTE(review): leading indentation appears to have been stripped from this
# copy of the file; method bodies and the HCL heredocs are shown flattened,
# and the code is reproduced as found. hcl2 and checkov are third-party.
class TestIoTNoPublicNetworkAccess(unittest.TestCase):
# PASSED expected when public_network_access_enabled is absent entirely.
def test_success_missing_attribute(self):
hcl_res = hcl2.loads("""
resource "azurerm_iothub" "example" {
name                          = "Example-IoTHub"
resource_group_name           = azurerm_resource_group.example.name
location                      = azurerm_resource_group.example.location
sku {
name     = "S1"
capacity = "1"
}
endpoint {
type                       = "AzureIotHub.StorageContainer"
connection_string          = azurerm_storage_account.example.primary_blob_connection_string
name                       = "export"
batch_frequency_in_seconds = 60
max_chunk_size_in_bytes    = 10485760
container_name             = azurerm_storage_container.example.name
encoding                   = "Avro"
file_name_format           = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}"
}
endpoint {
type              = "AzureIotHub.EventHub"
connection_string = azurerm_eventhub_authorization_rule.example.primary_connection_string
name              = "export2"
}
route {
name           = "export"
source         = "DeviceMessages"
condition      = "true"
endpoint_names = ["export"]
enabled        = true
}
route {
name           = "export2"
source         = "DeviceMessages"
condition      = "true"
endpoint_names = ["export2"]
enabled        = true
}
enrichment {
key            = "tenant"
value          = "$twin.tags.Tenant"
endpoint_names = ["export", "export2"]
}
tags = {
purpose = "testing"
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_iothub']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.PASSED, scan_result)
# FAILED expected when public_network_access_enabled is explicitly true.
def test_failure(self):
hcl_res = hcl2.loads("""
resource "azurerm_iothub" "example" {
name                          = "Example-IoTHub"
resource_group_name           = azurerm_resource_group.example.name
location                      = azurerm_resource_group.example.location
sku {
name     = "S1"
capacity = "1"
}
endpoint {
type                       = "AzureIotHub.StorageContainer"
connection_string          = azurerm_storage_account.example.primary_blob_connection_string
name                       = "export"
batch_frequency_in_seconds = 60
max_chunk_size_in_bytes    = 10485760
container_name             = azurerm_storage_container.example.name
encoding                   = "Avro"
file_name_format           = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}"
}
endpoint {
type              = "AzureIotHub.EventHub"
connection_string = azurerm_eventhub_authorization_rule.example.primary_connection_string
name              = "export2"
}
public_network_access_enabled = true
route {
name           = "export"
source         = "DeviceMessages"
condition      = "true"
endpoint_names = ["export"]
enabled        = true
}
route {
name           = "export2"
source         = "DeviceMessages"
condition      = "true"
endpoint_names = ["export2"]
enabled        = true
}
enrichment {
key            = "tenant"
value          = "$twin.tags.Tenant"
endpoint_names = ["export", "export2"]
}
tags = {
purpose = "testing"
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_iothub']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.FAILED, scan_result)
# PASSED expected when public_network_access_enabled is explicitly false.
def test_success(self):
hcl_res = hcl2.loads("""
resource "azurerm_iothub" "example" {
name                          = "Example-IoTHub"
resource_group_name           = azurerm_resource_group.example.name
location                      = azurerm_resource_group.example.location
sku {
name     = "S1"
capacity = "1"
}
endpoint {
type                       = "AzureIotHub.StorageContainer"
connection_string          = azurerm_storage_account.example.primary_blob_connection_string
name                       = "export"
batch_frequency_in_seconds = 60
max_chunk_size_in_bytes    = 10485760
container_name             = azurerm_storage_container.example.name
encoding                   = "Avro"
file_name_format           = "{iothub}/{partition}_{YYYY}_{MM}_{DD}_{HH}_{mm}"
}
endpoint {
type              = "AzureIotHub.EventHub"
connection_string = azurerm_eventhub_authorization_rule.example.primary_connection_string
name              = "export2"
}
route {
name           = "export"
source         = "DeviceMessages"
condition      = "true"
endpoint_names = ["export"]
enabled        = true
}
route {
name           = "export2"
source         = "DeviceMessages"
condition      = "true"
endpoint_names = ["export2"]
enabled        = true
}
enrichment {
key            = "tenant"
value          = "$twin.tags.Tenant"
endpoint_names = ["export", "export2"]
}
public_network_access_enabled = false
tags = {
purpose = "testing"
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_iothub']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.PASSED, scan_result)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
unittest.main()
| 38.572165
| 107
| 0.443539
| 515
| 7,483
| 6.128155
| 0.196117
| 0.060837
| 0.038023
| 0.051331
| 0.905894
| 0.889417
| 0.889417
| 0.889417
| 0.889417
| 0.889417
| 0
| 0.014271
| 0.484966
| 7,483
| 193
| 108
| 38.772021
| 0.804619
| 0
| 0
| 0.775758
| 0
| 0.018182
| 0.867032
| 0.165308
| 0
| 0
| 0
| 0
| 0.018182
| 1
| 0.018182
| false
| 0.012121
| 0.024242
| 0
| 0.048485
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6248827e546e22a9eec842e4b5b214545067d28c
| 123,378
|
py
|
Python
|
sdk/python/pulumi_google_native/monitoring/v3/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/monitoring/v3/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/monitoring/v3/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = [
'AggregationResponse',
'AlertStrategyResponse',
'AppEngineResponse',
'AvailabilityCriteriaResponse',
'BasicAuthenticationResponse',
'BasicSliResponse',
'CloudEndpointsResponse',
'ClusterIstioResponse',
'ConditionResponse',
'ContentMatcherResponse',
'CustomResponse',
'DistributionCutResponse',
'DocumentationResponse',
'GoogleMonitoringV3RangeResponse',
'HttpCheckResponse',
'InternalCheckerResponse',
'IstioCanonicalServiceResponse',
'LabelDescriptorResponse',
'LatencyCriteriaResponse',
'LogMatchResponse',
'MeshIstioResponse',
'MetricAbsenceResponse',
'MetricDescriptorMetadataResponse',
'MetricRangeResponse',
'MetricThresholdResponse',
'MonitoredResourceResponse',
'MonitoringQueryLanguageConditionResponse',
'MutationRecordResponse',
'NotificationRateLimitResponse',
'PerformanceThresholdResponse',
'RequestBasedSliResponse',
'ResourceGroupResponse',
'ServiceLevelIndicatorResponse',
'StatusResponse',
'TcpCheckResponse',
'TelemetryResponse',
'TimeSeriesRatioResponse',
'TriggerResponse',
'WindowsBasedSliResponse',
]
@pulumi.output_type
class AggregationResponse(dict):
"""
Describes how to combine multiple time series to provide a different view of the data. Aggregation of time series is done in two steps. First, each time series in the set is aligned to the same time interval boundaries, then the set of time series is optionally reduced in number.Alignment consists of applying the per_series_aligner operation to each time series after its data has been divided into regular alignment_period time intervals. This process takes all of the data points in an alignment period, applies a mathematical transformation such as averaging, minimum, maximum, delta, etc., and converts them into a single data point per period.Reduction is when the aligned and transformed time series can optionally be combined, reducing the number of time series through similar mathematical transformations. Reduction involves applying a cross_series_reducer to all the time series, optionally sorting the time series into subsets with group_by_fields, and applying the reducer to each subset.The raw time series data can contain a huge amount of information from multiple sources. Alignment and reduction transforms this mass of data into a more manageable and representative collection of data, for example "the 95% latency across the average of all tasks in a cluster". This representative data can be more easily graphed and comprehended, and the individual time series data is still available for later drilldown. For more details, see Filtering and aggregation (https://cloud.google.com/monitoring/api/v3/aggregation).
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "alignmentPeriod":
suggest = "alignment_period"
elif key == "crossSeriesReducer":
suggest = "cross_series_reducer"
elif key == "groupByFields":
suggest = "group_by_fields"
elif key == "perSeriesAligner":
suggest = "per_series_aligner"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AggregationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AggregationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AggregationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
alignment_period: str,
cross_series_reducer: str,
group_by_fields: Sequence[str],
per_series_aligner: str):
"""
Describes how to combine multiple time series to provide a different view of the data. Aggregation of time series is done in two steps. First, each time series in the set is aligned to the same time interval boundaries, then the set of time series is optionally reduced in number.Alignment consists of applying the per_series_aligner operation to each time series after its data has been divided into regular alignment_period time intervals. This process takes all of the data points in an alignment period, applies a mathematical transformation such as averaging, minimum, maximum, delta, etc., and converts them into a single data point per period.Reduction is when the aligned and transformed time series can optionally be combined, reducing the number of time series through similar mathematical transformations. Reduction involves applying a cross_series_reducer to all the time series, optionally sorting the time series into subsets with group_by_fields, and applying the reducer to each subset.The raw time series data can contain a huge amount of information from multiple sources. Alignment and reduction transforms this mass of data into a more manageable and representative collection of data, for example "the 95% latency across the average of all tasks in a cluster". This representative data can be more easily graphed and comprehended, and the individual time series data is still available for later drilldown. For more details, see Filtering and aggregation (https://cloud.google.com/monitoring/api/v3/aggregation).
:param str alignment_period: The alignment_period specifies a time interval, in seconds, that is used to divide the data in all the time series into consistent blocks of time. This will be done before the per-series aligner can be applied to the data.The value must be at least 60 seconds. If a per-series aligner other than ALIGN_NONE is specified, this field is required or an error is returned. If no per-series aligner is specified, or the aligner ALIGN_NONE is specified, then this field is ignored.The maximum value of the alignment_period is 104 weeks (2 years) for charts, and 90,000 seconds (25 hours) for alerting policies.
:param str cross_series_reducer: The reduction operation to be used to combine time series into a single time series, where the value of each data point in the resulting series is a function of all the already aligned values in the input time series.Not all reducer operations can be applied to all time series. The valid choices depend on the metric_kind and the value_type of the original time series. Reduction can yield a time series with a different metric_kind or value_type than the input time series.Time series data must first be aligned (see per_series_aligner) in order to perform cross-time series reduction. If cross_series_reducer is specified, then per_series_aligner must be specified, and must not be ALIGN_NONE. An alignment_period must also be specified; otherwise, an error is returned.
:param Sequence[str] group_by_fields: The set of fields to preserve when cross_series_reducer is specified. The group_by_fields determine how the time series are partitioned into subsets prior to applying the aggregation operation. Each subset contains time series that have the same value for each of the grouping fields. Each individual time series is a member of exactly one subset. The cross_series_reducer is applied to each subset of time series. It is not possible to reduce across different resource types, so this field implicitly contains resource.type. Fields not specified in group_by_fields are aggregated away. If group_by_fields is not specified and all the time series have the same resource type, then the time series are aggregated into a single output time series. If cross_series_reducer is not defined, this field is ignored.
:param str per_series_aligner: An Aligner describes how to bring the data points in a single time series into temporal alignment. Except for ALIGN_NONE, all alignments cause all the data points in an alignment_period to be mathematically grouped together, resulting in a single data point for each alignment_period with end timestamp at the end of the period.Not all alignment operations may be applied to all time series. The valid choices depend on the metric_kind and value_type of the original time series. Alignment can change the metric_kind or the value_type of the time series.Time series data must be aligned in order to perform cross-time series reduction. If cross_series_reducer is specified, then per_series_aligner must be specified and not equal to ALIGN_NONE and alignment_period must be specified; otherwise, an error is returned.
"""
pulumi.set(__self__, "alignment_period", alignment_period)
pulumi.set(__self__, "cross_series_reducer", cross_series_reducer)
pulumi.set(__self__, "group_by_fields", group_by_fields)
pulumi.set(__self__, "per_series_aligner", per_series_aligner)
    @property
    @pulumi.getter(name="alignmentPeriod")
    # Exposes the camelCase wire key "alignmentPeriod" as this snake_case property.
    def alignment_period(self) -> str:
        """
        The alignment_period specifies a time interval, in seconds, that is used to divide the data in all the time series into consistent blocks of time. This will be done before the per-series aligner can be applied to the data.The value must be at least 60 seconds. If a per-series aligner other than ALIGN_NONE is specified, this field is required or an error is returned. If no per-series aligner is specified, or the aligner ALIGN_NONE is specified, then this field is ignored.The maximum value of the alignment_period is 104 weeks (2 years) for charts, and 90,000 seconds (25 hours) for alerting policies.
        """
        return pulumi.get(self, "alignment_period")
    @property
    @pulumi.getter(name="crossSeriesReducer")
    # Exposes the camelCase wire key "crossSeriesReducer" as this snake_case property.
    def cross_series_reducer(self) -> str:
        """
        The reduction operation to be used to combine time series into a single time series, where the value of each data point in the resulting series is a function of all the already aligned values in the input time series.Not all reducer operations can be applied to all time series. The valid choices depend on the metric_kind and the value_type of the original time series. Reduction can yield a time series with a different metric_kind or value_type than the input time series.Time series data must first be aligned (see per_series_aligner) in order to perform cross-time series reduction. If cross_series_reducer is specified, then per_series_aligner must be specified, and must not be ALIGN_NONE. An alignment_period must also be specified; otherwise, an error is returned.
        """
        return pulumi.get(self, "cross_series_reducer")
    @property
    @pulumi.getter(name="groupByFields")
    # Exposes the camelCase wire key "groupByFields" as this snake_case property.
    def group_by_fields(self) -> Sequence[str]:
        """
        The set of fields to preserve when cross_series_reducer is specified. The group_by_fields determine how the time series are partitioned into subsets prior to applying the aggregation operation. Each subset contains time series that have the same value for each of the grouping fields. Each individual time series is a member of exactly one subset. The cross_series_reducer is applied to each subset of time series. It is not possible to reduce across different resource types, so this field implicitly contains resource.type. Fields not specified in group_by_fields are aggregated away. If group_by_fields is not specified and all the time series have the same resource type, then the time series are aggregated into a single output time series. If cross_series_reducer is not defined, this field is ignored.
        """
        return pulumi.get(self, "group_by_fields")
    @property
    @pulumi.getter(name="perSeriesAligner")
    # Exposes the camelCase wire key "perSeriesAligner" as this snake_case property.
    def per_series_aligner(self) -> str:
        """
        An Aligner describes how to bring the data points in a single time series into temporal alignment. Except for ALIGN_NONE, all alignments cause all the data points in an alignment_period to be mathematically grouped together, resulting in a single data point for each alignment_period with end timestamp at the end of the period.Not all alignment operations may be applied to all time series. The valid choices depend on the metric_kind and value_type of the original time series. Alignment can change the metric_kind or the value_type of the time series.Time series data must be aligned in order to perform cross-time series reduction. If cross_series_reducer is specified, then per_series_aligner must be specified and not equal to ALIGN_NONE and alignment_period must be specified; otherwise, an error is returned.
        """
        return pulumi.get(self, "per_series_aligner")
@pulumi.output_type
class AlertStrategyResponse(dict):
    """
    Control over how the notification channels in notification_channels are notified when this alert fires.
    """
    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case properties that replace them.
        suggest = {
            "autoClose": "auto_close",
            "notificationRateLimit": "notification_rate_limit",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AlertStrategyResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn about legacy camelCase access before delegating to dict.
        AlertStrategyResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Same camelCase-key warning for dict.get-style access.
        AlertStrategyResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 auto_close: str,
                 notification_rate_limit: 'outputs.NotificationRateLimitResponse'):
        """
        Control over how the notification channels in notification_channels are notified when this alert fires.
        :param str auto_close: If an alert policy that was active has no data for this long, any open incidents will close
        :param 'NotificationRateLimitResponse' notification_rate_limit: Required for alert policies with a LogMatch condition.This limit is not implemented for alert policies that are not log-based.
        """
        for attr, value in (
            ("auto_close", auto_close),
            ("notification_rate_limit", notification_rate_limit),
        ):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter(name="autoClose")
    def auto_close(self) -> str:
        """
        If an alert policy that was active has no data for this long, any open incidents will close
        """
        return pulumi.get(self, "auto_close")
    @property
    @pulumi.getter(name="notificationRateLimit")
    def notification_rate_limit(self) -> 'outputs.NotificationRateLimitResponse':
        """
        Required for alert policies with a LogMatch condition.This limit is not implemented for alert policies that are not log-based.
        """
        return pulumi.get(self, "notification_rate_limit")
@pulumi.output_type
class AppEngineResponse(dict):
    """
    App Engine service. Learn more at https://cloud.google.com/appengine.
    """
    @staticmethod
    def __key_warning(key: str):
        # Only a single camelCase key needs redirecting to its snake_case property.
        suggest = "module_id" if key == "moduleId" else None
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AppEngineResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn about legacy camelCase access before delegating to dict.
        AppEngineResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Same camelCase-key warning for dict.get-style access.
        AppEngineResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 module_id: str):
        """
        App Engine service. Learn more at https://cloud.google.com/appengine.
        :param str module_id: The ID of the App Engine module underlying this service. Corresponds to the module_id resource label in the gae_app monitored resource: https://cloud.google.com/monitoring/api/resources#tag_gae_app
        """
        pulumi.set(__self__, "module_id", module_id)
    @property
    @pulumi.getter(name="moduleId")
    def module_id(self) -> str:
        """
        The ID of the App Engine module underlying this service. Corresponds to the module_id resource label in the gae_app monitored resource: https://cloud.google.com/monitoring/api/resources#tag_gae_app
        """
        return pulumi.get(self, "module_id")
@pulumi.output_type
class AvailabilityCriteriaResponse(dict):
    """
    Future parameters for the availability SLI.
    """
    def __init__(__self__):
        """
        Future parameters for the availability SLI.
        """
        # Intentionally empty: the underlying API message currently defines no fields.
        pass
@pulumi.output_type
class BasicAuthenticationResponse(dict):
    """
    The authentication parameters to provide to the specified resource or URL that requires a username and password. Currently, only Basic HTTP authentication (https://tools.ietf.org/html/rfc7617) is supported in Uptime checks.
    """
    def __init__(__self__, *,
                 password: str,
                 username: str):
        """
        The authentication parameters to provide to the specified resource or URL that requires a username and password. Currently, only Basic HTTP authentication (https://tools.ietf.org/html/rfc7617) is supported in Uptime checks.
        :param str password: The password to use when authenticating with the HTTP server.
        :param str username: The username to use when authenticating with the HTTP server.
        """
        # Store each field on the output object under its snake_case name.
        for attr, value in (("password", password), ("username", username)):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter
    def password(self) -> str:
        """
        The password to use when authenticating with the HTTP server.
        """
        return pulumi.get(self, "password")
    @property
    @pulumi.getter
    def username(self) -> str:
        """
        The username to use when authenticating with the HTTP server.
        """
        return pulumi.get(self, "username")
@pulumi.output_type
class BasicSliResponse(dict):
    """
    An SLI measuring performance on a well-known service type. Performance will be computed on the basis of pre-defined metrics. The type of the service_resource determines the metrics to use and the service_resource.labels and metric_labels are used to construct a monitoring filter to filter that metric down to just the data relevant to this service.
    """
    def __init__(__self__, *,
                 availability: 'outputs.AvailabilityCriteriaResponse',
                 latency: 'outputs.LatencyCriteriaResponse',
                 location: Sequence[str],
                 method: Sequence[str],
                 version: Sequence[str]):
        """
        An SLI measuring performance on a well-known service type. Performance will be computed on the basis of pre-defined metrics. The type of the service_resource determines the metrics to use and the service_resource.labels and metric_labels are used to construct a monitoring filter to filter that metric down to just the data relevant to this service.
        :param 'AvailabilityCriteriaResponse' availability: Good service is defined to be the count of requests made to this service that return successfully.
        :param 'LatencyCriteriaResponse' latency: Good service is defined to be the count of requests made to this service that are fast enough with respect to latency.threshold.
        :param Sequence[str] location: OPTIONAL: The set of locations to which this SLI is relevant. Telemetry from other locations will not be used to calculate performance for this SLI. If omitted, this SLI applies to all locations in which the Service has activity. For service types that don't support breaking down by location, setting this field will result in an error.
        :param Sequence[str] method: OPTIONAL: The set of RPCs to which this SLI is relevant. Telemetry from other methods will not be used to calculate performance for this SLI. If omitted, this SLI applies to all the Service's methods. For service types that don't support breaking down by method, setting this field will result in an error.
        :param Sequence[str] version: OPTIONAL: The set of API versions to which this SLI is relevant. Telemetry from other API versions will not be used to calculate performance for this SLI. If omitted, this SLI applies to all API versions. For service types that don't support breaking down by version, setting this field will result in an error.
        """
        # Store each field on the output object under its snake_case name.
        for attr, value in (
            ("availability", availability),
            ("latency", latency),
            ("location", location),
            ("method", method),
            ("version", version),
        ):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter
    def availability(self) -> 'outputs.AvailabilityCriteriaResponse':
        """
        Good service is defined to be the count of requests made to this service that return successfully.
        """
        return pulumi.get(self, "availability")
    @property
    @pulumi.getter
    def latency(self) -> 'outputs.LatencyCriteriaResponse':
        """
        Good service is defined to be the count of requests made to this service that are fast enough with respect to latency.threshold.
        """
        return pulumi.get(self, "latency")
    @property
    @pulumi.getter
    def location(self) -> Sequence[str]:
        """
        OPTIONAL: The set of locations to which this SLI is relevant. Telemetry from other locations will not be used to calculate performance for this SLI. If omitted, this SLI applies to all locations in which the Service has activity. For service types that don't support breaking down by location, setting this field will result in an error.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def method(self) -> Sequence[str]:
        """
        OPTIONAL: The set of RPCs to which this SLI is relevant. Telemetry from other methods will not be used to calculate performance for this SLI. If omitted, this SLI applies to all the Service's methods. For service types that don't support breaking down by method, setting this field will result in an error.
        """
        return pulumi.get(self, "method")
    @property
    @pulumi.getter
    def version(self) -> Sequence[str]:
        """
        OPTIONAL: The set of API versions to which this SLI is relevant. Telemetry from other API versions will not be used to calculate performance for this SLI. If omitted, this SLI applies to all API versions. For service types that don't support breaking down by version, setting this field will result in an error.
        """
        return pulumi.get(self, "version")
@pulumi.output_type
class CloudEndpointsResponse(dict):
    """
    Cloud Endpoints service. Learn more at https://cloud.google.com/endpoints.
    """
    def __init__(__self__, *,
                 service: str):
        """
        Cloud Endpoints service. Learn more at https://cloud.google.com/endpoints.
        :param str service: The name of the Cloud Endpoints service underlying this service. Corresponds to the service resource label in the api monitored resource: https://cloud.google.com/monitoring/api/resources#tag_api
        """
        # Single field; stored on the output object under its snake_case name.
        pulumi.set(__self__, "service", service)
    @property
    @pulumi.getter
    def service(self) -> str:
        """
        The name of the Cloud Endpoints service underlying this service. Corresponds to the service resource label in the api monitored resource: https://cloud.google.com/monitoring/api/resources#tag_api
        """
        return pulumi.get(self, "service")
@pulumi.output_type
class ClusterIstioResponse(dict):
    """
    Istio service scoped to a single Kubernetes cluster. Learn more at https://istio.io. Clusters running OSS Istio will have their services ingested as this type.
    """
    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case properties that replace them.
        suggest = {
            "clusterName": "cluster_name",
            "serviceName": "service_name",
            "serviceNamespace": "service_namespace",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ClusterIstioResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn about legacy camelCase access before delegating to dict.
        ClusterIstioResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Same camelCase-key warning for dict.get-style access.
        ClusterIstioResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 cluster_name: str,
                 location: str,
                 service_name: str,
                 service_namespace: str):
        """
        Istio service scoped to a single Kubernetes cluster. Learn more at https://istio.io. Clusters running OSS Istio will have their services ingested as this type.
        :param str cluster_name: The name of the Kubernetes cluster in which this Istio service is defined. Corresponds to the cluster_name resource label in k8s_cluster resources.
        :param str location: The location of the Kubernetes cluster in which this Istio service is defined. Corresponds to the location resource label in k8s_cluster resources.
        :param str service_name: The name of the Istio service underlying this service. Corresponds to the destination_service_name metric label in Istio metrics.
        :param str service_namespace: The namespace of the Istio service underlying this service. Corresponds to the destination_service_namespace metric label in Istio metrics.
        """
        for attr, value in (
            ("cluster_name", cluster_name),
            ("location", location),
            ("service_name", service_name),
            ("service_namespace", service_namespace),
        ):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter(name="clusterName")
    def cluster_name(self) -> str:
        """
        The name of the Kubernetes cluster in which this Istio service is defined. Corresponds to the cluster_name resource label in k8s_cluster resources.
        """
        return pulumi.get(self, "cluster_name")
    @property
    @pulumi.getter
    def location(self) -> str:
        """
        The location of the Kubernetes cluster in which this Istio service is defined. Corresponds to the location resource label in k8s_cluster resources.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter(name="serviceName")
    def service_name(self) -> str:
        """
        The name of the Istio service underlying this service. Corresponds to the destination_service_name metric label in Istio metrics.
        """
        return pulumi.get(self, "service_name")
    @property
    @pulumi.getter(name="serviceNamespace")
    def service_namespace(self) -> str:
        """
        The namespace of the Istio service underlying this service. Corresponds to the destination_service_namespace metric label in Istio metrics.
        """
        return pulumi.get(self, "service_namespace")
@pulumi.output_type
class ConditionResponse(dict):
    """
    A condition is a true/false test that determines when an alerting policy should open an incident. If a condition evaluates to true, it signifies that something is wrong.
    """
    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case properties that replace them.
        suggest = {
            "conditionAbsent": "condition_absent",
            "conditionMatchedLog": "condition_matched_log",
            "conditionMonitoringQueryLanguage": "condition_monitoring_query_language",
            "conditionThreshold": "condition_threshold",
            "displayName": "display_name",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ConditionResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn about legacy camelCase access before delegating to dict.
        ConditionResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Same camelCase-key warning for dict.get-style access.
        ConditionResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 condition_absent: 'outputs.MetricAbsenceResponse',
                 condition_matched_log: 'outputs.LogMatchResponse',
                 condition_monitoring_query_language: 'outputs.MonitoringQueryLanguageConditionResponse',
                 condition_threshold: 'outputs.MetricThresholdResponse',
                 display_name: str,
                 name: str):
        """
        A condition is a true/false test that determines when an alerting policy should open an incident. If a condition evaluates to true, it signifies that something is wrong.
        :param 'MetricAbsenceResponse' condition_absent: A condition that checks that a time series continues to receive new data points.
        :param 'LogMatchResponse' condition_matched_log: A condition that checks for log messages matching given constraints. If set, no other conditions can be present.
        :param 'MonitoringQueryLanguageConditionResponse' condition_monitoring_query_language: A condition that uses the Monitoring Query Language to define alerts.
        :param 'MetricThresholdResponse' condition_threshold: A condition that compares a time series against a threshold.
        :param str display_name: A short name or phrase used to identify the condition in dashboards, notifications, and incidents. To avoid confusion, don't use the same display name for multiple conditions in the same policy.
        :param str name: Required if the condition exists. The unique resource name for this condition. Its format is: projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID] [CONDITION_ID] is assigned by Stackdriver Monitoring when the condition is created as part of a new or updated alerting policy.When calling the alertPolicies.create method, do not include the name field in the conditions of the requested alerting policy. Stackdriver Monitoring creates the condition identifiers and includes them in the new policy.When calling the alertPolicies.update method to update a policy, including a condition name causes the existing condition to be updated. Conditions without names are added to the updated policy. Existing conditions are deleted if they are not updated.Best practice is to preserve [CONDITION_ID] if you make only small changes, such as those to condition thresholds, durations, or trigger values. Otherwise, treat the change as a new condition and let the existing condition be deleted.
        """
        for attr, value in (
            ("condition_absent", condition_absent),
            ("condition_matched_log", condition_matched_log),
            ("condition_monitoring_query_language", condition_monitoring_query_language),
            ("condition_threshold", condition_threshold),
            ("display_name", display_name),
            ("name", name),
        ):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter(name="conditionAbsent")
    def condition_absent(self) -> 'outputs.MetricAbsenceResponse':
        """
        A condition that checks that a time series continues to receive new data points.
        """
        return pulumi.get(self, "condition_absent")
    @property
    @pulumi.getter(name="conditionMatchedLog")
    def condition_matched_log(self) -> 'outputs.LogMatchResponse':
        """
        A condition that checks for log messages matching given constraints. If set, no other conditions can be present.
        """
        return pulumi.get(self, "condition_matched_log")
    @property
    @pulumi.getter(name="conditionMonitoringQueryLanguage")
    def condition_monitoring_query_language(self) -> 'outputs.MonitoringQueryLanguageConditionResponse':
        """
        A condition that uses the Monitoring Query Language to define alerts.
        """
        return pulumi.get(self, "condition_monitoring_query_language")
    @property
    @pulumi.getter(name="conditionThreshold")
    def condition_threshold(self) -> 'outputs.MetricThresholdResponse':
        """
        A condition that compares a time series against a threshold.
        """
        return pulumi.get(self, "condition_threshold")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        A short name or phrase used to identify the condition in dashboards, notifications, and incidents. To avoid confusion, don't use the same display name for multiple conditions in the same policy.
        """
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Required if the condition exists. The unique resource name for this condition. Its format is: projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID] [CONDITION_ID] is assigned by Stackdriver Monitoring when the condition is created as part of a new or updated alerting policy.When calling the alertPolicies.create method, do not include the name field in the conditions of the requested alerting policy. Stackdriver Monitoring creates the condition identifiers and includes them in the new policy.When calling the alertPolicies.update method to update a policy, including a condition name causes the existing condition to be updated. Conditions without names are added to the updated policy. Existing conditions are deleted if they are not updated.Best practice is to preserve [CONDITION_ID] if you make only small changes, such as those to condition thresholds, durations, or trigger values. Otherwise, treat the change as a new condition and let the existing condition be deleted.
        """
        return pulumi.get(self, "name")
@pulumi.output_type
class ContentMatcherResponse(dict):
    """
    Optional. Used to perform content matching. This allows matching based on substrings and regular expressions, together with their negations. Only the first 4 MB of an HTTP or HTTPS check's response (and the first 1 MB of a TCP check's response) are examined for purposes of content matching.
    """
    def __init__(__self__, *,
                 content: str,
                 matcher: str):
        """
        Optional. Used to perform content matching. This allows matching based on substrings and regular expressions, together with their negations. Only the first 4 MB of an HTTP or HTTPS check's response (and the first 1 MB of a TCP check's response) are examined for purposes of content matching.
        :param str content: String or regex content to match. Maximum 1024 bytes. An empty content string indicates no content matching is to be performed.
        :param str matcher: The type of content matcher that will be applied to the server output, compared to the content string when the check is run.
        """
        # Store each field on the output object under its snake_case name.
        for attr, value in (("content", content), ("matcher", matcher)):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter
    def content(self) -> str:
        """
        String or regex content to match. Maximum 1024 bytes. An empty content string indicates no content matching is to be performed.
        """
        return pulumi.get(self, "content")
    @property
    @pulumi.getter
    def matcher(self) -> str:
        """
        The type of content matcher that will be applied to the server output, compared to the content string when the check is run.
        """
        return pulumi.get(self, "matcher")
@pulumi.output_type
class CustomResponse(dict):
    """
    Custom view of service telemetry. Currently a place-holder pending final design.
    """
    def __init__(__self__):
        """
        Custom view of service telemetry. Currently a place-holder pending final design.
        """
        # Intentionally empty: the underlying API message currently defines no fields.
        pass
@pulumi.output_type
class DistributionCutResponse(dict):
    """
    A DistributionCut defines a TimeSeries and thresholds used for measuring good service and total service. The TimeSeries must have ValueType = DISTRIBUTION and MetricKind = DELTA or MetricKind = CUMULATIVE. The computed good_service will be the estimated count of values in the Distribution that fall within the specified min and max.
    """
    @staticmethod
    def __key_warning(key: str):
        # Only a single camelCase key needs redirecting to its snake_case property.
        suggest = "distribution_filter" if key == "distributionFilter" else None
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in DistributionCutResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn about legacy camelCase access before delegating to dict.
        DistributionCutResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Same camelCase-key warning for dict.get-style access.
        DistributionCutResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 distribution_filter: str,
                 range: 'outputs.GoogleMonitoringV3RangeResponse'):
        """
        A DistributionCut defines a TimeSeries and thresholds used for measuring good service and total service. The TimeSeries must have ValueType = DISTRIBUTION and MetricKind = DELTA or MetricKind = CUMULATIVE. The computed good_service will be the estimated count of values in the Distribution that fall within the specified min and max.
        :param str distribution_filter: A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries aggregating values. Must have ValueType = DISTRIBUTION and MetricKind = DELTA or MetricKind = CUMULATIVE.
        :param 'GoogleMonitoringV3RangeResponse' range: Range of values considered "good." For a one-sided range, set one bound to an infinite value.
        """
        # NOTE: the parameter name `range` mirrors the API field and is part of
        # the public keyword interface, so it must keep shadowing the builtin.
        for attr, value in (("distribution_filter", distribution_filter), ("range", range)):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter(name="distributionFilter")
    def distribution_filter(self) -> str:
        """
        A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries aggregating values. Must have ValueType = DISTRIBUTION and MetricKind = DELTA or MetricKind = CUMULATIVE.
        """
        return pulumi.get(self, "distribution_filter")
    @property
    @pulumi.getter
    def range(self) -> 'outputs.GoogleMonitoringV3RangeResponse':
        """
        Range of values considered "good." For a one-sided range, set one bound to an infinite value.
        """
        return pulumi.get(self, "range")
@pulumi.output_type
class DocumentationResponse(dict):
    """
    A content string and a MIME type that describes the content string's format.
    """
    @staticmethod
    def __key_warning(key: str):
        # Only a single camelCase key needs redirecting to its snake_case property.
        suggest = "mime_type" if key == "mimeType" else None
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in DocumentationResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn about legacy camelCase access before delegating to dict.
        DocumentationResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Same camelCase-key warning for dict.get-style access.
        DocumentationResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 content: str,
                 mime_type: str):
        """
        A content string and a MIME type that describes the content string's format.
        :param str content: The text of the documentation, interpreted according to mime_type. The content may not exceed 8,192 Unicode characters and may not exceed more than 10,240 bytes when encoded in UTF-8 format, whichever is smaller.
        :param str mime_type: The format of the content field. Presently, only the value "text/markdown" is supported. See Markdown (https://en.wikipedia.org/wiki/Markdown) for more information.
        """
        for attr, value in (("content", content), ("mime_type", mime_type)):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter
    def content(self) -> str:
        """
        The text of the documentation, interpreted according to mime_type. The content may not exceed 8,192 Unicode characters and may not exceed more than 10,240 bytes when encoded in UTF-8 format, whichever is smaller.
        """
        return pulumi.get(self, "content")
    @property
    @pulumi.getter(name="mimeType")
    def mime_type(self) -> str:
        """
        The format of the content field. Presently, only the value "text/markdown" is supported. See Markdown (https://en.wikipedia.org/wiki/Markdown) for more information.
        """
        return pulumi.get(self, "mime_type")
@pulumi.output_type
class GoogleMonitoringV3RangeResponse(dict):
    """
    Range of numerical values within min and max.
    """
    def __init__(__self__, *,
                 max: float,
                 min: float):
        """
        Range of numerical values within min and max.
        :param float max: Range maximum.
        :param float min: Range minimum.
        """
        # NOTE: `max`/`min` mirror the API field names and are part of the public
        # keyword interface, so they must keep shadowing the builtins.
        for attr, value in (("max", max), ("min", min)):
            pulumi.set(__self__, attr, value)
    @property
    @pulumi.getter
    def max(self) -> float:
        """
        Range maximum.
        """
        return pulumi.get(self, "max")
    @property
    @pulumi.getter
    def min(self) -> float:
        """
        Range minimum.
        """
        return pulumi.get(self, "min")
@pulumi.output_type
class HttpCheckResponse(dict):
"""
Information involved in an HTTP/HTTPS Uptime check request.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "authInfo":
suggest = "auth_info"
elif key == "contentType":
suggest = "content_type"
elif key == "maskHeaders":
suggest = "mask_headers"
elif key == "requestMethod":
suggest = "request_method"
elif key == "useSsl":
suggest = "use_ssl"
elif key == "validateSsl":
suggest = "validate_ssl"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in HttpCheckResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn when a legacy camelCase key is used, then fall through to dict lookup.
        HttpCheckResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Same camelCase-key warning for dict.get-style access.
        HttpCheckResponse.__key_warning(key)
        return super().get(key, default)
def __init__(__self__, *,
auth_info: 'outputs.BasicAuthenticationResponse',
body: str,
content_type: str,
headers: Mapping[str, str],
mask_headers: bool,
path: str,
port: int,
request_method: str,
use_ssl: bool,
validate_ssl: bool):
"""
Information involved in an HTTP/HTTPS Uptime check request.
:param 'BasicAuthenticationResponse' auth_info: The authentication information. Optional when creating an HTTP check; defaults to empty.
:param str body: The request body associated with the HTTP POST request. If content_type is URL_ENCODED, the body passed in must be URL-encoded. Users can provide a Content-Length header via the headers field or the API will do so. If the request_method is GET and body is not empty, the API will return an error. The maximum byte size is 1 megabyte.Note: If client libraries aren't used (which performs the conversion automatically) base64 encode your body data since the field is of bytes type.
:param str content_type: The content type header to use for the check. The following configurations result in errors: 1. Content type is specified in both the headers field and the content_type field. 2. Request method is GET and content_type is not TYPE_UNSPECIFIED 3. Request method is POST and content_type is TYPE_UNSPECIFIED. 4. Request method is POST and a "Content-Type" header is provided via headers field. The content_type field should be used instead.
:param Mapping[str, str] headers: The list of headers to send as part of the Uptime check request. If two headers have the same key and different values, they should be entered as a single header, with the value being a comma-separated list of all the desired values as described at https://www.w3.org/Protocols/rfc2616/rfc2616.txt (page 31). Entering two separate headers with the same key in a Create call will cause the first to be overwritten by the second. The maximum number of headers allowed is 100.
:param bool mask_headers: Boolean specifying whether to encrypt the header information. Encryption should be specified for any headers related to authentication that you do not wish to be seen when retrieving the configuration. The server will be responsible for encrypting the headers. On Get/List calls, if mask_headers is set to true then the headers will be obscured with ******.
:param str path: Optional (defaults to "/"). The path to the page against which to run the check. Will be combined with the host (specified within the monitored_resource) and port to construct the full URL. If the provided path does not begin with "/", a "/" will be prepended automatically.
:param int port: Optional (defaults to 80 when use_ssl is false, and 443 when use_ssl is true). The TCP port on the HTTP server against which to run the check. Will be combined with host (specified within the monitored_resource) and path to construct the full URL.
:param str request_method: The HTTP request method to use for the check. If set to METHOD_UNSPECIFIED then request_method defaults to GET.
:param bool use_ssl: If true, use HTTPS instead of HTTP to run the check.
:param bool validate_ssl: Boolean specifying whether to include SSL certificate validation as a part of the Uptime check. Only applies to checks where monitored_resource is set to uptime_url. If use_ssl is false, setting validate_ssl to true has no effect.
"""
pulumi.set(__self__, "auth_info", auth_info)
pulumi.set(__self__, "body", body)
pulumi.set(__self__, "content_type", content_type)
pulumi.set(__self__, "headers", headers)
pulumi.set(__self__, "mask_headers", mask_headers)
pulumi.set(__self__, "path", path)
pulumi.set(__self__, "port", port)
pulumi.set(__self__, "request_method", request_method)
pulumi.set(__self__, "use_ssl", use_ssl)
pulumi.set(__self__, "validate_ssl", validate_ssl)
    @property
    @pulumi.getter(name="authInfo")
    def auth_info(self) -> 'outputs.BasicAuthenticationResponse':
        """
        The authentication information. Optional when creating an HTTP check; defaults to empty.
        """
        # Value stored under "auth_info" by pulumi.set in __init__.
        return pulumi.get(self, "auth_info")
    @property
    @pulumi.getter
    def body(self) -> str:
        """
        The request body associated with the HTTP POST request. If content_type is URL_ENCODED, the body passed in must be URL-encoded. Users can provide a Content-Length header via the headers field or the API will do so. If the request_method is GET and body is not empty, the API will return an error. The maximum byte size is 1 megabyte.Note: If client libraries aren't used (which performs the conversion automatically) base64 encode your body data since the field is of bytes type.
        """
        # Value stored under "body" by pulumi.set in __init__.
        return pulumi.get(self, "body")
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> str:
        """
        The content type header to use for the check. The following configurations result in errors: 1. Content type is specified in both the headers field and the content_type field. 2. Request method is GET and content_type is not TYPE_UNSPECIFIED 3. Request method is POST and content_type is TYPE_UNSPECIFIED. 4. Request method is POST and a "Content-Type" header is provided via headers field. The content_type field should be used instead.
        """
        # Value stored under "content_type" by pulumi.set in __init__.
        return pulumi.get(self, "content_type")
    @property
    @pulumi.getter
    def headers(self) -> Mapping[str, str]:
        """
        The list of headers to send as part of the Uptime check request. If two headers have the same key and different values, they should be entered as a single header, with the value being a comma-separated list of all the desired values as described at https://www.w3.org/Protocols/rfc2616/rfc2616.txt (page 31). Entering two separate headers with the same key in a Create call will cause the first to be overwritten by the second. The maximum number of headers allowed is 100.
        """
        # Value stored under "headers" by pulumi.set in __init__.
        return pulumi.get(self, "headers")
    @property
    @pulumi.getter(name="maskHeaders")
    def mask_headers(self) -> bool:
        """
        Boolean specifying whether to encrypt the header information. Encryption should be specified for any headers related to authentication that you do not wish to be seen when retrieving the configuration. The server will be responsible for encrypting the headers. On Get/List calls, if mask_headers is set to true then the headers will be obscured with ******.
        """
        # Value stored under "mask_headers" by pulumi.set in __init__.
        return pulumi.get(self, "mask_headers")
    @property
    @pulumi.getter
    def path(self) -> str:
        """
        Optional (defaults to "/"). The path to the page against which to run the check. Will be combined with the host (specified within the monitored_resource) and port to construct the full URL. If the provided path does not begin with "/", a "/" will be prepended automatically.
        """
        # Value stored under "path" by pulumi.set in __init__.
        return pulumi.get(self, "path")
    @property
    @pulumi.getter
    def port(self) -> int:
        """
        Optional (defaults to 80 when use_ssl is false, and 443 when use_ssl is true). The TCP port on the HTTP server against which to run the check. Will be combined with host (specified within the monitored_resource) and path to construct the full URL.
        """
        # Value stored under "port" by pulumi.set in __init__.
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="requestMethod")
    def request_method(self) -> str:
        """
        The HTTP request method to use for the check. If set to METHOD_UNSPECIFIED then request_method defaults to GET.
        """
        # Value stored under "request_method" by pulumi.set in __init__.
        return pulumi.get(self, "request_method")
    @property
    @pulumi.getter(name="useSsl")
    def use_ssl(self) -> bool:
        """
        If true, use HTTPS instead of HTTP to run the check.
        """
        # Value stored under "use_ssl" by pulumi.set in __init__.
        return pulumi.get(self, "use_ssl")
    @property
    @pulumi.getter(name="validateSsl")
    def validate_ssl(self) -> bool:
        """
        Boolean specifying whether to include SSL certificate validation as a part of the Uptime check. Only applies to checks where monitored_resource is set to uptime_url. If use_ssl is false, setting validate_ssl to true has no effect.
        """
        # Value stored under "validate_ssl" by pulumi.set in __init__.
        return pulumi.get(self, "validate_ssl")
@pulumi.output_type
class InternalCheckerResponse(dict):
    """
    An internal checker allows Uptime checks to run on private/internal GCP resources.
    """
    @staticmethod
    def __key_warning(key: str):
        # Look up the snake_case property name for a camelCase dict key;
        # unknown keys map to None and produce no warning.
        replacement = {
            "displayName": "display_name",
            "gcpZone": "gcp_zone",
            "peerProjectId": "peer_project_id",
        }.get(key)
        if replacement:
            pulumi.log.warn(f"Key '{key}' not found in InternalCheckerResponse. Access the value via the '{replacement}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        InternalCheckerResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        InternalCheckerResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 display_name: str,
                 gcp_zone: str,
                 name: str,
                 network: str,
                 peer_project_id: str,
                 state: str):
        """
        An internal checker allows Uptime checks to run on private/internal GCP resources.
        :param str display_name: The checker's human-readable name. The display name should be unique within a Stackdriver Workspace in order to make it easier to identify; however, uniqueness is not enforced.
        :param str gcp_zone: The GCP zone the Uptime check should egress from. Only respected for internal Uptime checks, where internal_network is specified.
        :param str name: A unique resource name for this InternalChecker. The format is: projects/[PROJECT_ID_OR_NUMBER]/internalCheckers/[INTERNAL_CHECKER_ID] [PROJECT_ID_OR_NUMBER] is the Stackdriver Workspace project for the Uptime check config associated with the internal checker.
        :param str network: The GCP VPC network (https://cloud.google.com/vpc/docs/vpc) where the internal resource lives (ex: "default").
        :param str peer_project_id: The GCP project ID where the internal checker lives. Not necessary the same as the Workspace project.
        :param str state: The current operational state of the internal checker.
        """
        # Register every field with the pulumi runtime under its snake_case name.
        for attr, value in (
                ("display_name", display_name),
                ("gcp_zone", gcp_zone),
                ("name", name),
                ("network", network),
                ("peer_project_id", peer_project_id),
                ("state", state)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        The checker's human-readable name. The display name should be unique within a Stackdriver Workspace in order to make it easier to identify; however, uniqueness is not enforced.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="gcpZone")
    def gcp_zone(self) -> str:
        """
        The GCP zone the Uptime check should egress from. Only respected for internal Uptime checks, where internal_network is specified.
        """
        return pulumi.get(self, "gcp_zone")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        A unique resource name for this InternalChecker. The format is: projects/[PROJECT_ID_OR_NUMBER]/internalCheckers/[INTERNAL_CHECKER_ID] [PROJECT_ID_OR_NUMBER] is the Stackdriver Workspace project for the Uptime check config associated with the internal checker.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def network(self) -> str:
        """
        The GCP VPC network (https://cloud.google.com/vpc/docs/vpc) where the internal resource lives (ex: "default").
        """
        return pulumi.get(self, "network")

    @property
    @pulumi.getter(name="peerProjectId")
    def peer_project_id(self) -> str:
        """
        The GCP project ID where the internal checker lives. Not necessary the same as the Workspace project.
        """
        return pulumi.get(self, "peer_project_id")

    @property
    @pulumi.getter
    def state(self) -> str:
        """
        The current operational state of the internal checker.
        """
        return pulumi.get(self, "state")
@pulumi.output_type
class IstioCanonicalServiceResponse(dict):
    """
    Canonical service scoped to an Istio mesh. Anthos clusters running ASM >= 1.6.8 will have their services ingested as this type.
    """
    @staticmethod
    def __key_warning(key: str):
        # Look up the snake_case property name for a camelCase dict key;
        # unknown keys map to None and produce no warning.
        replacement = {
            "canonicalService": "canonical_service",
            "canonicalServiceNamespace": "canonical_service_namespace",
            "meshUid": "mesh_uid",
        }.get(key)
        if replacement:
            pulumi.log.warn(f"Key '{key}' not found in IstioCanonicalServiceResponse. Access the value via the '{replacement}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        IstioCanonicalServiceResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        IstioCanonicalServiceResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 canonical_service: str,
                 canonical_service_namespace: str,
                 mesh_uid: str):
        """
        Canonical service scoped to an Istio mesh. Anthos clusters running ASM >= 1.6.8 will have their services ingested as this type.
        :param str canonical_service: The name of the canonical service underlying this service. Corresponds to the destination_canonical_service_name metric label in label in Istio metrics (https://cloud.google.com/monitoring/api/metrics_istio).
        :param str canonical_service_namespace: The namespace of the canonical service underlying this service. Corresponds to the destination_canonical_service_namespace metric label in Istio metrics (https://cloud.google.com/monitoring/api/metrics_istio).
        :param str mesh_uid: Identifier for the Istio mesh in which this canonical service is defined. Corresponds to the mesh_uid metric label in Istio metrics (https://cloud.google.com/monitoring/api/metrics_istio).
        """
        # Register every field with the pulumi runtime under its snake_case name.
        for attr, value in (
                ("canonical_service", canonical_service),
                ("canonical_service_namespace", canonical_service_namespace),
                ("mesh_uid", mesh_uid)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="canonicalService")
    def canonical_service(self) -> str:
        """
        The name of the canonical service underlying this service. Corresponds to the destination_canonical_service_name metric label in label in Istio metrics (https://cloud.google.com/monitoring/api/metrics_istio).
        """
        return pulumi.get(self, "canonical_service")

    @property
    @pulumi.getter(name="canonicalServiceNamespace")
    def canonical_service_namespace(self) -> str:
        """
        The namespace of the canonical service underlying this service. Corresponds to the destination_canonical_service_namespace metric label in Istio metrics (https://cloud.google.com/monitoring/api/metrics_istio).
        """
        return pulumi.get(self, "canonical_service_namespace")

    @property
    @pulumi.getter(name="meshUid")
    def mesh_uid(self) -> str:
        """
        Identifier for the Istio mesh in which this canonical service is defined. Corresponds to the mesh_uid metric label in Istio metrics (https://cloud.google.com/monitoring/api/metrics_istio).
        """
        return pulumi.get(self, "mesh_uid")
@pulumi.output_type
class LabelDescriptorResponse(dict):
    """
    A description of a label.
    """
    @staticmethod
    def __key_warning(key: str):
        # Look up the snake_case property name for a camelCase dict key;
        # unknown keys map to None and produce no warning.
        replacement = {"valueType": "value_type"}.get(key)
        if replacement:
            pulumi.log.warn(f"Key '{key}' not found in LabelDescriptorResponse. Access the value via the '{replacement}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        LabelDescriptorResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        LabelDescriptorResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 description: str,
                 key: str,
                 value_type: str):
        """
        A description of a label.
        :param str description: A human-readable description for the label.
        :param str key: The key for this label. The key must meet the following criteria: Does not exceed 100 characters. Matches the following regular expression: [a-zA-Z][a-zA-Z0-9_]* The first character must be an upper- or lower-case letter. The remaining characters must be letters, digits, or underscores.
        :param str value_type: The type of data that can be assigned to the label.
        """
        # Register every field with the pulumi runtime under its snake_case name.
        for attr, value in (
                ("description", description),
                ("key", key),
                ("value_type", value_type)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        A human-readable description for the label.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        The key for this label. The key must meet the following criteria: Does not exceed 100 characters. Matches the following regular expression: [a-zA-Z][a-zA-Z0-9_]* The first character must be an upper- or lower-case letter. The remaining characters must be letters, digits, or underscores.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter(name="valueType")
    def value_type(self) -> str:
        """
        The type of data that can be assigned to the label.
        """
        return pulumi.get(self, "value_type")
@pulumi.output_type
class LatencyCriteriaResponse(dict):
    """
    Parameters for a latency threshold SLI.
    """
    def __init__(__self__, *, threshold: str):
        """
        Parameters for a latency threshold SLI.
        :param str threshold: Good service is defined to be the count of requests made to this service that return in no more than threshold.
        """
        # Register the single field with the pulumi runtime.
        pulumi.set(__self__, "threshold", threshold)

    @property
    @pulumi.getter
    def threshold(self) -> str:
        """
        Good service is defined to be the count of requests made to this service that return in no more than threshold.
        """
        return pulumi.get(self, "threshold")
@pulumi.output_type
class LogMatchResponse(dict):
    """
    A condition type that checks whether a log message in the scoping project (https://cloud.google.com/monitoring/api/v3#project_name) satisfies the given filter. Logs from other projects in the metrics scope are not evaluated.
    """
    @staticmethod
    def __key_warning(key: str):
        # Look up the snake_case property name for a camelCase dict key;
        # unknown keys map to None and produce no warning.
        replacement = {"labelExtractors": "label_extractors"}.get(key)
        if replacement:
            pulumi.log.warn(f"Key '{key}' not found in LogMatchResponse. Access the value via the '{replacement}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        LogMatchResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        LogMatchResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 filter: str,
                 label_extractors: Mapping[str, str]):
        """
        A condition type that checks whether a log message in the scoping project (https://cloud.google.com/monitoring/api/v3#project_name) satisfies the given filter. Logs from other projects in the metrics scope are not evaluated.
        :param str filter: A logs-based filter. See Advanced Logs Queries (https://cloud.google.com/logging/docs/view/advanced-queries) for how this filter should be constructed.
        :param Mapping[str, str] label_extractors: Optional. A map from a label key to an extractor expression, which is used to extract the value for this label key. Each entry in this map is a specification for how data should be extracted from log entries that match filter. Each combination of extracted values is treated as a separate rule for the purposes of triggering notifications. Label keys and corresponding values can be used in notifications generated by this condition.Please see the documentation on logs-based metric valueExtractors (https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics#LogMetric.FIELDS.value_extractor) for syntax and examples.
        """
        # Register every field with the pulumi runtime under its snake_case name.
        for attr, value in (
                ("filter", filter),
                ("label_extractors", label_extractors)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def filter(self) -> str:
        """
        A logs-based filter. See Advanced Logs Queries (https://cloud.google.com/logging/docs/view/advanced-queries) for how this filter should be constructed.
        """
        return pulumi.get(self, "filter")

    @property
    @pulumi.getter(name="labelExtractors")
    def label_extractors(self) -> Mapping[str, str]:
        """
        Optional. A map from a label key to an extractor expression, which is used to extract the value for this label key. Each entry in this map is a specification for how data should be extracted from log entries that match filter. Each combination of extracted values is treated as a separate rule for the purposes of triggering notifications. Label keys and corresponding values can be used in notifications generated by this condition.Please see the documentation on logs-based metric valueExtractors (https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics#LogMetric.FIELDS.value_extractor) for syntax and examples.
        """
        return pulumi.get(self, "label_extractors")
@pulumi.output_type
class MeshIstioResponse(dict):
    """
    Istio service scoped to an Istio mesh. Anthos clusters running ASM < 1.6.8 will have their services ingested as this type.
    """
    @staticmethod
    def __key_warning(key: str):
        # Look up the snake_case property name for a camelCase dict key;
        # unknown keys map to None and produce no warning.
        replacement = {
            "meshUid": "mesh_uid",
            "serviceName": "service_name",
            "serviceNamespace": "service_namespace",
        }.get(key)
        if replacement:
            pulumi.log.warn(f"Key '{key}' not found in MeshIstioResponse. Access the value via the '{replacement}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        MeshIstioResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        MeshIstioResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 mesh_uid: str,
                 service_name: str,
                 service_namespace: str):
        """
        Istio service scoped to an Istio mesh. Anthos clusters running ASM < 1.6.8 will have their services ingested as this type.
        :param str mesh_uid: Identifier for the mesh in which this Istio service is defined. Corresponds to the mesh_uid metric label in Istio metrics.
        :param str service_name: The name of the Istio service underlying this service. Corresponds to the destination_service_name metric label in Istio metrics.
        :param str service_namespace: The namespace of the Istio service underlying this service. Corresponds to the destination_service_namespace metric label in Istio metrics.
        """
        # Register every field with the pulumi runtime under its snake_case name.
        for attr, value in (
                ("mesh_uid", mesh_uid),
                ("service_name", service_name),
                ("service_namespace", service_namespace)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="meshUid")
    def mesh_uid(self) -> str:
        """
        Identifier for the mesh in which this Istio service is defined. Corresponds to the mesh_uid metric label in Istio metrics.
        """
        return pulumi.get(self, "mesh_uid")

    @property
    @pulumi.getter(name="serviceName")
    def service_name(self) -> str:
        """
        The name of the Istio service underlying this service. Corresponds to the destination_service_name metric label in Istio metrics.
        """
        return pulumi.get(self, "service_name")

    @property
    @pulumi.getter(name="serviceNamespace")
    def service_namespace(self) -> str:
        """
        The namespace of the Istio service underlying this service. Corresponds to the destination_service_namespace metric label in Istio metrics.
        """
        return pulumi.get(self, "service_namespace")
@pulumi.output_type
class MetricAbsenceResponse(dict):
    """
    A condition type that checks that monitored resources are reporting data. The configuration defines a metric and a set of monitored resources. The predicate is considered in violation when a time series for the specified metric of a monitored resource does not include any data in the specified duration.
    """
    # NOTE: no __key_warning here — every field name is already identical in
    # camelCase and snake_case form, so there is nothing to translate.
    def __init__(__self__, *,
                 aggregations: Sequence['outputs.AggregationResponse'],
                 duration: str,
                 filter: str,
                 trigger: 'outputs.TriggerResponse'):
        """
        A condition type that checks that monitored resources are reporting data. The configuration defines a metric and a set of monitored resources. The predicate is considered in violation when a time series for the specified metric of a monitored resource does not include any data in the specified duration.
        :param Sequence['AggregationResponse'] aggregations: Specifies the alignment of data points in individual time series as well as how to combine the retrieved time series together (such as when aggregating multiple streams on each resource to a single stream for each resource or when aggregating streams across all members of a group of resources). Multiple aggregations are applied in the order specified.This field is similar to the one in the ListTimeSeries request (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). It is advisable to use the ListTimeSeries method when debugging this field.
        :param str duration: The amount of time that a time series must fail to report new data to be considered failing. The minimum value of this field is 120 seconds. Larger values that are a multiple of a minute--for example, 240 or 300 seconds--are supported. If an invalid value is given, an error will be returned. The Duration.nanos field is ignored.
        :param str filter: A filter (https://cloud.google.com/monitoring/api/v3/filters) that identifies which time series should be compared with the threshold.The filter is similar to the one that is specified in the ListTimeSeries request (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that call is useful to verify the time series that will be retrieved / processed). The filter must specify the metric type and the resource type. Optionally, it can specify resource labels and metric labels. This field must not exceed 2048 Unicode characters in length.
        :param 'TriggerResponse' trigger: The number/percent of time series for which the comparison must hold in order for the condition to trigger. If unspecified, then the condition will trigger if the comparison is true for any of the time series that have been identified by filter and aggregations.
        """
        # Register every field with the pulumi runtime under its snake_case name.
        for attr, value in (
                ("aggregations", aggregations),
                ("duration", duration),
                ("filter", filter),
                ("trigger", trigger)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def aggregations(self) -> Sequence['outputs.AggregationResponse']:
        """
        Specifies the alignment of data points in individual time series as well as how to combine the retrieved time series together (such as when aggregating multiple streams on each resource to a single stream for each resource or when aggregating streams across all members of a group of resources). Multiple aggregations are applied in the order specified.This field is similar to the one in the ListTimeSeries request (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). It is advisable to use the ListTimeSeries method when debugging this field.
        """
        return pulumi.get(self, "aggregations")

    @property
    @pulumi.getter
    def duration(self) -> str:
        """
        The amount of time that a time series must fail to report new data to be considered failing. The minimum value of this field is 120 seconds. Larger values that are a multiple of a minute--for example, 240 or 300 seconds--are supported. If an invalid value is given, an error will be returned. The Duration.nanos field is ignored.
        """
        return pulumi.get(self, "duration")

    @property
    @pulumi.getter
    def filter(self) -> str:
        """
        A filter (https://cloud.google.com/monitoring/api/v3/filters) that identifies which time series should be compared with the threshold.The filter is similar to the one that is specified in the ListTimeSeries request (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that call is useful to verify the time series that will be retrieved / processed). The filter must specify the metric type and the resource type. Optionally, it can specify resource labels and metric labels. This field must not exceed 2048 Unicode characters in length.
        """
        return pulumi.get(self, "filter")

    @property
    @pulumi.getter
    def trigger(self) -> 'outputs.TriggerResponse':
        """
        The number/percent of time series for which the comparison must hold in order for the condition to trigger. If unspecified, then the condition will trigger if the comparison is true for any of the time series that have been identified by filter and aggregations.
        """
        return pulumi.get(self, "trigger")
@pulumi.output_type
class MetricDescriptorMetadataResponse(dict):
    """
    Additional annotations that can be used to guide the usage of a metric.
    """
    @staticmethod
    def __key_warning(key: str):
        # Look up the snake_case property name for a camelCase dict key;
        # unknown keys map to None and produce no warning.
        replacement = {
            "ingestDelay": "ingest_delay",
            "samplePeriod": "sample_period",
        }.get(key)
        if replacement:
            pulumi.log.warn(f"Key '{key}' not found in MetricDescriptorMetadataResponse. Access the value via the '{replacement}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        MetricDescriptorMetadataResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        MetricDescriptorMetadataResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 ingest_delay: str,
                 sample_period: str):
        """
        Additional annotations that can be used to guide the usage of a metric.
        :param str ingest_delay: The delay of data points caused by ingestion. Data points older than this age are guaranteed to be ingested and available to be read, excluding data loss due to errors.
        :param str sample_period: The sampling period of metric data points. For metrics which are written periodically, consecutive data points are stored at this time interval, excluding data loss due to errors. Metrics with a higher granularity have a smaller sampling period.
        """
        # Register every field with the pulumi runtime under its snake_case name.
        for attr, value in (
                ("ingest_delay", ingest_delay),
                ("sample_period", sample_period)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="ingestDelay")
    def ingest_delay(self) -> str:
        """
        The delay of data points caused by ingestion. Data points older than this age are guaranteed to be ingested and available to be read, excluding data loss due to errors.
        """
        return pulumi.get(self, "ingest_delay")

    @property
    @pulumi.getter(name="samplePeriod")
    def sample_period(self) -> str:
        """
        The sampling period of metric data points. For metrics which are written periodically, consecutive data points are stored at this time interval, excluding data loss due to errors. Metrics with a higher granularity have a smaller sampling period.
        """
        return pulumi.get(self, "sample_period")
@pulumi.output_type
class MetricRangeResponse(dict):
    """
    A MetricRange is used when each window is good when the value x of a single TimeSeries satisfies range.min <= x <= range.max. The provided TimeSeries must have ValueType = INT64 or ValueType = DOUBLE and MetricKind = GAUGE.
    """
    @staticmethod
    def __key_warning(key: str):
        # Look up the snake_case property name for a camelCase dict key;
        # unknown keys map to None and produce no warning.
        replacement = {"timeSeries": "time_series"}.get(key)
        if replacement:
            pulumi.log.warn(f"Key '{key}' not found in MetricRangeResponse. Access the value via the '{replacement}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        MetricRangeResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        # Warn on camelCase access, then delegate to the underlying dict.
        MetricRangeResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 range: 'outputs.GoogleMonitoringV3RangeResponse',
                 time_series: str):
        """
        A MetricRange is used when each window is good when the value x of a single TimeSeries satisfies range.min <= x <= range.max. The provided TimeSeries must have ValueType = INT64 or ValueType = DOUBLE and MetricKind = GAUGE.
        :param 'GoogleMonitoringV3RangeResponse' range: Range of values considered "good." For a one-sided range, set one bound to an infinite value.
        :param str time_series: A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying the TimeSeries to use for evaluating window quality.
        """
        # Register every field with the pulumi runtime under its snake_case name.
        for attr, value in (
                ("range", range),
                ("time_series", time_series)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def range(self) -> 'outputs.GoogleMonitoringV3RangeResponse':
        """
        Range of values considered "good." For a one-sided range, set one bound to an infinite value.
        """
        return pulumi.get(self, "range")

    @property
    @pulumi.getter(name="timeSeries")
    def time_series(self) -> str:
        """
        A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying the TimeSeries to use for evaluating window quality.
        """
        return pulumi.get(self, "time_series")
@pulumi.output_type
class MetricThresholdResponse(dict):
"""
A condition type that compares a collection of time series against a threshold.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "denominatorAggregations":
suggest = "denominator_aggregations"
elif key == "denominatorFilter":
suggest = "denominator_filter"
elif key == "thresholdValue":
suggest = "threshold_value"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MetricThresholdResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MetricThresholdResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MetricThresholdResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
aggregations: Sequence['outputs.AggregationResponse'],
comparison: str,
denominator_aggregations: Sequence['outputs.AggregationResponse'],
denominator_filter: str,
duration: str,
filter: str,
threshold_value: float,
trigger: 'outputs.TriggerResponse'):
"""
A condition type that compares a collection of time series against a threshold.
:param Sequence['AggregationResponse'] aggregations: Specifies the alignment of data points in individual time series as well as how to combine the retrieved time series together (such as when aggregating multiple streams on each resource to a single stream for each resource or when aggregating streams across all members of a group of resources). Multiple aggregations are applied in the order specified.This field is similar to the one in the ListTimeSeries request (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). It is advisable to use the ListTimeSeries method when debugging this field.
:param str comparison: The comparison to apply between the time series (indicated by filter and aggregation) and the threshold (indicated by threshold_value). The comparison is applied on each time series, with the time series on the left-hand side and the threshold on the right-hand side.Only COMPARISON_LT and COMPARISON_GT are supported currently.
:param Sequence['AggregationResponse'] denominator_aggregations: Specifies the alignment of data points in individual time series selected by denominatorFilter as well as how to combine the retrieved time series together (such as when aggregating multiple streams on each resource to a single stream for each resource or when aggregating streams across all members of a group of resources).When computing ratios, the aggregations and denominator_aggregations fields must use the same alignment period and produce time series that have the same periodicity and labels.
:param str denominator_filter: A filter (https://cloud.google.com/monitoring/api/v3/filters) that identifies a time series that should be used as the denominator of a ratio that will be compared with the threshold. If a denominator_filter is specified, the time series specified by the filter field will be used as the numerator.The filter must specify the metric type and optionally may contain restrictions on resource type, resource labels, and metric labels. This field may not exceed 2048 Unicode characters in length.
:param str duration: The amount of time that a time series must violate the threshold to be considered failing. Currently, only values that are a multiple of a minute--e.g., 0, 60, 120, or 300 seconds--are supported. If an invalid value is given, an error will be returned. When choosing a duration, it is useful to keep in mind the frequency of the underlying time series data (which may also be affected by any alignments specified in the aggregations field); a good duration is long enough so that a single outlier does not generate spurious alerts, but short enough that unhealthy states are detected and alerted on quickly.
:param str filter: A filter (https://cloud.google.com/monitoring/api/v3/filters) that identifies which time series should be compared with the threshold.The filter is similar to the one that is specified in the ListTimeSeries request (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that call is useful to verify the time series that will be retrieved / processed). The filter must specify the metric type and the resource type. Optionally, it can specify resource labels and metric labels. This field must not exceed 2048 Unicode characters in length.
:param float threshold_value: A value against which to compare the time series.
:param 'TriggerResponse' trigger: The number/percent of time series for which the comparison must hold in order for the condition to trigger. If unspecified, then the condition will trigger if the comparison is true for any of the time series that have been identified by filter and aggregations, or by the ratio, if denominator_filter and denominator_aggregations are specified.
"""
pulumi.set(__self__, "aggregations", aggregations)
pulumi.set(__self__, "comparison", comparison)
pulumi.set(__self__, "denominator_aggregations", denominator_aggregations)
pulumi.set(__self__, "denominator_filter", denominator_filter)
pulumi.set(__self__, "duration", duration)
pulumi.set(__self__, "filter", filter)
pulumi.set(__self__, "threshold_value", threshold_value)
pulumi.set(__self__, "trigger", trigger)
    @property
    @pulumi.getter
    def aggregations(self) -> Sequence['outputs.AggregationResponse']:
        """
        Specifies the alignment of data points in individual time series as well as how to combine the retrieved time series together (such as when aggregating multiple streams on each resource to a single stream for each resource or when aggregating streams across all members of a group of resources). Multiple aggregations are applied in the order specified.This field is similar to the one in the ListTimeSeries request (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). It is advisable to use the ListTimeSeries method when debugging this field.
        """
        # Value stored by the constructor via pulumi.set("aggregations", ...).
        return pulumi.get(self, "aggregations")
    @property
    @pulumi.getter
    def comparison(self) -> str:
        """
        The comparison to apply between the time series (indicated by filter and aggregation) and the threshold (indicated by threshold_value). The comparison is applied on each time series, with the time series on the left-hand side and the threshold on the right-hand side.Only COMPARISON_LT and COMPARISON_GT are supported currently.
        """
        # Value stored by the constructor via pulumi.set("comparison", ...).
        return pulumi.get(self, "comparison")
    @property
    @pulumi.getter(name="denominatorAggregations")
    def denominator_aggregations(self) -> Sequence['outputs.AggregationResponse']:
        """
        Specifies the alignment of data points in individual time series selected by denominatorFilter as well as how to combine the retrieved time series together (such as when aggregating multiple streams on each resource to a single stream for each resource or when aggregating streams across all members of a group of resources).When computing ratios, the aggregations and denominator_aggregations fields must use the same alignment period and produce time series that have the same periodicity and labels.
        """
        # "denominatorAggregations" is the camelCase wire key; Python exposes snake_case.
        return pulumi.get(self, "denominator_aggregations")
    @property
    @pulumi.getter(name="denominatorFilter")
    def denominator_filter(self) -> str:
        """
        A filter (https://cloud.google.com/monitoring/api/v3/filters) that identifies a time series that should be used as the denominator of a ratio that will be compared with the threshold. If a denominator_filter is specified, the time series specified by the filter field will be used as the numerator.The filter must specify the metric type and optionally may contain restrictions on resource type, resource labels, and metric labels. This field may not exceed 2048 Unicode characters in length.
        """
        # "denominatorFilter" is the camelCase wire key; Python exposes snake_case.
        return pulumi.get(self, "denominator_filter")
    @property
    @pulumi.getter
    def duration(self) -> str:
        """
        The amount of time that a time series must violate the threshold to be considered failing. Currently, only values that are a multiple of a minute--e.g., 0, 60, 120, or 300 seconds--are supported. If an invalid value is given, an error will be returned. When choosing a duration, it is useful to keep in mind the frequency of the underlying time series data (which may also be affected by any alignments specified in the aggregations field); a good duration is long enough so that a single outlier does not generate spurious alerts, but short enough that unhealthy states are detected and alerted on quickly.
        """
        # Value stored by the constructor via pulumi.set("duration", ...).
        return pulumi.get(self, "duration")
    @property
    @pulumi.getter
    def filter(self) -> str:
        """
        A filter (https://cloud.google.com/monitoring/api/v3/filters) that identifies which time series should be compared with the threshold.The filter is similar to the one that is specified in the ListTimeSeries request (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that call is useful to verify the time series that will be retrieved / processed). The filter must specify the metric type and the resource type. Optionally, it can specify resource labels and metric labels. This field must not exceed 2048 Unicode characters in length.
        """
        # NOTE: the property name shadows the builtin `filter` only within this
        # class's attribute namespace; it mirrors the API field name.
        return pulumi.get(self, "filter")
    @property
    @pulumi.getter(name="thresholdValue")
    def threshold_value(self) -> float:
        """
        A value against which to compare the time series.
        """
        # "thresholdValue" is the camelCase wire key; Python exposes snake_case.
        return pulumi.get(self, "threshold_value")
    @property
    @pulumi.getter
    def trigger(self) -> 'outputs.TriggerResponse':
        """
        The number/percent of time series for which the comparison must hold in order for the condition to trigger. If unspecified, then the condition will trigger if the comparison is true for any of the time series that have been identified by filter and aggregations, or by the ratio, if denominator_filter and denominator_aggregations are specified.
        """
        # Value stored by the constructor via pulumi.set("trigger", ...).
        return pulumi.get(self, "trigger")
@pulumi.output_type
class MonitoredResourceResponse(dict):
    """
    An object representing a resource that can be used for monitoring, logging, billing, or other purposes. Examples include virtual machine instances, databases, and storage devices such as disks. The type field identifies a MonitoredResourceDescriptor object that describes the resource's schema. Information in the labels field identifies the actual resource and its attributes according to the schema. For example, a particular Compute Engine VM instance could be represented by the following object, because the MonitoredResourceDescriptor for "gce_instance" has labels "instance_id" and "zone": { "type": "gce_instance", "labels": { "instance_id": "12345678901234", "zone": "us-central1-a" }}
    """
    def __init__(__self__, *,
                 labels: Mapping[str, str],
                 type: str):
        """
        An object representing a resource that can be used for monitoring, logging, billing, or other purposes. Examples include virtual machine instances, databases, and storage devices such as disks. The type field identifies a MonitoredResourceDescriptor object that describes the resource's schema. Information in the labels field identifies the actual resource and its attributes according to the schema. For example, a particular Compute Engine VM instance could be represented by the following object, because the MonitoredResourceDescriptor for "gce_instance" has labels "instance_id" and "zone": { "type": "gce_instance", "labels": { "instance_id": "12345678901234", "zone": "us-central1-a" }}
        :param Mapping[str, str] labels: Values for all of the labels listed in the associated monitored resource descriptor. For example, Compute Engine VM instances use the labels "project_id", "instance_id", and "zone".
        :param str type: The monitored resource type. This field must match the type field of a MonitoredResourceDescriptor object. For example, the type of a Compute Engine VM instance is gce_instance. For a list of types, see Monitoring resource types (https://cloud.google.com/monitoring/api/resources) and Logging resource types (https://cloud.google.com/logging/docs/api/v2/resource-list).
        """
        # pulumi.set stores values into the dict-backed object under the
        # snake_case key; the @pulumi.getter properties below read them back.
        pulumi.set(__self__, "labels", labels)
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def labels(self) -> Mapping[str, str]:
        """
        Values for all of the labels listed in the associated monitored resource descriptor. For example, Compute Engine VM instances use the labels "project_id", "instance_id", and "zone".
        """
        return pulumi.get(self, "labels")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The monitored resource type. This field must match the type field of a MonitoredResourceDescriptor object. For example, the type of a Compute Engine VM instance is gce_instance. For a list of types, see Monitoring resource types (https://cloud.google.com/monitoring/api/resources) and Logging resource types (https://cloud.google.com/logging/docs/api/v2/resource-list).
        """
        # `type` mirrors the API field name; it shadows the builtin only as an attribute.
        return pulumi.get(self, "type")
@pulumi.output_type
class MonitoringQueryLanguageConditionResponse(dict):
    """
    A condition type that allows alert policies to be defined using Monitoring Query Language (https://cloud.google.com/monitoring/mql).
    """
    def __init__(__self__, *,
                 duration: str,
                 query: str,
                 trigger: 'outputs.TriggerResponse'):
        """
        A condition type that allows alert policies to be defined using Monitoring Query Language (https://cloud.google.com/monitoring/mql).
        :param str duration: The amount of time that a time series must violate the threshold to be considered failing. Currently, only values that are a multiple of a minute--e.g., 0, 60, 120, or 300 seconds--are supported. If an invalid value is given, an error will be returned. When choosing a duration, it is useful to keep in mind the frequency of the underlying time series data (which may also be affected by any alignments specified in the aggregations field); a good duration is long enough so that a single outlier does not generate spurious alerts, but short enough that unhealthy states are detected and alerted on quickly.
        :param str query: Monitoring Query Language (https://cloud.google.com/monitoring/mql) query that outputs a boolean stream.
        :param 'TriggerResponse' trigger: The number/percent of time series for which the comparison must hold in order for the condition to trigger. If unspecified, then the condition will trigger if the comparison is true for any of the time series that have been identified by filter and aggregations, or by the ratio, if denominator_filter and denominator_aggregations are specified.
        """
        # Store each field by its snake_case key; the properties below read them back.
        pulumi.set(__self__, "duration", duration)
        pulumi.set(__self__, "query", query)
        pulumi.set(__self__, "trigger", trigger)
    @property
    @pulumi.getter
    def duration(self) -> str:
        """
        The amount of time that a time series must violate the threshold to be considered failing. Currently, only values that are a multiple of a minute--e.g., 0, 60, 120, or 300 seconds--are supported. If an invalid value is given, an error will be returned. When choosing a duration, it is useful to keep in mind the frequency of the underlying time series data (which may also be affected by any alignments specified in the aggregations field); a good duration is long enough so that a single outlier does not generate spurious alerts, but short enough that unhealthy states are detected and alerted on quickly.
        """
        return pulumi.get(self, "duration")
    @property
    @pulumi.getter
    def query(self) -> str:
        """
        Monitoring Query Language (https://cloud.google.com/monitoring/mql) query that outputs a boolean stream.
        """
        return pulumi.get(self, "query")
    @property
    @pulumi.getter
    def trigger(self) -> 'outputs.TriggerResponse':
        """
        The number/percent of time series for which the comparison must hold in order for the condition to trigger. If unspecified, then the condition will trigger if the comparison is true for any of the time series that have been identified by filter and aggregations, or by the ratio, if denominator_filter and denominator_aggregations are specified.
        """
        return pulumi.get(self, "trigger")
@pulumi.output_type
class MutationRecordResponse(dict):
    """
    Describes a change made to a configuration.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map deprecated camelCase wire keys to the snake_case property that
        # should be used instead, and emit a warning when one is accessed.
        suggest = None
        if key == "mutateTime":
            suggest = "mutate_time"
        elif key == "mutatedBy":
            suggest = "mutated_by"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in MutationRecordResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # dict-style access shim: warn on camelCase keys, then defer to dict.
        MutationRecordResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # dict.get shim with the same camelCase-key warning as __getitem__.
        MutationRecordResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 mutate_time: str,
                 mutated_by: str):
        """
        Describes a change made to a configuration.
        :param str mutate_time: When the change occurred.
        :param str mutated_by: The email address of the user making the change.
        """
        pulumi.set(__self__, "mutate_time", mutate_time)
        pulumi.set(__self__, "mutated_by", mutated_by)
    @property
    @pulumi.getter(name="mutateTime")
    def mutate_time(self) -> str:
        """
        When the change occurred.
        """
        return pulumi.get(self, "mutate_time")
    @property
    @pulumi.getter(name="mutatedBy")
    def mutated_by(self) -> str:
        """
        The email address of the user making the change.
        """
        return pulumi.get(self, "mutated_by")
@pulumi.output_type
class NotificationRateLimitResponse(dict):
    """
    Control over the rate of notifications sent to this alert policy's notification channels.
    """
    def __init__(__self__, *,
                 period: str):
        """
        Control over the rate of notifications sent to this alert policy's notification channels.
        :param str period: Not more than one notification per period.
        """
        # Single field; stored under its snake_case key for the getter below.
        pulumi.set(__self__, "period", period)
    @property
    @pulumi.getter
    def period(self) -> str:
        """
        Not more than one notification per period.
        """
        return pulumi.get(self, "period")
@pulumi.output_type
class PerformanceThresholdResponse(dict):
    """
    A PerformanceThreshold is used when each window is good when that window has a sufficiently high performance.
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when the camelCase wire key is used instead of the snake_case property.
        suggest = None
        if key == "basicSliPerformance":
            suggest = "basic_sli_performance"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in PerformanceThresholdResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # dict-style access shim: warn on camelCase keys, then defer to dict.
        PerformanceThresholdResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # dict.get shim with the same camelCase-key warning as __getitem__.
        PerformanceThresholdResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 basic_sli_performance: 'outputs.BasicSliResponse',
                 performance: 'outputs.RequestBasedSliResponse',
                 threshold: float):
        """
        A PerformanceThreshold is used when each window is good when that window has a sufficiently high performance.
        :param 'BasicSliResponse' basic_sli_performance: BasicSli to evaluate to judge window quality.
        :param 'RequestBasedSliResponse' performance: RequestBasedSli to evaluate to judge window quality.
        :param float threshold: If window performance >= threshold, the window is counted as good.
        """
        pulumi.set(__self__, "basic_sli_performance", basic_sli_performance)
        pulumi.set(__self__, "performance", performance)
        pulumi.set(__self__, "threshold", threshold)
    @property
    @pulumi.getter(name="basicSliPerformance")
    def basic_sli_performance(self) -> 'outputs.BasicSliResponse':
        """
        BasicSli to evaluate to judge window quality.
        """
        return pulumi.get(self, "basic_sli_performance")
    @property
    @pulumi.getter
    def performance(self) -> 'outputs.RequestBasedSliResponse':
        """
        RequestBasedSli to evaluate to judge window quality.
        """
        return pulumi.get(self, "performance")
    @property
    @pulumi.getter
    def threshold(self) -> float:
        """
        If window performance >= threshold, the window is counted as good.
        """
        return pulumi.get(self, "threshold")
@pulumi.output_type
class RequestBasedSliResponse(dict):
    """
    Service Level Indicators for which atomic units of service are counted directly.
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = None
        if key == "distributionCut":
            suggest = "distribution_cut"
        elif key == "goodTotalRatio":
            suggest = "good_total_ratio"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in RequestBasedSliResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # dict-style access shim: warn on camelCase keys, then defer to dict.
        RequestBasedSliResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # dict.get shim with the same camelCase-key warning as __getitem__.
        RequestBasedSliResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 distribution_cut: 'outputs.DistributionCutResponse',
                 good_total_ratio: 'outputs.TimeSeriesRatioResponse'):
        """
        Service Level Indicators for which atomic units of service are counted directly.
        :param 'DistributionCutResponse' distribution_cut: distribution_cut is used when good_service is a count of values aggregated in a Distribution that fall into a good range. The total_service is the total count of all values aggregated in the Distribution.
        :param 'TimeSeriesRatioResponse' good_total_ratio: good_total_ratio is used when the ratio of good_service to total_service is computed from two TimeSeries.
        """
        pulumi.set(__self__, "distribution_cut", distribution_cut)
        pulumi.set(__self__, "good_total_ratio", good_total_ratio)
    @property
    @pulumi.getter(name="distributionCut")
    def distribution_cut(self) -> 'outputs.DistributionCutResponse':
        """
        distribution_cut is used when good_service is a count of values aggregated in a Distribution that fall into a good range. The total_service is the total count of all values aggregated in the Distribution.
        """
        return pulumi.get(self, "distribution_cut")
    @property
    @pulumi.getter(name="goodTotalRatio")
    def good_total_ratio(self) -> 'outputs.TimeSeriesRatioResponse':
        """
        good_total_ratio is used when the ratio of good_service to total_service is computed from two TimeSeries.
        """
        return pulumi.get(self, "good_total_ratio")
@pulumi.output_type
class ResourceGroupResponse(dict):
    """
    The resource submessage for group checks. It can be used instead of a monitored resource, when multiple resources are being monitored.
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = None
        if key == "groupId":
            suggest = "group_id"
        elif key == "resourceType":
            suggest = "resource_type"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ResourceGroupResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # dict-style access shim: warn on camelCase keys, then defer to dict.
        ResourceGroupResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # dict.get shim with the same camelCase-key warning as __getitem__.
        ResourceGroupResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 group_id: str,
                 resource_type: str):
        """
        The resource submessage for group checks. It can be used instead of a monitored resource, when multiple resources are being monitored.
        :param str group_id: The group of resources being monitored. Should be only the [GROUP_ID], and not the full-path projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID].
        :param str resource_type: The resource type of the group members.
        """
        pulumi.set(__self__, "group_id", group_id)
        pulumi.set(__self__, "resource_type", resource_type)
    @property
    @pulumi.getter(name="groupId")
    def group_id(self) -> str:
        """
        The group of resources being monitored. Should be only the [GROUP_ID], and not the full-path projects/[PROJECT_ID_OR_NUMBER]/groups/[GROUP_ID].
        """
        return pulumi.get(self, "group_id")
    @property
    @pulumi.getter(name="resourceType")
    def resource_type(self) -> str:
        """
        The resource type of the group members.
        """
        return pulumi.get(self, "resource_type")
@pulumi.output_type
class ServiceLevelIndicatorResponse(dict):
    """
    A Service-Level Indicator (SLI) describes the "performance" of a service. For some services, the SLI is well-defined. In such cases, the SLI can be described easily by referencing the well-known SLI and providing the needed parameters. Alternatively, a "custom" SLI can be defined with a query to the underlying metric store. An SLI is defined to be good_service / total_service over any queried time interval. The value of performance always falls into the range 0 <= performance <= 1. A custom SLI describes how to compute this ratio, whether this is by dividing values from a pair of time series, cutting a Distribution into good and bad counts, or counting time windows in which the service complies with a criterion. For separation of concerns, a single Service-Level Indicator measures performance for only one aspect of service quality, such as fraction of successful queries or fast-enough queries.
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = None
        if key == "basicSli":
            suggest = "basic_sli"
        elif key == "requestBased":
            suggest = "request_based"
        elif key == "windowsBased":
            suggest = "windows_based"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ServiceLevelIndicatorResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # dict-style access shim: warn on camelCase keys, then defer to dict.
        ServiceLevelIndicatorResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # dict.get shim with the same camelCase-key warning as __getitem__.
        ServiceLevelIndicatorResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 basic_sli: 'outputs.BasicSliResponse',
                 request_based: 'outputs.RequestBasedSliResponse',
                 windows_based: 'outputs.WindowsBasedSliResponse'):
        """
        A Service-Level Indicator (SLI) describes the "performance" of a service. For some services, the SLI is well-defined. In such cases, the SLI can be described easily by referencing the well-known SLI and providing the needed parameters. Alternatively, a "custom" SLI can be defined with a query to the underlying metric store. An SLI is defined to be good_service / total_service over any queried time interval. The value of performance always falls into the range 0 <= performance <= 1. A custom SLI describes how to compute this ratio, whether this is by dividing values from a pair of time series, cutting a Distribution into good and bad counts, or counting time windows in which the service complies with a criterion. For separation of concerns, a single Service-Level Indicator measures performance for only one aspect of service quality, such as fraction of successful queries or fast-enough queries.
        :param 'BasicSliResponse' basic_sli: Basic SLI on a well-known service type.
        :param 'RequestBasedSliResponse' request_based: Request-based SLIs
        :param 'WindowsBasedSliResponse' windows_based: Windows-based SLIs
        """
        pulumi.set(__self__, "basic_sli", basic_sli)
        pulumi.set(__self__, "request_based", request_based)
        pulumi.set(__self__, "windows_based", windows_based)
    @property
    @pulumi.getter(name="basicSli")
    def basic_sli(self) -> 'outputs.BasicSliResponse':
        """
        Basic SLI on a well-known service type.
        """
        return pulumi.get(self, "basic_sli")
    @property
    @pulumi.getter(name="requestBased")
    def request_based(self) -> 'outputs.RequestBasedSliResponse':
        """
        Request-based SLIs
        """
        return pulumi.get(self, "request_based")
    @property
    @pulumi.getter(name="windowsBased")
    def windows_based(self) -> 'outputs.WindowsBasedSliResponse':
        """
        Windows-based SLIs
        """
        return pulumi.get(self, "windows_based")
@pulumi.output_type
class StatusResponse(dict):
    """
    The Status type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by gRPC (https://github.com/grpc). Each Status message contains three pieces of data: error code, error message, and error details.You can find out more about this error model and how to work with it in the API Design Guide (https://cloud.google.com/apis/design/errors).
    """
    def __init__(__self__, *,
                 code: int,
                 details: Sequence[Mapping[str, str]],
                 message: str):
        """
        The Status type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by gRPC (https://github.com/grpc). Each Status message contains three pieces of data: error code, error message, and error details.You can find out more about this error model and how to work with it in the API Design Guide (https://cloud.google.com/apis/design/errors).
        :param int code: The status code, which should be an enum value of google.rpc.Code.
        :param Sequence[Mapping[str, str]] details: A list of messages that carry the error details. There is a common set of message types for APIs to use.
        :param str message: A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.
        """
        # Store each field by its snake_case key; the properties below read them back.
        pulumi.set(__self__, "code", code)
        pulumi.set(__self__, "details", details)
        pulumi.set(__self__, "message", message)
    @property
    @pulumi.getter
    def code(self) -> int:
        """
        The status code, which should be an enum value of google.rpc.Code.
        """
        return pulumi.get(self, "code")
    @property
    @pulumi.getter
    def details(self) -> Sequence[Mapping[str, str]]:
        """
        A list of messages that carry the error details. There is a common set of message types for APIs to use.
        """
        return pulumi.get(self, "details")
    @property
    @pulumi.getter
    def message(self) -> str:
        """
        A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.
        """
        return pulumi.get(self, "message")
@pulumi.output_type
class TcpCheckResponse(dict):
    """
    Information required for a TCP Uptime check request.
    """
    def __init__(__self__, *,
                 port: int):
        """
        Information required for a TCP Uptime check request.
        :param int port: The TCP port on the server against which to run the check. Will be combined with host (specified within the monitored_resource) to construct the full URL. Required.
        """
        # Single field; stored under its snake_case key for the getter below.
        pulumi.set(__self__, "port", port)
    @property
    @pulumi.getter
    def port(self) -> int:
        """
        The TCP port on the server against which to run the check. Will be combined with host (specified within the monitored_resource) to construct the full URL. Required.
        """
        return pulumi.get(self, "port")
@pulumi.output_type
class TelemetryResponse(dict):
    """
    Configuration for how to query telemetry on a Service.
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when the camelCase wire key is used instead of the snake_case property.
        suggest = None
        if key == "resourceName":
            suggest = "resource_name"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in TelemetryResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # dict-style access shim: warn on camelCase keys, then defer to dict.
        TelemetryResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # dict.get shim with the same camelCase-key warning as __getitem__.
        TelemetryResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 resource_name: str):
        """
        Configuration for how to query telemetry on a Service.
        :param str resource_name: The full name of the resource that defines this service. Formatted as described in https://cloud.google.com/apis/design/resource_names.
        """
        pulumi.set(__self__, "resource_name", resource_name)
    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> str:
        """
        The full name of the resource that defines this service. Formatted as described in https://cloud.google.com/apis/design/resource_names.
        """
        return pulumi.get(self, "resource_name")
@pulumi.output_type
class TimeSeriesRatioResponse(dict):
    """
    A TimeSeriesRatio specifies two TimeSeries to use for computing the good_service / total_service ratio. The specified TimeSeries must have ValueType = DOUBLE or ValueType = INT64 and must have MetricKind = DELTA or MetricKind = CUMULATIVE. The TimeSeriesRatio must specify exactly two of good, bad, and total, and the relationship good_service + bad_service = total_service will be assumed.
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = None
        if key == "badServiceFilter":
            suggest = "bad_service_filter"
        elif key == "goodServiceFilter":
            suggest = "good_service_filter"
        elif key == "totalServiceFilter":
            suggest = "total_service_filter"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in TimeSeriesRatioResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # dict-style access shim: warn on camelCase keys, then defer to dict.
        TimeSeriesRatioResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # dict.get shim with the same camelCase-key warning as __getitem__.
        TimeSeriesRatioResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 bad_service_filter: str,
                 good_service_filter: str,
                 total_service_filter: str):
        """
        A TimeSeriesRatio specifies two TimeSeries to use for computing the good_service / total_service ratio. The specified TimeSeries must have ValueType = DOUBLE or ValueType = INT64 and must have MetricKind = DELTA or MetricKind = CUMULATIVE. The TimeSeriesRatio must specify exactly two of good, bad, and total, and the relationship good_service + bad_service = total_service will be assumed.
        :param str bad_service_filter: A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries quantifying bad service, either demanded service that was not provided or demanded service that was of inadequate quality. Must have ValueType = DOUBLE or ValueType = INT64 and must have MetricKind = DELTA or MetricKind = CUMULATIVE.
        :param str good_service_filter: A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries quantifying good service provided. Must have ValueType = DOUBLE or ValueType = INT64 and must have MetricKind = DELTA or MetricKind = CUMULATIVE.
        :param str total_service_filter: A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries quantifying total demanded service. Must have ValueType = DOUBLE or ValueType = INT64 and must have MetricKind = DELTA or MetricKind = CUMULATIVE.
        """
        pulumi.set(__self__, "bad_service_filter", bad_service_filter)
        pulumi.set(__self__, "good_service_filter", good_service_filter)
        pulumi.set(__self__, "total_service_filter", total_service_filter)
    @property
    @pulumi.getter(name="badServiceFilter")
    def bad_service_filter(self) -> str:
        """
        A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries quantifying bad service, either demanded service that was not provided or demanded service that was of inadequate quality. Must have ValueType = DOUBLE or ValueType = INT64 and must have MetricKind = DELTA or MetricKind = CUMULATIVE.
        """
        return pulumi.get(self, "bad_service_filter")
    @property
    @pulumi.getter(name="goodServiceFilter")
    def good_service_filter(self) -> str:
        """
        A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries quantifying good service provided. Must have ValueType = DOUBLE or ValueType = INT64 and must have MetricKind = DELTA or MetricKind = CUMULATIVE.
        """
        return pulumi.get(self, "good_service_filter")
    @property
    @pulumi.getter(name="totalServiceFilter")
    def total_service_filter(self) -> str:
        """
        A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries quantifying total demanded service. Must have ValueType = DOUBLE or ValueType = INT64 and must have MetricKind = DELTA or MetricKind = CUMULATIVE.
        """
        return pulumi.get(self, "total_service_filter")
@pulumi.output_type
class TriggerResponse(dict):
    """
    Specifies how many time series must fail a predicate to trigger a condition. If not specified, then a {count: 1} trigger is used.
    """
    def __init__(__self__, *,
                 count: int,
                 percent: float):
        """
        Specifies how many time series must fail a predicate to trigger a condition. If not specified, then a {count: 1} trigger is used.
        :param int count: The absolute number of time series that must fail the predicate for the condition to be triggered.
        :param float percent: The percentage of time series that must fail the predicate for the condition to be triggered.
        """
        # Store each field by its snake_case key; the properties below read them back.
        pulumi.set(__self__, "count", count)
        pulumi.set(__self__, "percent", percent)
    @property
    @pulumi.getter
    def count(self) -> int:
        """
        The absolute number of time series that must fail the predicate for the condition to be triggered.
        """
        return pulumi.get(self, "count")
    @property
    @pulumi.getter
    def percent(self) -> float:
        """
        The percentage of time series that must fail the predicate for the condition to be triggered.
        """
        return pulumi.get(self, "percent")
@pulumi.output_type
class WindowsBasedSliResponse(dict):
"""
A WindowsBasedSli defines good_service as the count of time windows for which the provided service was of good quality. Criteria for determining if service was good are embedded in the window_criterion.
"""
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case property.
        suggest = None
        if key == "goodBadMetricFilter":
            suggest = "good_bad_metric_filter"
        elif key == "goodTotalRatioThreshold":
            suggest = "good_total_ratio_threshold"
        elif key == "metricMeanInRange":
            suggest = "metric_mean_in_range"
        elif key == "metricSumInRange":
            suggest = "metric_sum_in_range"
        elif key == "windowPeriod":
            suggest = "window_period"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in WindowsBasedSliResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # dict-style access shim: warn on camelCase keys, then defer to dict.
        WindowsBasedSliResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # dict.get shim with the same camelCase-key warning as __getitem__.
        WindowsBasedSliResponse.__key_warning(key)
        return super().get(key, default)
def __init__(__self__, *,
good_bad_metric_filter: str,
good_total_ratio_threshold: 'outputs.PerformanceThresholdResponse',
metric_mean_in_range: 'outputs.MetricRangeResponse',
metric_sum_in_range: 'outputs.MetricRangeResponse',
window_period: str):
"""
A WindowsBasedSli defines good_service as the count of time windows for which the provided service was of good quality. Criteria for determining if service was good are embedded in the window_criterion.
:param str good_bad_metric_filter: A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries with ValueType = BOOL. The window is good if any true values appear in the window.
:param 'PerformanceThresholdResponse' good_total_ratio_threshold: A window is good if its performance is high enough.
:param 'MetricRangeResponse' metric_mean_in_range: A window is good if the metric's value is in a good range, averaged across returned streams.
:param 'MetricRangeResponse' metric_sum_in_range: A window is good if the metric's value is in a good range, summed across returned streams.
:param str window_period: Duration over which window quality is evaluated. Must be an integer fraction of a day and at least 60s.
"""
pulumi.set(__self__, "good_bad_metric_filter", good_bad_metric_filter)
pulumi.set(__self__, "good_total_ratio_threshold", good_total_ratio_threshold)
pulumi.set(__self__, "metric_mean_in_range", metric_mean_in_range)
pulumi.set(__self__, "metric_sum_in_range", metric_sum_in_range)
pulumi.set(__self__, "window_period", window_period)
@property
@pulumi.getter(name="goodBadMetricFilter")
def good_bad_metric_filter(self) -> str:
"""
A monitoring filter (https://cloud.google.com/monitoring/api/v3/filters) specifying a TimeSeries with ValueType = BOOL. The window is good if any true values appear in the window.
"""
return pulumi.get(self, "good_bad_metric_filter")
@property
@pulumi.getter(name="goodTotalRatioThreshold")
def good_total_ratio_threshold(self) -> 'outputs.PerformanceThresholdResponse':
"""
A window is good if its performance is high enough.
"""
return pulumi.get(self, "good_total_ratio_threshold")
@property
@pulumi.getter(name="metricMeanInRange")
def metric_mean_in_range(self) -> 'outputs.MetricRangeResponse':
"""
A window is good if the metric's value is in a good range, averaged across returned streams.
"""
return pulumi.get(self, "metric_mean_in_range")
@property
@pulumi.getter(name="metricSumInRange")
def metric_sum_in_range(self) -> 'outputs.MetricRangeResponse':
"""
A window is good if the metric's value is in a good range, summed across returned streams.
"""
return pulumi.get(self, "metric_sum_in_range")
@property
@pulumi.getter(name="windowPeriod")
def window_period(self) -> str:
"""
Duration over which window quality is evaluated. Must be an integer fraction of a day and at least 60s.
"""
return pulumi.get(self, "window_period")
| 56.75161
| 1,541
| 0.70348
| 16,144
| 123,378
| 5.246283
| 0.063739
| 0.017356
| 0.016884
| 0.024676
| 0.809389
| 0.783248
| 0.774464
| 0.750567
| 0.738535
| 0.730141
| 0
| 0.003531
| 0.221952
| 123,378
| 2,173
| 1,542
| 56.777727
| 0.878774
| 0.559524
| 0
| 0.503722
| 1
| 0.019851
| 0.203588
| 0.069993
| 0
| 0
| 0
| 0
| 0
| 1
| 0.182796
| false
| 0.004963
| 0.00579
| 0
| 0.35153
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6250ae6a6e22f4d811fe434bb34e69a45bfe9b76
| 15,625
|
py
|
Python
|
src/the_tale/the_tale/game/companions/tests/test_abilities_container.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 85
|
2017-11-21T12:22:02.000Z
|
2022-03-27T23:07:17.000Z
|
src/the_tale/the_tale/game/companions/tests/test_abilities_container.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 545
|
2017-11-04T14:15:04.000Z
|
2022-03-27T14:19:27.000Z
|
src/the_tale/the_tale/game/companions/tests/test_abilities_container.py
|
al-arz/the-tale
|
542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5
|
[
"BSD-3-Clause"
] | 45
|
2017-11-11T12:36:30.000Z
|
2022-02-25T06:10:44.000Z
|
# smart_imports is the project's import machinery; all() presumably injects
# the names used below (utils_testcase, mock, companions_abilities_container,
# companions_abilities_effects, companions_abilities_exceptions) into this
# module's namespace — they are not imported explicitly anywhere in this file.
import smart_imports
smart_imports.all()
# Short alias for the companion-abilities effects module used throughout the tests.
effects = companions_abilities_effects
class ContainerTests(utils_testcase.TestCase):
    """Tests for the companion-abilities Container (serialization, duplicate
    detection, coherence-dependent ability lookup and effect application)."""

    def setUp(self):
        super(ContainerTests, self).setUp()
        self.container_1 = companions_abilities_container.Container(
            common=(effects.ABILITIES.PEACEFUL, effects.ABILITIES.SNEAKY),
            start=frozenset(),
            coherence=effects.ABILITIES.MANAGING,
            honor=None,
            peacefulness=effects.ABILITIES.AGGRESSIVE)
        # container_2 deliberately repeats PEACEFUL in both `common` and
        # `start` — used by the duplicate-detection tests below.
        self.container_2 = companions_abilities_container.Container(
            common=(effects.ABILITIES.PEACEFUL, effects.ABILITIES.HONEST, effects.ABILITIES.CANNY),
            start=frozenset((effects.ABILITIES.BONA_FIDE, effects.ABILITIES.PEACEFUL)),
            coherence=None,
            honor=effects.ABILITIES.SNEAKY,
            peacefulness=effects.ABILITIES.AGGRESSIVE)

    def test_initialization(self):
        # Every constructor argument must be exposed unchanged as an attribute.
        for actual, expected in ((self.container_1.common, (effects.ABILITIES.PEACEFUL, effects.ABILITIES.SNEAKY)),
                                 (self.container_1.start, frozenset()),
                                 (self.container_1.coherence, effects.ABILITIES.MANAGING),
                                 (self.container_1.honor, None),
                                 (self.container_1.peacefulness, effects.ABILITIES.AGGRESSIVE),
                                 (self.container_2.common, (effects.ABILITIES.PEACEFUL, effects.ABILITIES.HONEST, effects.ABILITIES.CANNY)),
                                 (self.container_2.start, frozenset((effects.ABILITIES.BONA_FIDE, effects.ABILITIES.PEACEFUL))),
                                 (self.container_2.coherence, None),
                                 (self.container_2.honor, effects.ABILITIES.SNEAKY),
                                 (self.container_2.peacefulness, effects.ABILITIES.AGGRESSIVE)):
            self.assertEqual(actual, expected)

    def test_serialization(self):
        # serialize -> deserialize -> serialize must reproduce the same data.
        for container in (self.container_1, self.container_2):
            self.assertEqual(container.serialize(),
                             companions_abilities_container.Container.deserialize(container.serialize()).serialize())

    def test_has_duplicates(self):
        self.assertFalse(self.container_1.has_duplicates())
        self.assertTrue(self.container_2.has_duplicates())

    def test_has_same_effects(self):
        self.assertTrue(self.container_1.has_same_effects())
        self.assertTrue(self.container_2.has_same_effects())
        container_3 = companions_abilities_container.Container(
            common=(effects.ABILITIES.OBSTINATE, effects.ABILITIES.PEACEFUL),
            start=frozenset((effects.ABILITIES.CANNY,)),
            coherence=None,
            honor=None,
            peacefulness=None)
        self.assertFalse(container_3.has_same_effects())

    def test_not_ordered(self):
        # Passing an unordered collection as `common` must be rejected.
        self.assertRaises(companions_abilities_exceptions.NotOrderedUIDSError,
                          companions_abilities_container.Container,
                          common={effects.ABILITIES.PEACEFUL, effects.ABILITIES.HONEST, effects.ABILITIES.CANNY})

    def test_start_abilities(self):
        self.assertEqual(set(self.container_1.start_abilities),
                         {effects.ABILITIES.MANAGING, effects.ABILITIES.AGGRESSIVE})
        self.assertEqual(set(self.container_2.start_abilities),
                         {effects.ABILITIES.BONA_FIDE, effects.ABILITIES.PEACEFUL,
                          effects.ABILITIES.SNEAKY, effects.ABILITIES.AGGRESSIVE})

    def test_coherence_abilities(self):
        # `common` abilities unlock at evenly spaced coherence levels.
        self.assertEqual(list(self.container_1.coherence_abilities),
                         [(33, effects.ABILITIES.PEACEFUL), (66, effects.ABILITIES.SNEAKY)])
        self.assertEqual(list(self.container_2.coherence_abilities),
                         [(25, effects.ABILITIES.PEACEFUL), (50, effects.ABILITIES.HONEST), (75, effects.ABILITIES.CANNY)])

    def test_all_abilities(self):
        # Start abilities appear at coherence 0, common ones at their levels.
        self.assertEqual(set(self.container_1.all_abilities),
                         {(33, effects.ABILITIES.PEACEFUL), (66, effects.ABILITIES.SNEAKY),
                          (0, effects.ABILITIES.MANAGING), (0, effects.ABILITIES.AGGRESSIVE)})
        self.assertEqual(set(self.container_2.all_abilities),
                         {(25, effects.ABILITIES.PEACEFUL), (50, effects.ABILITIES.HONEST), (75, effects.ABILITIES.CANNY),
                          (0, effects.ABILITIES.BONA_FIDE), (0, effects.ABILITIES.PEACEFUL),
                          (0, effects.ABILITIES.SNEAKY), (0, effects.ABILITIES.AGGRESSIVE)})

    def check_abilities_for_coherence(self, container, coherence, uids):
        # Compare ability uid values (order-insensitive) at a coherence level.
        self.assertEqual({ability.value for ability in container.abilities_for_coherence(coherence)},
                         set(uids))

    def test_abilities_for_coherence(self):
        for coherence, uids in ((100, [5, 9, 3, 4]),
                                (66, [5, 9, 3, 4]),
                                (50, [5, 3, 4]),
                                (33, [5, 3, 4]),
                                (32, [3, 4]),
                                (9, [3, 4]),
                                (0, [3, 4])):
            self.check_abilities_for_coherence(self.container_1, coherence, uids)
        for coherence, uids in ((100, [5, 8, 7, 2, 5, 9, 4]),
                                (75, [5, 8, 7, 2, 5, 9, 4]),
                                (50, [5, 8, 2, 5, 9, 4]),
                                (25, [5, 2, 5, 9, 4]),
                                (24, [2, 5, 9, 4]),
                                (0, [2, 5, 9, 4])):
            self.check_abilities_for_coherence(self.container_2, coherence, uids)

    def test_modify_attribute(self):
        # Each active ability doubles the value, so the result is 2**N for the
        # N abilities available at the given coherence; a dead companion's
        # abilities do not apply at all (value passes through unchanged).
        with mock.patch('the_tale.game.companions.abilities.effects.Base.modify_attribute', lambda self, abilities_levels, modifier, value: value * 2):
            for container, coherence, expected in ((self.container_1, 100, 16),
                                                   (self.container_1, 66, 16),
                                                   (self.container_1, 50, 8),
                                                   (self.container_1, 33, 8),
                                                   (self.container_1, 32, 4),
                                                   (self.container_1, 9, 4),
                                                   (self.container_2, 100, 128),
                                                   (self.container_2, 75, 128),
                                                   (self.container_2, 50, 64),
                                                   (self.container_2, 25, 32),
                                                   (self.container_2, 24, 16),
                                                   (self.container_2, 0, 16)):
                self.assertEqual(container.modify_attribute(coherence, {}, None, 1, is_dead=False), expected)
            for coherence in (100, 66, 50, 33, 32, 9):
                self.assertEqual(self.container_1.modify_attribute(coherence, {}, None, 1, is_dead=True), 1)
            for coherence in (100, 75, 50, 25, 24, 0):
                self.assertEqual(self.container_2.modify_attribute(coherence, {}, None, 1, is_dead=True), 1)

    def test_check_attribute(self):
        coherences_1 = (100, 66, 50, 33, 32, 9)
        coherences_2 = (100, 75, 50, 25, 24, 0)
        with mock.patch('the_tale.game.companions.abilities.effects.Base.check_attribute', lambda self, modifier: True):
            # Alive: the patched check result propagates through.
            for coherence in coherences_1:
                self.assertEqual(self.container_1.check_attribute(coherence, None, is_dead=False), True)
            for coherence in coherences_2:
                self.assertEqual(self.container_2.check_attribute(coherence, None, is_dead=False), True)
            # Dead: checks fail even though every effect would return True.
            for coherence in coherences_1:
                self.assertEqual(self.container_1.check_attribute(coherence, None, is_dead=True), False)
            for coherence in coherences_2:
                self.assertEqual(self.container_2.check_attribute(coherence, None, is_dead=True), False)
        with mock.patch('the_tale.game.companions.abilities.effects.Base.check_attribute', lambda self, modifier: False):
            for coherence in coherences_1:
                self.assertEqual(self.container_1.check_attribute(coherence, None, is_dead=False), False)
            for coherence in coherences_2:
                self.assertEqual(self.container_2.check_attribute(coherence, None, is_dead=False), False)

    def test_modify_attribute__allow_for_dead(self):
        # With TEMPORARY added, exactly one doubling effect still applies for
        # a dead companion (presumably TEMPORARY is allowed while dead).
        self.container_1.start = self.container_1.start | {effects.ABILITIES.TEMPORARY}
        with mock.patch('the_tale.game.companions.abilities.effects.Base.modify_attribute', lambda self, abilities_levels, modifier, value: value * 2):
            for coherence in (100, 66, 50, 33, 32, 9):
                self.assertEqual(self.container_1.modify_attribute(coherence, {}, None, 1, is_dead=True), 2)

    def test_check_attribute__allow_for_dead(self):
        # With TEMPORARY added, the patched check result reaches the caller
        # even for a dead companion.
        self.container_1.start = self.container_1.start | {effects.ABILITIES.TEMPORARY}
        with mock.patch('the_tale.game.companions.abilities.effects.Base.check_attribute', lambda self, modifier: True):
            for coherence in (100, 66, 50, 33, 32, 9):
                self.assertEqual(self.container_1.check_attribute(coherence, None, is_dead=True), True)
        with mock.patch('the_tale.game.companions.abilities.effects.Base.check_attribute', lambda self, modifier: False):
            for coherence in (100, 66, 50, 33, 32, 9):
                self.assertEqual(self.container_1.check_attribute(coherence, None, is_dead=True), False)

    def test_can_be_freezed(self):
        self.assertTrue(self.container_1.can_be_freezed())
        self.assertTrue(self.container_2.can_be_freezed())

    def test_can_be_freezed_2(self):
        # Adding TEMPORARY makes the container non-freezable.
        self.container_1.start = self.container_1.start | {effects.ABILITIES.TEMPORARY}
        self.assertFalse(self.container_1.can_be_freezed())
| 72.337963
| 165
| 0.67712
| 1,951
| 15,625
| 5.204511
| 0.049206
| 0.162596
| 0.168407
| 0.248178
| 0.889009
| 0.828147
| 0.777526
| 0.753595
| 0.725133
| 0.69155
| 0
| 0.037133
| 0.20544
| 15,625
| 215
| 166
| 72.674419
| 0.780749
| 0
| 0
| 0.155172
| 0
| 0
| 0.024322
| 0.024322
| 0
| 0
| 0
| 0
| 0.609195
| 1
| 0.097701
| false
| 0
| 0.011494
| 0
| 0.114943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6279ef8040cce2e911dcb6be066112176947a47e
| 20,007
|
py
|
Python
|
tests_openshift/database/test_events.py
|
majamassarini/packit-service
|
12baf67799412c8fa56e2a821cd9d584e2437141
|
[
"MIT"
] | 20
|
2019-05-24T12:33:05.000Z
|
2020-07-28T06:03:57.000Z
|
tests_openshift/database/test_events.py
|
majamassarini/packit-service
|
12baf67799412c8fa56e2a821cd9d584e2437141
|
[
"MIT"
] | 735
|
2019-05-15T11:52:36.000Z
|
2020-08-02T23:21:44.000Z
|
tests_openshift/database/test_events.py
|
majamassarini/packit-service
|
12baf67799412c8fa56e2a821cd9d584e2437141
|
[
"MIT"
] | 28
|
2019-05-16T13:32:03.000Z
|
2020-07-29T10:23:54.000Z
|
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
from operator import attrgetter
from flexmock import flexmock
from ogr.services.github import GithubProject
from packit_service.constants import KojiTaskState, PG_BUILD_STATUS_FAILURE
from packit_service.models import (
ProjectReleaseModel,
GitProjectModel,
GitBranchModel,
PullRequestModel,
CoprBuildTargetModel,
TFTTestRunTargetModel,
TestingFarmResult,
)
from packit_service.worker.events import (
ReleaseEvent,
PushGitHubEvent,
PullRequestGithubEvent,
PullRequestCommentGithubEvent,
TestingFarmResultsEvent,
MergeRequestGitlabEvent,
KojiTaskEvent,
MergeRequestCommentGitlabEvent,
PushGitlabEvent,
CheckRerunCommitEvent,
CheckRerunPullRequestEvent,
CheckRerunReleaseEvent,
AbstractForgeIndependentEvent,
)
from packit_service.worker.parser import Parser
from packit_service.worker.helpers.testing_farm import TestingFarmJobHelper
from tests_openshift.conftest import SampleValues
def test_release_event_existing_release(
    clean_before_and_after, release_model, release_event_dict
):
    # Stub out the GitHub API call used to resolve the tag's commit sha.
    flexmock(GithubProject).should_receive("get_sha_from_tag").and_return(
        SampleValues.commit_sha
    )
    event_object = Parser.parse_event(release_event_dict)

    assert isinstance(event_object, ReleaseEvent)
    assert (event_object.identifier, event_object.git_ref, event_object.tag_name) == (
        "v1.0.2",
        "v1.0.2",
        "v1.0.2",
    )
    assert event_object.commit_sha == "80201a74d96c"

    # The event must resolve to the pre-existing release row.
    trigger = event_object.db_trigger
    assert isinstance(trigger, ProjectReleaseModel)
    assert trigger == release_model
    assert (trigger.tag_name, trigger.commit_hash) == ("v1.0.2", "80201a74d96c")
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )


def test_release_event_non_existing_release(clean_before_and_after, release_event_dict):
    # Same payload, but no release row pre-exists: the event still exposes a
    # fully populated ProjectReleaseModel trigger.
    flexmock(GithubProject).should_receive("get_sha_from_tag").and_return(
        SampleValues.commit_sha
    )
    event_object = Parser.parse_event(release_event_dict)

    assert isinstance(event_object, ReleaseEvent)
    assert (event_object.identifier, event_object.git_ref, event_object.tag_name) == (
        "v1.0.2",
        "v1.0.2",
        "v1.0.2",
    )
    assert event_object.commit_sha == "80201a74d96c"

    trigger = event_object.db_trigger
    assert isinstance(trigger, ProjectReleaseModel)
    assert (trigger.tag_name, trigger.commit_hash) == ("v1.0.2", "80201a74d96c")
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )
def test_push_branch_event_existing_branch(
    clean_before_and_after, branch_model, push_branch_event_dict
):
    event_object = Parser.parse_event(push_branch_event_dict)

    assert isinstance(event_object, PushGitHubEvent)
    assert (event_object.identifier, event_object.git_ref) == (
        "build-branch",
        "build-branch",
    )
    assert event_object.commit_sha == "04885ff850b0fa0e206cd09db73565703d48f99b"

    # The event must resolve to the pre-existing branch row.
    trigger = event_object.db_trigger
    assert isinstance(trigger, GitBranchModel)
    assert trigger == branch_model
    assert trigger.name == "build-branch"
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )


def test_push_branch_event_non_existing_branch(
    clean_before_and_after, push_branch_event_dict
):
    # No branch row pre-exists; the event still exposes a GitBranchModel trigger.
    event_object = Parser.parse_event(push_branch_event_dict)

    assert isinstance(event_object, PushGitHubEvent)
    assert (event_object.identifier, event_object.git_ref) == (
        "build-branch",
        "build-branch",
    )
    assert event_object.commit_sha == "04885ff850b0fa0e206cd09db73565703d48f99b"

    trigger = event_object.db_trigger
    assert isinstance(trigger, GitBranchModel)
    assert trigger.name == "build-branch"
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )
def test_pr_event_existing_pr(clean_before_and_after, pr_model, pr_event_dict):
    event_object = Parser.parse_event(pr_event_dict)

    assert isinstance(event_object, PullRequestGithubEvent)
    assert event_object.identifier == "342"
    assert event_object.git_ref is None
    assert event_object.commit_sha == "528b803be6f93e19ca4130bf4976f2800a3004c4"
    assert event_object.pr_id == 342

    # The event must resolve to the pre-existing pull-request row.
    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger == pr_model
    assert trigger.pr_id == 342
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )


def test_mr_event_existing_mr(clean_before_and_after, mr_model, mr_event_dict):
    event_object = Parser.parse_event(mr_event_dict)

    assert isinstance(event_object, MergeRequestGitlabEvent)
    assert event_object.git_ref is None
    assert event_object.commit_sha == "45e272a57335e4e308f3176df6e9226a9e7805a9"
    assert event_object.pr_id == 2

    # GitLab merge requests are stored as PullRequestModel rows too.
    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger == mr_model
    assert trigger.pr_id == 2
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "repo-name",
    )


def test_merge_request_comment_event(clean_before_and_after, mr_comment_event_dict):
    event_object = Parser.parse_event(mr_comment_event_dict)

    assert isinstance(event_object, MergeRequestCommentGitlabEvent)
    assert (event_object.pr_id, event_object.identifier) == (2, "2")
    assert event_object.git_ref is None
    assert event_object.commit_sha == "45e272a57335e4e308f3176df6e9226a9e7805a9"

    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger.pr_id == 2
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "testing-packit",
        "hello-there",
    )


def test_push_gitlab_event(
    clean_before_and_after, branch_model_gitlab, push_gitlab_event_dict
):
    event_object = Parser.parse_event(push_gitlab_event_dict)

    assert isinstance(event_object, PushGitlabEvent)
    assert (event_object.identifier, event_object.git_ref) == (
        "build-branch",
        "build-branch",
    )
    assert event_object.commit_sha == "cb2859505e101785097e082529dced35bbee0c8f"

    trigger = event_object.db_trigger
    assert isinstance(trigger, GitBranchModel)
    assert trigger == branch_model_gitlab
    assert trigger.name == "build-branch"
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "repo-name",
    )


def test_pr_event_non_existing_pr(clean_before_and_after, pr_event_dict):
    # No PR row pre-exists; the event still exposes a PullRequestModel trigger.
    event_object = Parser.parse_event(pr_event_dict)

    assert isinstance(event_object, PullRequestGithubEvent)
    assert event_object.identifier == "342"
    assert event_object.git_ref is None
    assert event_object.commit_sha == "528b803be6f93e19ca4130bf4976f2800a3004c4"
    assert event_object.pr_id == 342

    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger.pr_id == 342
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )
def test_pr_comment_event_existing_pr(
    clean_before_and_after, pr_model, pr_comment_event_dict_packit_build
):
    event_object = Parser.parse_event(pr_comment_event_dict_packit_build)

    assert isinstance(event_object, PullRequestCommentGithubEvent)
    assert event_object.identifier == "342"
    assert event_object.git_ref is None
    assert event_object.pr_id == 342
    assert event_object.project_url == "https://github.com/the-namespace/the-repo-name"

    # commit_sha is looked up lazily via get_pr, so install the stub before
    # the first access.
    flexmock(GithubProject).should_receive("get_pr").with_args(pr_id=342).and_return(
        flexmock(head_commit="12345")
    )
    assert event_object.commit_sha == "12345"

    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger == pr_model
    assert trigger.pr_id == 342
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )


def test_pr_comment_event_non_existing_pr(
    clean_before_and_after, pr_comment_event_dict_packit_build
):
    # Same as above, but without a pre-existing PR row.
    event_object = Parser.parse_event(pr_comment_event_dict_packit_build)

    assert isinstance(event_object, PullRequestCommentGithubEvent)
    assert event_object.identifier == "342"
    assert event_object.git_ref is None
    assert event_object.pr_id == 342

    # Stub get_pr before commit_sha is first accessed.
    flexmock(GithubProject).should_receive("get_pr").with_args(pr_id=342).and_return(
        flexmock(head_commit="12345")
    )
    assert event_object.commit_sha == "12345"

    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger.pr_id == 342
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )
def test_testing_farm_response_existing_pr(
    clean_before_and_after, pr_model, a_new_test_run_pr, tf_notification, tf_result
):
    # The notification only carries a pipeline id; the details come from a
    # stubbed get_request_details call.
    flexmock(TestingFarmJobHelper).should_receive("get_request_details").with_args(
        SampleValues.pipeline_id
    ).and_return(tf_result)
    event_object = Parser.parse_event(tf_notification)

    assert isinstance(event_object, TestingFarmResultsEvent)
    assert event_object.commit_sha == SampleValues.commit_sha

    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger == pr_model
    assert trigger.pr_id == 342
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )


def test_testing_farm_response_non_existing_pr(
    clean_before_and_after, tf_notification, tf_result
):
    flexmock(TestingFarmJobHelper).should_receive("get_request_details").with_args(
        SampleValues.pipeline_id
    ).and_return(tf_result)
    event_object = Parser.parse_event(tf_notification)

    assert isinstance(event_object, TestingFarmResultsEvent)
    assert event_object.commit_sha == SampleValues.different_commit_sha
    # Without a stored test run there is nothing to link the event to.
    assert not event_object.db_trigger


def test_testing_farm_response_existing_branch_push(
    clean_before_and_after,
    branch_model,
    a_new_test_run_branch_push,
    tf_notification,
    tf_result,
):
    flexmock(TestingFarmJobHelper).should_receive("get_request_details").with_args(
        SampleValues.pipeline_id
    ).and_return(tf_result)
    event_object = Parser.parse_event(tf_notification)

    assert isinstance(event_object, TestingFarmResultsEvent)
    assert event_object.commit_sha == SampleValues.commit_sha

    trigger = event_object.db_trigger
    assert isinstance(trigger, GitBranchModel)
    assert trigger == branch_model
    assert trigger.name == "build-branch"
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )


def test_testing_farm_response_non_existing_branch_push(
    clean_before_and_after, tf_notification, tf_result
):
    flexmock(TestingFarmJobHelper).should_receive("get_request_details").with_args(
        SampleValues.pipeline_id
    ).and_return(tf_result)
    event_object = Parser.parse_event(tf_notification)

    assert isinstance(event_object, TestingFarmResultsEvent)
    # For backwards compatibility, unknown results are treated as pull-requests
    assert event_object.commit_sha == SampleValues.different_commit_sha
    assert not event_object.db_trigger
def test_koji_build_scratch_start(
    clean_before_and_after, pr_model, a_koji_build_for_pr, koji_build_scratch_start_dict
):
    # A "start" message maps to the `open` Koji task state.
    event_object = Parser.parse_event(koji_build_scratch_start_dict)

    assert isinstance(event_object, KojiTaskEvent)
    assert event_object.build_id == SampleValues.build_id
    assert event_object.state == KojiTaskState.open

    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger == pr_model
    assert trigger.pr_id == 342
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )


def test_koji_build_scratch_end(
    clean_before_and_after, pr_model, a_koji_build_for_pr, koji_build_scratch_end_dict
):
    # An "end" message maps to the `closed` Koji task state.
    event_object = Parser.parse_event(koji_build_scratch_end_dict)

    assert isinstance(event_object, KojiTaskEvent)
    assert event_object.build_id == SampleValues.build_id
    assert event_object.state == KojiTaskState.closed

    trigger = event_object.db_trigger
    assert isinstance(trigger, PullRequestModel)
    assert trigger == pr_model
    assert trigger.pr_id == 342
    assert isinstance(trigger.project, GitProjectModel)
    assert (trigger.project.namespace, trigger.project.repo_name) == (
        "the-namespace",
        "the-repo-name",
    )
def test_parse_check_rerun_commit(
    clean_before_and_after,
    branch_model,
    branch_trigger_model,
    check_rerun_event_dict_commit,
):
    """A check re-run whose external_id points at a branch trigger parses as a commit re-run."""
    check_rerun_event_dict_commit["check_run"]["external_id"] = str(
        branch_trigger_model.id
    )
    event = Parser.parse_event(check_rerun_event_dict_commit)
    assert isinstance(event, CheckRerunCommitEvent)
    assert event.repo_namespace == "packit"
    assert event.repo_name == "hello-world"
    assert event.commit_sha == "0e5d8b51fd5dfa460605e1497d22a76d65c6d7fd"
    assert event.project_url == "https://github.com/packit/hello-world"
    # Both the ref and the identifier come from the branch the trigger points at.
    assert event.git_ref == branch_model.name
    assert event.identifier == branch_model.name
    assert event.check_name_job == "testing-farm"
    assert event.check_name_target == "fedora-rawhide-x86_64"
    assert isinstance(event.project, GithubProject)
    assert event.project.full_repo_name == "packit/hello-world"
    assert not event.base_project
    assert event.tests_targets_override == {"fedora-rawhide-x86_64"}
def test_parse_check_rerun_pull_request(
    clean_before_and_after, pr_model, pr_trigger_model, check_rerun_event_dict_commit
):
    """A check re-run whose external_id points at a PR trigger parses as a PR re-run."""
    check_rerun_event_dict_commit["check_run"]["external_id"] = str(pr_trigger_model.id)
    event = Parser.parse_event(check_rerun_event_dict_commit)
    assert isinstance(event, CheckRerunPullRequestEvent)
    assert event.repo_namespace == "packit"
    assert event.repo_name == "hello-world"
    assert event.commit_sha == "0e5d8b51fd5dfa460605e1497d22a76d65c6d7fd"
    assert event.project_url == "https://github.com/packit/hello-world"
    # PR number doubles as the string identifier of the event.
    assert event.pr_id == pr_model.pr_id
    assert event.identifier == str(pr_model.pr_id)
    assert isinstance(event.project, GithubProject)
    assert event.project.full_repo_name == "packit/hello-world"
    # With a GitHub app installation we cannot work with the fork repo.
    assert not event.base_project
    assert event.check_name_job == "testing-farm"
    assert event.check_name_target == "fedora-rawhide-x86_64"
    assert event.tests_targets_override == {"fedora-rawhide-x86_64"}
def test_parse_check_rerun_release(
    clean_before_and_after,
    release_model,
    release_trigger_model,
    check_rerun_event_dict_commit,
):
    """A check re-run whose external_id points at a release trigger parses as a release re-run."""
    check_rerun_event_dict_commit["check_run"]["external_id"] = str(
        release_trigger_model.id
    )
    event = Parser.parse_event(check_rerun_event_dict_commit)
    assert isinstance(event, CheckRerunReleaseEvent)
    assert event.repo_namespace == "packit"
    assert event.repo_name == "hello-world"
    assert event.commit_sha == "0e5d8b51fd5dfa460605e1497d22a76d65c6d7fd"
    assert event.project_url == "https://github.com/packit/hello-world"
    # Tag name drives the git ref and the identifier alike.
    assert event.tag_name == release_model.tag_name
    assert event.git_ref == release_model.tag_name
    assert event.identifier == release_model.tag_name
    assert isinstance(event.project, GithubProject)
    assert event.project.full_repo_name == "packit/hello-world"
    # With a GitHub app installation we cannot work with the fork repo.
    assert not event.base_project
    assert event.check_name_job == "testing-farm"
    assert event.check_name_target == "fedora-rawhide-x86_64"
    assert event.tests_targets_override == {"fedora-rawhide-x86_64"}
def test_filter_failed_models_targets_copr(
    clean_before_and_after, multiple_copr_builds
):
    """Failed Copr builds are filtered per target, keeping only the most recent per target."""
    builds_list = list(
        CoprBuildTargetModel.get_all_by(
            project_name=SampleValues.project,
            commit_sha=SampleValues.ref,
        )
    )
    assert len(builds_list) == 3
    # First and last builds target different chroots; the de-duplication assert
    # below implies the first two share a target — TODO confirm against fixture.
    assert builds_list[0].target != builds_list[2].target
    # Mark all three builds as failed so every one matches the status filter.
    builds_list[0].set_status(PG_BUILD_STATUS_FAILURE)
    builds_list[1].set_status(PG_BUILD_STATUS_FAILURE)
    builds_list[2].set_status(PG_BUILD_STATUS_FAILURE)
    filtered_models = (
        AbstractForgeIndependentEvent._filter_most_recent_models_targets_by_status(
            models=builds_list,
            statuses_to_filter_with=[PG_BUILD_STATUS_FAILURE],
        )
    )
    assert len(filtered_models) == 2  # 3 failed builds, duplicate targets collapsed
    # Of the two builds sharing a target, only the most recently submitted survives.
    most_recent_duplicate = max(builds_list[:2], key=attrgetter("build_submitted_time"))
    assert most_recent_duplicate.target in filtered_models
def test_filter_failed_models_targets_tf(
    clean_before_and_after, multiple_new_test_runs
):
    """Failed/errored TF runs are filtered per target, keeping the most recent per target."""
    test_list = list(
        TFTTestRunTargetModel.get_all_by_commit_target(
            commit_sha=SampleValues.commit_sha
        )
    )
    assert len(test_list) == 3
    # All three runs get a failing status (two 'failed', one 'error') so every
    # one matches the status filter below.
    test_list[0].set_status(TestingFarmResult.failed)
    test_list[1].set_status(TestingFarmResult.error)
    test_list[2].set_status(TestingFarmResult.failed)
    filtered_models = (
        AbstractForgeIndependentEvent._filter_most_recent_models_targets_by_status(
            models=test_list,
            statuses_to_filter_with=[
                TestingFarmResult.failed,
                TestingFarmResult.error,
            ],
        )
    )
    assert len(filtered_models) == 2  # 3 failing runs, duplicate targets collapsed
    # The max over test_list[1:3] implies those two share a target — TODO confirm
    # against fixture; only the most recently submitted of them survives.
    most_recent_duplicate = max(test_list[1:3], key=attrgetter("submitted_time"))
    assert most_recent_duplicate.target in filtered_models
| 39.383858
| 88
| 0.781526
| 2,540
| 20,007
| 5.775591
| 0.07874
| 0.161213
| 0.158759
| 0.121336
| 0.867008
| 0.841104
| 0.810293
| 0.794547
| 0.76728
| 0.746012
| 0
| 0.025766
| 0.140651
| 20,007
| 507
| 89
| 39.461538
| 0.827488
| 0.021792
| 0
| 0.628866
| 0
| 0
| 0.085276
| 0.026892
| 0
| 0
| 0
| 0
| 0.520619
| 1
| 0.056701
| false
| 0
| 0.023196
| 0
| 0.079897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
657dd57afb45502930b8d0993fbfa669177e8daa
| 9,091
|
py
|
Python
|
flask_blogging/signals.py
|
lniwn/Flask-Blogging
|
2733f5251962f62df6bf19baf7055a25c976e5e0
|
[
"MIT"
] | null | null | null |
flask_blogging/signals.py
|
lniwn/Flask-Blogging
|
2733f5251962f62df6bf19baf7055a25c976e5e0
|
[
"MIT"
] | null | null | null |
flask_blogging/signals.py
|
lniwn/Flask-Blogging
|
2733f5251962f62df6bf19baf7055a25c976e5e0
|
[
"MIT"
] | null | null | null |
"""
The flask_blogging signals module
"""
import blinker
signals = blinker.Namespace()
engine_initialised = signals.signal("engine_initialised", doc="""\
Signal send by the ``BloggingEngine`` after the object is initialized.
The arguments passed by the signal are:
:param app: The Flask app which is the sender
:type app: object
:keyword engine: The blogging engine that was initialized
:type engine: object
""")
post_processed = signals.signal("post_processed", doc="""\
Signal sent when a post is processed (i.e., the markdown is converted
to html text). The arguments passed along with this signal are:
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param post: The post object which was processed
:type post: dict
:param render: Flag to denote if the post is to be rendered or not
:type render: bool
""")
page_by_id_fetched = signals.signal("page_by_id_fetched", doc="""\
Signal sent when a blog page specified by ``id`` is fetched,
and prior to the post being processed.
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param post: The post object which was fetched
:type post: dict
:param meta: The metadata associated with that page
:type meta: dict
:param post_id: The identifier of the post
:type post_id: int
:param slug: The slug associated with the page
:type slug: str
""")
page_by_id_processed = signals.signal("page_by_id_generated", doc="""\
Signal sent when a blog page specified by ``id`` is fetched,
and prior to the post being processed.
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param post: The post object which was processed
:type post: dict
:param meta: The metadata associated with that page
:type meta: dict
:param post_id: The identifier of the post
:type post_id: int
:param slug: The slug associated with the page
:type slug: str
""")
posts_by_tag_fetched = signals.signal("posts_by_tag_fetched", doc="""\
Signal sent when posts are fetched for a given tag but before processing
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched with a given tag
:type posts: list
:param meta: The metadata associated with that page
:type meta: dict
:param tag: The tag that is requested
:type tag: str
:param count: The number of posts per page
:type count: int
:param page: The page offset
:type page: int
""")
posts_by_tag_processed = signals.signal("posts_by_tag_generated", doc="""\
Signal sent after posts for a given tag were fetched and processed
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched and processed with a given tag
:type posts: list
:param meta: The metadata associated with that page
:type meta: dict
:param tag: The tag that is requested
:type tag: str
:param count: The number of posts per page
:type count: int
:param page: The page offset
:type page: int
""")
posts_by_author_fetched = signals.signal("posts_by_author_fetched", doc="""\
Signal sent after posts by an author were fetched but before processing
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched with a given author
:type posts: list
:param meta: The metadata associated with that page
:type meta: dict
:param user_id: The ``user_id`` for the author
:type user_id: str
:param count: The number of posts per page
:type count: int
:param page: The page offset
:type page: int
""")
posts_by_author_processed = signals.signal("posts_by_author_generated",
doc="""\
Signal sent after posts by an author were fetched and processed
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched and processed with a given author
:type posts: list
:param meta: The metadata associated with that page
:type meta: dict
:param user_id: The ``user_id`` for the author
:type user_id: str
:param count: The number of posts per page
:type count: int
:param page: The page offset
:type page: int
""")
index_posts_fetched = signals.signal("index_posts_fetched", doc="""\
Signal sent after the posts for the index page are fetched
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched for the index page
:type posts: list
:param meta: The metadata associated with that page
:type meta: dict
:param count: The number of posts per page
:type count: int
:param page: The page offset
:type page: int
""")
index_posts_processed = signals.signal("index_posts_processed", doc="""\
Signal sent after the posts for the index page are fetched and processed
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched and processed with a given author
:type posts: list
:param meta: The metadata associated with that page
:type meta: dict
:param count: The number of posts per page
:type count: int
:param page: The page offset
:type page: int
""")
feed_posts_fetched = signals.signal("feed_posts_fetched", doc="""\
Signal send after feed posts are fetched
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched and processed with a given author
:type posts: list
""")
feed_posts_processed = signals.signal("feed_posts_processed", doc="""\
Signal send after feed posts are processed
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param feed: Feed of post fetched and processed
:type feed: list
""")
sitemap_posts_fetched = signals.signal("sitemap_posts_fetched", doc="""\
Signal send after posts are fetched
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched and processed with a given author
:type posts: list
""")
sitemap_posts_processed = signals.signal("sitemap_posts_processed", doc="""\
Signal send after posts are fetched and processed
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param posts: Lists of post fetched and processed with a given author
:type posts: list
""")
editor_post_saved = signals.signal("editor_post_saved", doc="""\
Signal sent after a post was saved during the POST request
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param post_id: The id of the post that was deleted
:type post_id: int
:param user: The user object
:type user: object
:param post: The post that was deleted
:type post: object
""")
editor_get_fetched = signals.signal("editor_get_fetched", doc="""\
Signal sent after fetching the post during the GET request
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param post_id: The id of the post that was deleted
:type post_id: int
:param form: The form prepared for the editor display
:type form: object
""")
post_deleted = signals.signal("post_deleted", doc="""\
The signal sent after the post is deleted.
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param post_id: The id of the post that was deleted
:type post_id: int
:param post: The post that was deleted
:type post: object
""")
blueprint_created = signals.signal("blueprint_created", doc="""\
The signal sent after the blueprint is created. A good time to
add other views to the blueprint.
:param app: The Flask app which is the sender
:type app: object
:param engine: The blogging engine that was initialized
:type engine: object
:param blueprint: The blog app blueprint
:type blueprint: object
""")
sqla_initialized = signals.signal("sqla_initialized", doc="""\
Signal sent after the SQLAStorage object is initialized
:param sqlastorage: The SQLAStorage object
:type sqlastorage: object
:param engine: The blogging engine that was initialized
:type engine: object
:param table_prefix: The prefix to use for tables
:type table_prefix: str
:param meta: The metadata for the database
:type meta: object
:param bind: The bind value in the multiple db scenario.
:type bind: object
""")
| 31.348276
| 76
| 0.763722
| 1,462
| 9,091
| 4.682627
| 0.084131
| 0.061058
| 0.047181
| 0.063833
| 0.789512
| 0.743938
| 0.723342
| 0.709758
| 0.709758
| 0.709758
| 0
| 0
| 0.159718
| 9,091
| 289
| 77
| 31.456747
| 0.896191
| 0.00363
| 0
| 0.732794
| 0
| 0
| 0.876838
| 0.014925
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.008097
| 0.004049
| 0
| 0.004049
| 0.020243
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65d6cd56f16a393ae3e50b83851c7390c8e631bc
| 139
|
py
|
Python
|
python/testData/psi/PatternMatchingOrPatterns.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/psi/PatternMatchingOrPatterns.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/psi/PatternMatchingOrPatterns.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
# Parser test data: `|` (or-) patterns in `match` statements — bare,
# parenthesized, and nested combinations. `x` is deliberately undefined;
# only the syntax tree matters here, the file is never executed.
match x:
    case 1 | 2:
        pass
    case (1 | 2):
        pass
    case (1 | 2) | 3:
        pass
    case 1 | (2 | 3):
        pass
| 13.9
| 21
| 0.345324
| 20
| 139
| 2.4
| 0.35
| 0.416667
| 0.5
| 0.625
| 0.875
| 0.875
| 0.541667
| 0
| 0
| 0
| 0
| 0.153846
| 0.532374
| 139
| 9
| 22
| 15.444444
| 0.584615
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.444444
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.