hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
04746c2207173f41e969b8ac926582e0d1549db2 | 93 | py | Python | classopt/__init__.py | moisutsu/classopt | 5822f4af925daf802317e528e9208d64061074ff | [
"MIT"
] | 2 | 2022-01-11T17:37:47.000Z | 2022-03-06T14:30:49.000Z | classopt/__init__.py | moisutsu/classopt | 5822f4af925daf802317e528e9208d64061074ff | [
"MIT"
] | 3 | 2021-08-07T08:33:18.000Z | 2021-08-07T08:36:40.000Z | classopt/__init__.py | moisutsu/classopt | 5822f4af925daf802317e528e9208d64061074ff | [
"MIT"
] | 1 | 2022-03-06T15:31:22.000Z | 2022-03-06T15:31:22.000Z | from .decorator import classopt
from .inheritance import ClassOpt
from .config import config
| 23.25 | 33 | 0.83871 | 12 | 93 | 6.5 | 0.5 | 0.358974 | 0.461538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 93 | 3 | 34 | 31 | 0.962963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
04af25a6903aadb12184a0b281e21e67875cee9d | 110 | py | Python | autonmt/toolkits/__init__.py | PRHLT/autonmt | eb0abe9d90feb8cc15f396325c0e4167f7a454a8 | [
"MIT"
] | 5 | 2022-01-10T07:59:16.000Z | 2022-01-14T01:02:52.000Z | autonmt/toolkits/__init__.py | PRHLT/autonmt | eb0abe9d90feb8cc15f396325c0e4167f7a454a8 | [
"MIT"
] | 2 | 2022-01-01T06:10:27.000Z | 2022-01-14T01:10:48.000Z | autonmt/toolkits/__init__.py | PRHLT/autonmt | eb0abe9d90feb8cc15f396325c0e4167f7a454a8 | [
"MIT"
] | 2 | 2022-01-10T08:20:02.000Z | 2022-02-22T08:10:16.000Z | from autonmt.toolkits.autonmt import AutonmtTranslator
from autonmt.toolkits.fairseq import FairseqTranslator
| 36.666667 | 54 | 0.890909 | 12 | 110 | 8.166667 | 0.583333 | 0.22449 | 0.387755 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 110 | 2 | 55 | 55 | 0.960784 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b6ba1bbcbc9d7a065d2f6ec47952757b45ed7516 | 159 | py | Python | python/test/base.py | pymor/dune-gdt | fabc279a79e7362181701866ce26133ec40a05e0 | [
"BSD-2-Clause"
] | 4 | 2018-10-12T21:46:08.000Z | 2020-08-01T18:54:02.000Z | python/test/base.py | dune-community/dune-gdt | fabc279a79e7362181701866ce26133ec40a05e0 | [
"BSD-2-Clause"
] | 154 | 2016-02-16T13:50:54.000Z | 2021-12-13T11:04:29.000Z | python/test/base.py | dune-community/dune-gdt | fabc279a79e7362181701866ce26133ec40a05e0 | [
"BSD-2-Clause"
] | 5 | 2016-03-02T10:11:20.000Z | 2020-02-08T03:56:24.000Z | import pytest
# from dune.xt.common.test import load_all_submodule
def test_load_all():
pass
# import dune.gdt as gdt
# load_all_submodule(gdt)
| 15.9 | 52 | 0.72327 | 25 | 159 | 4.36 | 0.56 | 0.192661 | 0.293578 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.201258 | 159 | 9 | 53 | 17.666667 | 0.858268 | 0.610063 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
b6ed92cee45a1cbb6b87ee79544a5924ec5589bb | 8,884 | py | Python | script.py | pgbito/sharepython | 37a95d296c03cbcff090c41faf579188d1bd0b40 | [
"Xnet",
"X11"
] | null | null | null | script.py | pgbito/sharepython | 37a95d296c03cbcff090c41faf579188d1bd0b40 | [
"Xnet",
"X11"
] | null | null | null | script.py | pgbito/sharepython | 37a95d296c03cbcff090c41faf579188d1bd0b40 | [
"Xnet",
"X11"
] | null | null | null |
from http.server import BaseHTTPRequestHandler, HTTPServer
import sys
import os
import glob
def handle_by_header(ext):
    """Try to resolve the Content-Type header value from the given file extension."""
ext = ext.lower()
if ext == '.aac':
return 'audio/aac'
elif ext == '.bin':
return 'application/octet-stream'
elif ext == '.avi':
return 'video/x-msvideo'
elif ext == '.bz':
return 'application/x-bzip'
elif ext == '.doc':
return 'application/msword'
elif ext == '.zip':
return 'application/zip'
elif ext == '.7z':
return 'application/x-7z-compressed'
elif ext == '.ico':
return 'image/x-icon'
elif ext == '.jar':
return 'application/java-archive'
elif ext == '.jpg':
return 'image/jpeg'
elif ext == '.png':
return 'image/png'
elif ext == '.js':
return 'application/javascript'
elif ext == '.json':
return 'application/json'
elif ext == '.mpeg':
return 'video/mpeg'
elif ext == '.ogg':
return 'audio/ogg'
elif ext == '.pdf':
return 'application/pdf'
elif ext == '.rar':
return 'application/x-rar-compressed'
elif ext == '.tar':
return 'application/x-tar'
elif ext == '.wav':
return 'audio/x-wav'
elif ext == '.weba':
return 'audio/webm'
elif ext == '.webm':
return 'video/webm'
elif ext == '.webp':
return 'image/webp'
else:
return None
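# A more compact, equivalent formulation of the lookup above (illustrative
# sketch only; the table is abbreviated and these names are hypothetical,
# nothing below uses them):
_MIME_BY_EXT = {
    '.aac': 'audio/aac',
    '.bin': 'application/octet-stream',
    '.json': 'application/json',
    '.pdf': 'application/pdf',
    '.png': 'image/png',
    '.zip': 'application/zip',
}
def handle_by_header_dict(ext):
    """Dict-based variant of handle_by_header; returns None for unknown extensions."""
    return _MIME_BY_EXT.get(ext.lower())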
textFileExtensions = '.csv .csh .xml .xhtml .sh .bat .ps1 .php .js .py .json .jsonc .txt .ini .md .java .c .cpp .h .i .go .rs .kt .cs .css .ex .exs .b .cc'.split(
' ')
def filetransfer(Message: str, FilePath: str):
try:
        import pyngrok.ngrok
except ImportError:
print('Please install pyngrok.\t pip install pyngrok')
return
hostName = 'localhost'
serverPort = 7001
    print('\t', Message, pyngrok.ngrok.connect(
serverPort, 'http', bind_tls=True), end="\r")
class ws(BaseHTTPRequestHandler):
def do_GET(self):
if self.path != '/favicon.ico':
b = None
filepath, ext = os.path.splitext(FilePath)
text=0
if ext in textFileExtensions:
text=1
contentHeader = "text/plain; charset=utf-8"
b = bytes(
f"{ open(FilePath, 'r', encoding='utf-8').read()}", 'utf-8')
else:
b = bytes(open(FilePath, 'rb').read())
contentHeader = handle_by_header(ext)
self.send_response(200)
if contentHeader is not None:
self.send_header('Content-Type', contentHeader)
if text!=1:
self.send_header('Content-Disposition',f'attachment; filename="{os.path.basename(FilePath)}"')
self.end_headers()
self.wfile.write(b)
else:
self.send_response(200)
self.end_headers()
self.wfile.write(bytes(b'\x00\x00\x01\x00\x01\x00\x10\x10\x00\x00\x00\x00\x00\x00h\x05\x00\x00\x16\x00\x00\x00(\x00\x00\x00\x10\x00\x00\x00 \x00\x00\x00\x01\x00\x08\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00ZZ\xf6\x00\x00\x00\xcd\x00\xa6\xa6\xff\x00\x92\x92\xf1\x00\x00\x00\xd6\x00\x16\x16\xea\x00\xe1\xe1\xfa\x00\x95\x95\xfa\x00$$\xc3\x00\x16\x16\xfc\x00\t\t\xd6\x00\x81\x81\xf5\x00\r\r\xd0\x00\xb7\xb7\xf3\x00\x08\x08\xee\x00RR\xfa\x00qq\xea\x00\x00\x00\xbc\x00$$\xf9\x00\x00\x00\xc5\x00\x8e\x8e\xf8\x00\x03\x03\xc5\x00\x00\x00\xd7\x00__\xf4\x00\x81\x81\xe4\x00\x9b\x9b\xfb\x00\t\t\xe9\x00\x86\x86\xf3\x00##\xf7\x00\x00\x00\xbd\x00\xf0\xf0\xfb\x00\x11\x11\xe6\x00\x00\x00\xc6\x00//\xdc\x00\x00\x00\xcf\x00\x00\x00\xd8\x0022\xdc\x00EE\xf9\x00HH\xf0\x00\x00\x00\xe1\x00\x99\x99\xe4\x00__\xfe\x00\x06\x06\xcf\x00\x03\x03\xe1\x00\xb7\xb7\xec\x00\x0b\x0b\xd5\x00\x99\x99\xff\x00\xcf\xcf\xf4\x00\x84\x84\xf7\x00::\xfd\x00\xa2\xa2\xf6\x00HH\xcd\x00\x00\x00\xbe\x00\x00\x00\xc7\x00\x00\x00\xd0\x00CC\xfd\x00\x00\x00\xe2\x00\xc6\xc6\xfe\x00\x96\x96\xf7\x00gg\xf3\x00\x1f\x1f\xe4\x00\t\t\xd9\x00\x9a\x9a\xfa\x00\x08\x08\xdf\x00\x12\x12\xbe\x00\xb4\xb4\xff\x00\xea\xea\xfd\x00\x00\x00\xc8\x00\x90\x90\xef\x00++\xed\x00\x00\x00\xd1\x00uu\xf9\x00ww\xf6\x00\xf6\xf6\xfd\x00\x17\x17\xe8\x00\xde\xde\xfe\x00\x01\x01\xdd\x00\x0c\x0c\xbf\x00\x03\x03\xe3\x00\t\t\xd1\x00\xe6\xe6\xf2\x00\x00\x00\xc0\x00\xa5\xa5\xf8\x00cc\xd7\x00\x16\x16\xdd\x00\xf3\xf3\xfe\x00\x00\x00\xd2\x00\xde\xde\xf6\x00\x00\x00\xdb\x00\x01\x01\xde\x00dd\xec\x00--\xfd\x00\x14\x14\xfb\x00\xf9\xf9\xfe\x0099\xd0\x00II\xf6\x00\x01\x01\xe7\x00\xe1\xe1\xff\x00\x96\x96\xf9\x00UU\xed\x00\xea\xea\xff\x00\x8a\x8a\xf1\x00\x00\x00\xc1\x00%%\xf8\x00\xf0\xf0\xff\x00\x0f\x0f\xed\x00\x00\x00\xca\x00\\\\\xf9\x00\x00\x00\xdc\x00\xf9\xf9\xff\x00\x84\x84\xe0\x00~~\xfb\x00\xff\xff\xff\x00\'\'\xe4\x00\x00\x00\xb9\x00\x00\x00\xc2\x00uu\xea\x00\x00\x00\xcb\x00\x04\x04\xce\x00\x00\x00\xdd\x0055\xe1\x00\x02\x02\xe3\x00\x9a\x9a\xf5\x00\x81\x81\xf3\x00 
\xeb\x00\x9c\x9c\xfb\x00PP\xfb\x00\x1d\x1d\xfd\x00<<\xed\x00((\xe8\x00\x00\x00\xc3\x00\xd8\xd8\xf9\x00\x00\x00\xcc\x00\x8d\x8d\xfc\x00\x00\x00\xd5\x00__\xf2\x00//\xf4\x00||\xf7\x0044\xf1\x00TT\xdb\x0066\xee\x00\x08\x08\xe4\x0088\xf4\x00ll\xf5\x00\x84\x84\xfd\x00]]\xd2\x00\x89\x89\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00pppppp,,,,,pppppppppW\x91\x08rr@3,ppppppp(M\x11\x11\x1d\x1d\x1d\x1d4S\x1eppppnQQQfffsss\x82^]pp/\x15\x13\x13\x13 555CC\x18pp\x8bjjjuuu\x84\x84\x01\x01v*\x0cI]O"66FFFVVV-$!\n\r\x06\x86\x86\x04\x04\x04\x16\x16##=xt\x10TD\x83XXllwwLY?qZ\x03\x92<eU+\'88yN\x8d\x1f\x81c\x1bz{J2p\x80`\x1a\x05|E\x8c&\x87\x0b:P;iBpR\x0e\x8a\x17\x8fH\x890\x14PPb\x8e\x00ppp_\x88G\x07>\x19}PPPk\x1cKppph%g\x0foPPP\x85~\x129pppppm\x90\\\t[71\x7f)appppppppmA..\x02dppppp\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'))
webServer = HTTPServer((hostName, serverPort), ws)
try:
webServer.serve_forever()
except KeyboardInterrupt:
pass
webServer.server_close()
sys.stdout.write("\033[K")
argv = sys.argv[1:]
__usage__ = f'''Usage:
{os.path.basename(sys.argv[0])} [filepath]
A simple filetransfer for python
'''
if len(argv) == 0:
print(__usage__)
else:
FileExists = (os.path.exists(argv[0]) and os.path.isfile(argv[0]))
    if not FileExists:
# check if file argument is *.foo or foo.*
if argv[0].startswith('*') or argv[0].endswith('*'):
r = glob.glob(argv[0])
            if not r:
print(__usage__)
else:
filetransfer(Message='Sharing {0} via'.format(
r[0]), FilePath=r[0])
else:
print(__usage__)
else:
filetransfer(Message='Sharing {0} via'.format(
argv[0]), FilePath=argv[0])
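# Example invocations (matching the usage string and the glob branch above):
#   python script.py report.pdf
#   python script.py '*.log'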
| 60.849315 | 4,799 | 0.647681 | 1,611 | 8,884 | 3.546245 | 0.225326 | 0.62069 | 0.869596 | 1.081743 | 0.318747 | 0.313496 | 0.296867 | 0.296867 | 0.292666 | 0.275162 | 0 | 0.261089 | 0.152409 | 8,884 | 145 | 4,800 | 61.268966 | 0.49761 | 0.011594 | 0 | 0.151261 | 0 | 0.033613 | 0.595007 | 0.514533 | 0 | 1 | 0 | 0 | 0 | 1 | 0.02521 | false | 0.008403 | 0.05042 | 0 | 0.285714 | 0.042017 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8e3444d834d24392f4a4c47dc86bca64a8e5f4bc | 19,615 | py | Python | ProbabilityDistributionCSVs.py | arpan-ghosh/python-data-algorithms | 0cb2dd8cc5e427ec5c381fd9905b93b06825838b | [
"MIT"
] | 1 | 2017-01-18T06:27:07.000Z | 2017-01-18T06:27:07.000Z | ProbabilityDistributionCSVs.py | arpan-ghosh/python-data-algorithms | 0cb2dd8cc5e427ec5c381fd9905b93b06825838b | [
"MIT"
] | null | null | null | ProbabilityDistributionCSVs.py | arpan-ghosh/python-data-algorithms | 0cb2dd8cc5e427ec5c381fd9905b93b06825838b | [
"MIT"
] | null | null | null |
# coding: utf-8
# Consider the data in the files a100.csv, b100.csv, s057.csv. Try to determine the
# underlying probability distributions of each data set.
# In[3]:
# Using pandas library for CSV reading and table manipulation
import pandas
import matplotlib.pyplot as plt
# In[293]:
# Reading a100.csv dataset from workspace folder and storing into variable a100
a100 = pandas.read_csv('/home/idies/workspace/AS.171.205/data/a100.csv', header=None)
b100 = pandas.read_csv('/home/idies/workspace/AS.171.205/data/b100.csv', header=None)
s057 = pandas.read_csv('/home/idies/workspace/AS.171.205/data/s057.csv', header=None)
# In[294]:
# Quick data exploration of 100, will print top 10 rows
a100.head(10)
# In[295]:
# Summary of numerical fields of all 100 rows
a100.describe()
# In[305]:
# A raw histogram of a100.csv using the default matplotlib histogram method, at 25 bins for clearer distribution
a100.hist(bins=25)
# A raw histogram of b100.csv using the default matplotlib histogram method, at 100 bins for clearer distribution
b100.hist(bins=100)
# A raw histogram of s057.csv using the default matplotlib histogram method, at 15 bins for clearer distribution
s057.hist(bins=15)
# In[338]:
# A raw histogram of the default iPython histogram method, at 15 bins for clearer distribution
# Stepfilled, and axes labeled
plt.figure(figsize=(12,8))
a100.hist(bins=15, histtype='stepfilled', normed=True, color='r', alpha=.5, label='Log Norm')
plt.title("a100 Normal Distribution Histogram")
plt.xlabel("Value")
plt.ylabel("Probability")
plt.legend()
plt.show()
# In[341]:
# A raw histogram of the default iPython histogram method, at 100 bins for clearer distribution
# Stepfilled, and axes labeled for b100
b100.hist(bins=100, histtype='stepfilled', normed=True, color='r', alpha=.5, label='Gaussian/Normal')
plt.title("b100 (Log Norm) Histogram")
plt.xlabel("Value")
plt.ylabel("Probability")
plt.legend()
plt.show()
# In[344]:
# A raw histogram of the default iPython histogram method, at 15 bins for clearer distribution
# Stepfilled, and axes labeled for s057
s057.hist(bins=15, histtype='stepfilled', normed=True, color='r', alpha=.5, label='Binomial')
plt.title("s057 Binomial Distribution Histogram")
plt.xlabel("Value")
plt.ylabel("Probability")
plt.legend()
plt.show()
# ## I found an interesting set of code from StackOverflow user, "tmthydvnprt"
# http://stackoverflow.com/questions/6620471/fitting-empirical-distribution-to-theoretical-ones-with-scipy-python
#
# With his code, every possible scipy.stats distribution is checked in a loop, and the data is plotted with a fitted line from each distribution. Then the best-fitting distribution is plotted independently. I have slightly modified his code and run the three csv files through it, changing the bin size for each csv to make the graph more readable. The loop is slow (about 30 seconds), so the graphs take some time to print.
#
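# A minimal sketch of the scoring idea (illustrative only; these names are
# local to the comment and unused below): fit one candidate distribution,
# then score it by the sum of squared errors between its PDF and the data's
# normalized histogram.
#
#   import numpy as np
#   import scipy.stats as st
#   demo = np.random.lognormal(size=1000)               # stand-in sample
#   y, edges = np.histogram(demo, bins=50, normed=True)
#   x = (edges[:-1] + edges[1:]) / 2.0                  # bin centers
#   params = st.lognorm.fit(demo)                       # shape(s), loc, scale
#   pdf = st.lognorm.pdf(x, *params[:-2], loc=params[-2], scale=params[-1])
#   sse = np.sum((y - pdf) ** 2.0)                      # smaller = better fit
#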
# ## a100.csv
# In[37]:
get_ipython().magic('matplotlib inline')
import warnings
import numpy as np
import pandas as pd
import scipy.stats as st
import statsmodels as sm
import matplotlib
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")
matplotlib.rcParams['figure.figsize'] = (16.0, 12.0)
matplotlib.style.use('ggplot')
# Create models from data
def best_fit_distribution(data, bins=75, ax=None):
"""Model data by finding best fit distribution to data"""
# Get histogram of original data
y, x = np.histogram(data, bins=bins, normed=True)
x = (x + np.roll(x, -1))[:-1] / 2.0
# Distributions to check
DISTRIBUTIONS = [
st.alpha,st.anglit,st.arcsine,st.beta,st.betaprime,st.bradford,st.burr,st.cauchy,st.chi,st.chi2,st.cosine,
st.dgamma,st.dweibull,st.erlang,st.expon,st.exponnorm,st.exponweib,st.exponpow,st.f,st.fatiguelife,st.fisk,
st.foldcauchy,st.foldnorm,st.frechet_r,st.frechet_l,st.genlogistic,st.genpareto,st.gennorm,st.genexpon,
st.genextreme,st.gausshyper,st.gamma,st.gengamma,st.genhalflogistic,st.gilbrat,st.gompertz,st.gumbel_r,
st.gumbel_l,st.halfcauchy,st.halflogistic,st.halfnorm,st.halfgennorm,st.hypsecant,st.invgamma,st.invgauss,
st.invweibull,st.johnsonsb,st.johnsonsu,st.ksone,st.kstwobign,st.laplace,st.levy,st.levy_l,st.levy_stable,
st.logistic,st.loggamma,st.loglaplace,st.lognorm,st.lomax,st.maxwell,st.mielke,st.nakagami,st.ncx2,st.ncf,
st.nct,st.norm,st.pareto,st.pearson3,st.powerlaw,st.powerlognorm,st.powernorm,st.rdist,st.reciprocal,
st.rayleigh,st.rice,st.recipinvgauss,st.semicircular,st.t,st.triang,st.truncexpon,st.truncnorm,st.tukeylambda,
st.uniform,st.vonmises,st.vonmises_line,st.wald,st.weibull_min,st.weibull_max,st.wrapcauchy
]
# Best holders
best_distribution = st.norm
best_params = (0.0, 1.0)
best_sse = np.inf
# Estimate distribution parameters from data
for distribution in DISTRIBUTIONS:
# Try to fit the distribution
try:
# Ignore warnings from data that can't be fit
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
# fit dist to data
params = distribution.fit(data)
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Calculate fitted PDF and error with fit in distribution
pdf = distribution.pdf(x, loc=loc, scale=scale, *arg)
sse = np.sum(np.power(y - pdf, 2.0))
# if axis pass in add to plot
try:
if ax:
pd.Series(pdf, x).plot(ax=ax)
except Exception:
pass
# identify if this distribution is better
if best_sse > sse > 0:
best_distribution = distribution
best_params = params
best_sse = sse
except Exception:
pass
return (best_distribution.name, best_params)
def make_pdf(dist, params, size=10000):
    """Generate the distribution's probability density function (PDF) as a pandas Series."""
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Get sane start and end points of distribution
start = dist.ppf(0.01, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.01, loc=loc, scale=scale)
end = dist.ppf(0.99, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.99, loc=loc, scale=scale)
# Build PDF and turn into pandas Series
x = np.linspace(start, end, size)
y = dist.pdf(x, loc=loc, scale=scale, *arg)
pdf = pd.Series(y, x)
return pdf
# Load data from statsmodels datasets
data = pd.read_csv('/home/idies/workspace/AS.171.205/data/a100.csv')
# Plot for comparison
plt.figure(figsize=(12,8))
ax = data.plot(kind='hist', bins=75, normed=True, alpha=0.5, color=plt.rcParams['axes.color_cycle'][1])
# Save plot limits
dataYLim = ax.get_ylim()
# Find best fit distribution
best_fit_name, best_fit_params = best_fit_distribution(data, 200, ax)
best_dist = getattr(st, best_fit_name)
# Update plots
ax.set_ylim(dataYLim)
ax.set_title(u'a100.csv.\n All Fitted Distributions')
ax.set_xlabel(u'Value')
ax.set_ylabel('Probability')
# Make PDF
pdf = make_pdf(best_dist, best_fit_params)
# Display
plt.figure(figsize=(12,8))
ax = pdf.plot(lw=2, label='PDF', legend=True)
data.plot(kind='hist', bins=100, normed=True, alpha=0.5, label='Data', legend=True, ax=ax)
param_names = (best_dist.shapes + ', loc, scale').split(', ') if best_dist.shapes else ['loc', 'scale']
param_str = ', '.join(['{}={:0.2f}'.format(k, v) for k, v in zip(param_names, best_fit_params)])
dist_str = '{}({})'.format(best_fit_name, param_str)
ax.set_title(u'a100.csv. with best fit distribution \n' + 'Best fit is: ' + dist_str)
ax.set_xlabel(u'Value')
ax.set_ylabel('Probability')
# ## Continued, with b100:
#
# Same approach as in the a100 section above: code adapted from StackOverflow
# user "tmthydvnprt" (http://stackoverflow.com/questions/6620471/fitting-empirical-distribution-to-theoretical-ones-with-scipy-python);
# only the input file and the bin size change below.
#
# ## b100.csv with bins = 50
# In[42]:
get_ipython().magic('matplotlib inline')
import warnings
import numpy as np
import pandas as pd
import scipy.stats as st
import statsmodels as sm
import matplotlib
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")
matplotlib.rcParams['figure.figsize'] = (16.0, 12.0)
matplotlib.style.use('ggplot')
# Create models from data
def best_fit_distribution(data, bins=75, ax=None):
"""Model data by finding best fit distribution to data"""
# Get histogram of original data
y, x = np.histogram(data, bins=bins, normed=True)
x = (x + np.roll(x, -1))[:-1] / 2.0
# Distributions to check
DISTRIBUTIONS = [
st.alpha,st.anglit,st.arcsine,st.beta,st.betaprime,st.bradford,st.burr,st.cauchy,st.chi,st.chi2,st.cosine,
st.dgamma,st.dweibull,st.erlang,st.expon,st.exponnorm,st.exponweib,st.exponpow,st.f,st.fatiguelife,st.fisk,
st.foldcauchy,st.foldnorm,st.frechet_r,st.frechet_l,st.genlogistic,st.genpareto,st.gennorm,st.genexpon,
st.genextreme,st.gausshyper,st.gamma,st.gengamma,st.genhalflogistic,st.gilbrat,st.gompertz,st.gumbel_r,
st.gumbel_l,st.halfcauchy,st.halflogistic,st.halfnorm,st.halfgennorm,st.hypsecant,st.invgamma,st.invgauss,
st.invweibull,st.johnsonsb,st.johnsonsu,st.ksone,st.kstwobign,st.laplace,st.levy,st.levy_l,st.levy_stable,
st.logistic,st.loggamma,st.loglaplace,st.lognorm,st.lomax,st.maxwell,st.mielke,st.nakagami,st.ncx2,st.ncf,
st.nct,st.norm,st.pareto,st.pearson3,st.powerlaw,st.powerlognorm,st.powernorm,st.rdist,st.reciprocal,
st.rayleigh,st.rice,st.recipinvgauss,st.semicircular,st.t,st.triang,st.truncexpon,st.truncnorm,st.tukeylambda,
st.uniform,st.vonmises,st.vonmises_line,st.wald,st.weibull_min,st.weibull_max,st.wrapcauchy
]
# Best holders
best_distribution = st.norm
best_params = (0.0, 1.0)
best_sse = np.inf
# Estimate distribution parameters from data
for distribution in DISTRIBUTIONS:
# Try to fit the distribution
try:
# Ignore warnings from data that can't be fit
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
# fit dist to data
params = distribution.fit(data)
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Calculate fitted PDF and error with fit in distribution
pdf = distribution.pdf(x, loc=loc, scale=scale, *arg)
sse = np.sum(np.power(y - pdf, 2.0))
# if axis pass in add to plot
try:
if ax:
pd.Series(pdf, x).plot(ax=ax)
except Exception:
pass
# identify if this distribution is better
if best_sse > sse > 0:
best_distribution = distribution
best_params = params
best_sse = sse
except Exception:
pass
return (best_distribution.name, best_params)
def make_pdf(dist, params, size=10000):
    """Generate the distribution's probability density function (PDF) as a pandas Series."""
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Get sane start and end points of distribution
start = dist.ppf(0.01, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.01, loc=loc, scale=scale)
end = dist.ppf(0.99, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.99, loc=loc, scale=scale)
# Build PDF and turn into pandas Series
x = np.linspace(start, end, size)
y = dist.pdf(x, loc=loc, scale=scale, *arg)
pdf = pd.Series(y, x)
return pdf
# Load data from statsmodels datasets
data = pd.read_csv('/home/idies/workspace/AS.171.205/data/b100.csv')
# Plot for comparison
plt.figure(figsize=(12,8))
ax = data.plot(kind='hist', bins=75, normed=True, alpha=0.5, color=plt.rcParams['axes.color_cycle'][1])
# Save plot limits
dataYLim = ax.get_ylim()
# Find best fit distribution
best_fit_name, best_fit_params = best_fit_distribution(data, 200, ax)
best_dist = getattr(st, best_fit_name)
# Update plots
ax.set_ylim(dataYLim)
ax.set_title(u'b100.csv.\n All Fitted Distributions')
ax.set_xlabel(u'Value')
ax.set_ylabel('Probability')
# Make PDF
pdf = make_pdf(best_dist, best_fit_params)
# Display
plt.figure(figsize=(12,8))
ax = pdf.plot(lw=2, label='PDF', legend=True)
data.plot(kind='hist', bins=50, normed=True, alpha=0.5, label='Data', legend=True, ax=ax)
param_names = (best_dist.shapes + ', loc, scale').split(', ') if best_dist.shapes else ['loc', 'scale']
param_str = ', '.join(['{}={:0.2f}'.format(k, v) for k, v in zip(param_names, best_fit_params)])
dist_str = '{}({})'.format(best_fit_name, param_str)
ax.set_title(u'b100.csv. with best fit distribution \n' + 'Best fit is: ' + dist_str)
ax.set_xlabel(u'Value')
ax.set_ylabel('Probability')
# ## Continued, with s057.csv:
#
# Same approach as in the a100 section above: code adapted from StackOverflow
# user "tmthydvnprt" (http://stackoverflow.com/questions/6620471/fitting-empirical-distribution-to-theoretical-ones-with-scipy-python);
# only the input file and the bin size change below.
#
# ## s057.csv
# In[45]:
get_ipython().magic('matplotlib inline')
import warnings
import numpy as np
import pandas as pd
import scipy.stats as st
import statsmodels as sm
import matplotlib
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")
matplotlib.rcParams['figure.figsize'] = (16.0, 12.0)
matplotlib.style.use('ggplot')
# Create models from data
def best_fit_distribution(data, bins=75, ax=None):
"""Model data by finding best fit distribution to data"""
# Get histogram of original data
y, x = np.histogram(data, bins=bins, normed=True)
x = (x + np.roll(x, -1))[:-1] / 2.0
# Distributions to check
DISTRIBUTIONS = [
st.alpha,st.anglit,st.arcsine,st.beta,st.betaprime,st.bradford,st.burr,st.cauchy,st.chi,st.chi2,st.cosine,
st.dgamma,st.dweibull,st.erlang,st.expon,st.exponnorm,st.exponweib,st.exponpow,st.f,st.fatiguelife,st.fisk,
st.foldcauchy,st.foldnorm,st.frechet_r,st.frechet_l,st.genlogistic,st.genpareto,st.gennorm,st.genexpon,
st.genextreme,st.gausshyper,st.gamma,st.gengamma,st.genhalflogistic,st.gilbrat,st.gompertz,st.gumbel_r,
st.gumbel_l,st.halfcauchy,st.halflogistic,st.halfnorm,st.halfgennorm,st.hypsecant,st.invgamma,st.invgauss,
st.invweibull,st.johnsonsb,st.johnsonsu,st.ksone,st.kstwobign,st.laplace,st.levy,st.levy_l,st.levy_stable,
st.logistic,st.loggamma,st.loglaplace,st.lognorm,st.lomax,st.maxwell,st.mielke,st.nakagami,st.ncx2,st.ncf,
st.nct,st.norm,st.pareto,st.pearson3,st.powerlaw,st.powerlognorm,st.powernorm,st.rdist,st.reciprocal,
st.rayleigh,st.rice,st.recipinvgauss,st.semicircular,st.t,st.triang,st.truncexpon,st.truncnorm,st.tukeylambda,
st.uniform,st.vonmises,st.vonmises_line,st.wald,st.weibull_min,st.weibull_max,st.wrapcauchy
]
# Best holders
best_distribution = st.norm
best_params = (0.0, 1.0)
best_sse = np.inf
# Estimate distribution parameters from data
for distribution in DISTRIBUTIONS:
# Try to fit the distribution
try:
# Ignore warnings from data that can't be fit
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
# fit dist to data
params = distribution.fit(data)
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Calculate fitted PDF and error with fit in distribution
pdf = distribution.pdf(x, loc=loc, scale=scale, *arg)
sse = np.sum(np.power(y - pdf, 2.0))
# if axis pass in add to plot
try:
if ax:
pd.Series(pdf, x).plot(ax=ax)
except Exception:
pass
# identify if this distribution is better
if best_sse > sse > 0:
best_distribution = distribution
best_params = params
best_sse = sse
except Exception:
pass
return (best_distribution.name, best_params)
def make_pdf(dist, params, size=10000):
    """Generate the distribution's probability density function (PDF) as a pandas Series."""
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Get sane start and end points of distribution
start = dist.ppf(0.01, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.01, loc=loc, scale=scale)
end = dist.ppf(0.99, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.99, loc=loc, scale=scale)
# Build PDF and turn into pandas Series
x = np.linspace(start, end, size)
y = dist.pdf(x, loc=loc, scale=scale, *arg)
pdf = pd.Series(y, x)
return pdf
# Load data from statsmodels datasets
data = pd.read_csv('/home/idies/workspace/AS.171.205/data/s057.csv')
# Plot for comparison
plt.figure(figsize=(12,8))
ax = data.plot(kind='hist', bins=75, normed=True, alpha=0.5, color=plt.rcParams['axes.color_cycle'][1])
# Save plot limits
dataYLim = ax.get_ylim()
# Find best fit distribution
best_fit_name, best_fit_params = best_fit_distribution(data, 200, ax)
best_dist = getattr(st, best_fit_name)
# Update plots
ax.set_ylim(dataYLim)
ax.set_title(u's057.csv.\n All Fitted Distributions')
ax.set_xlabel(u'Value')
ax.set_ylabel('Probability')
# Make PDF
pdf = make_pdf(best_dist, best_fit_params)
# Display
plt.figure(figsize=(12,8))
ax = pdf.plot(lw=2, label='PDF', legend=True)
data.plot(kind='hist', bins=100, normed=True, alpha=0.5, label='Data', legend=True, ax=ax)
param_names = (best_dist.shapes + ', loc, scale').split(', ') if best_dist.shapes else ['loc', 'scale']
param_str = ', '.join(['{}={:0.2f}'.format(k, v) for k, v in zip(param_names, best_fit_params)])
dist_str = '{}({})'.format(best_fit_name, param_str)
ax.set_title(u's057.csv. with best fit distribution \n' + 'Best fit is: ' + dist_str)
ax.set_xlabel(u'Value')
ax.set_ylabel('Probability') | 37.721154 | 472 | 0.679939 | 2,934 | 19,615 | 4.485344 | 0.12713 | 0.014362 | 0.015046 | 0.021885 | 0.944757 | 0.941261 | 0.939438 | 0.939438 | 0.92462 | 0.916945 | 0 | 0.029369 | 0.199745 | 19,615 | 520 | 473 | 37.721154 | 0.809008 | 0.279633 | 0 | 0.899281 | 0 | 0 | 0.086095 | 0.019752 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021583 | false | 0.021583 | 0.093525 | 0 | 0.136691 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f3cf9577d643ee1c20522df64ef1c5f618a8ca38 | 63 | py | Python | examples/modules/model_selection.py | snehankekre/streamlit-yellowbrick | fd94bf4554966390ee578831612350d613aa3de7 | [
"MIT"
] | 7 | 2021-06-08T10:24:19.000Z | 2022-02-02T11:57:56.000Z | examples/modules/model_selection.py | snehankekre/streamlit-yellowbrick | fd94bf4554966390ee578831612350d613aa3de7 | [
"MIT"
] | null | null | null | examples/modules/model_selection.py | snehankekre/streamlit-yellowbrick | fd94bf4554966390ee578831612350d613aa3de7 | [
"MIT"
] | null | null | null | import streamlit as st
def run_model_selection():
return
| 10.5 | 26 | 0.746032 | 9 | 63 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.206349 | 63 | 5 | 27 | 12.6 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
f3da9aa5a4b4feaf5f159d38f42bb62bf9cc7729 | 580 | py | Python | orders/services/complaints.py | Wipersee/profielp | e6144c51dbdb457dc85295904902bb8353ebda2e | [
"MIT"
] | null | null | null | orders/services/complaints.py | Wipersee/profielp | e6144c51dbdb457dc85295904902bb8353ebda2e | [
"MIT"
] | 1 | 2021-11-15T10:20:28.000Z | 2021-11-15T10:20:28.000Z | orders/services/complaints.py | Wipersee/profielp | e6144c51dbdb457dc85295904902bb8353ebda2e | [
"MIT"
] | null | null | null | # Here would be business logic
from orders.models import *
def get(id: int) -> dict:
return {"user": 1, "username": "test", "role": 2}
def update(id: int) -> dict:
return {"user": 1, "username": "test", "role": 2}
def create(id: int) -> dict:
return {"user": 1, "username": "test", "role": 2}
def delete(id: int) -> dict:
return {"user": 1, "username": "test", "role": 2}
def get_all(id: int) -> dict:
return {"user": 1, "username": "test", "role": 2}
def get_all_with_filter(id: int) -> dict:
return {"user": 1, "username": "test", "role": 2}
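# Hypothetical sketch of what `get` might look like once real business logic
# replaces these placeholders (the Complaint model and its fields are
# assumptions, not part of this codebase yet):
#
#   def get(id: int) -> dict:
#       complaint = Complaint.objects.get(pk=id)
#       return {"user": complaint.user_id,
#               "username": complaint.user.username,
#               "role": complaint.user.role_id}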
| 21.481481 | 53 | 0.574138 | 85 | 580 | 3.870588 | 0.317647 | 0.091185 | 0.164134 | 0.273556 | 0.756839 | 0.756839 | 0.756839 | 0.756839 | 0.756839 | 0.756839 | 0 | 0.025974 | 0.203448 | 580 | 26 | 54 | 22.307692 | 0.686147 | 0.048276 | 0 | 0.461538 | 0 | 0 | 0.218182 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.461538 | false | 0 | 0.076923 | 0.461538 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 9 |
6d53158dc090e806ee77f6f7782e2f6c77d0a7b7 | 2,172 | py | Python | migrations/versions/840486fe2851_add_supporting_flag_to_tables_macros_.py | ukblumf/randomise-it | 0610721eba649dfa205b0d3c4b3e24d67aa1d781 | [
"MIT"
] | null | null | null | migrations/versions/840486fe2851_add_supporting_flag_to_tables_macros_.py | ukblumf/randomise-it | 0610721eba649dfa205b0d3c4b3e24d67aa1d781 | [
"MIT"
] | null | null | null | migrations/versions/840486fe2851_add_supporting_flag_to_tables_macros_.py | ukblumf/randomise-it | 0610721eba649dfa205b0d3c4b3e24d67aa1d781 | [
"MIT"
] | null | null | null | """Add SUPPORTING flag to tables, macros and collections
Revision ID: 840486fe2851
Revises: ea0bb459dec9
Create Date: 2020-07-28 10:14:39.818140
"""
# revision identifiers, used by Alembic.
revision = '840486fe2851'
down_revision = 'ea0bb459dec9'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('collection', schema=None) as batch_op:
batch_op.add_column(sa.Column('supporting', sa.Boolean(), nullable=True))
with op.batch_alter_table('macros', schema=None) as batch_op:
batch_op.add_column(sa.Column('supporting', sa.Boolean(), nullable=True))
with op.batch_alter_table('public_collection', schema=None) as batch_op:
batch_op.add_column(sa.Column('supporting', sa.Boolean(), nullable=True))
with op.batch_alter_table('public_macros', schema=None) as batch_op:
batch_op.add_column(sa.Column('supporting', sa.Boolean(), nullable=True))
with op.batch_alter_table('public_random_table', schema=None) as batch_op:
batch_op.add_column(sa.Column('supporting', sa.Boolean(), nullable=True))
with op.batch_alter_table('random_table', schema=None) as batch_op:
batch_op.add_column(sa.Column('supporting', sa.Boolean(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('random_table', schema=None) as batch_op:
batch_op.drop_column('supporting')
with op.batch_alter_table('public_random_table', schema=None) as batch_op:
batch_op.drop_column('supporting')
with op.batch_alter_table('public_macros', schema=None) as batch_op:
batch_op.drop_column('supporting')
with op.batch_alter_table('public_collection', schema=None) as batch_op:
batch_op.drop_column('supporting')
with op.batch_alter_table('macros', schema=None) as batch_op:
batch_op.drop_column('supporting')
with op.batch_alter_table('collection', schema=None) as batch_op:
batch_op.drop_column('supporting')
# ### end Alembic commands ###
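# The migration is applied with the standard Alembic CLI, e.g.:
#   alembic upgrade 840486fe2851    # apply this revision
#   alembic downgrade ea0bb459dec9  # revert to the previous revision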
| 35.606557 | 81 | 0.718232 | 302 | 2,172 | 4.930464 | 0.18543 | 0.112827 | 0.08865 | 0.128946 | 0.80591 | 0.80591 | 0.80591 | 0.80591 | 0.80591 | 0.80591 | 0 | 0.027293 | 0.156538 | 2,172 | 60 | 82 | 36.2 | 0.78548 | 0.154236 | 0 | 0.8 | 0 | 0 | 0.165556 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.066667 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
edd23f00cc12ac800521a4fa7877e9371919befe | 138 | py | Python | holocron/__init__.py | chenjun2hao/Holocron | 039cdb5238df523ca8a09fea31a2ac9d5f04a0ba | [
"MIT"
] | 1 | 2019-11-28T10:01:58.000Z | 2019-11-28T10:01:58.000Z | holocron/__init__.py | chenjun2hao/Holocron | 039cdb5238df523ca8a09fea31a2ac9d5f04a0ba | [
"MIT"
] | null | null | null | holocron/__init__.py | chenjun2hao/Holocron | 039cdb5238df523ca8a09fea31a2ac9d5f04a0ba | [
"MIT"
] | null | null | null | from holocron import models
from holocron import nn
from holocron import optim
from holocron import utils
from .version import __version__ | 27.6 | 32 | 0.855072 | 20 | 138 | 5.7 | 0.4 | 0.421053 | 0.631579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137681 | 138 | 5 | 32 | 27.6 | 0.957983 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b685b2c877388b94c6393c0ba2b8c6e2be632d86 | 18 | py | Python | sample.py | barded1998/sample_60182226 | bb85bbf2c2ba6ae219584577e936b2171d577565 | [
"MIT"
] | null | null | null | sample.py | barded1998/sample_60182226 | bb85bbf2c2ba6ae219584577e936b2171d577565 | [
"MIT"
] | null | null | null | sample.py | barded1998/sample_60182226 | bb85bbf2c2ba6ae219584577e936b2171d577565 | [
"MIT"
] | null | null | null | print("60182226")
| 9 | 17 | 0.722222 | 2 | 18 | 6.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.470588 | 0.055556 | 18 | 1 | 18 | 18 | 0.294118 | 0 | 0 | 0 | 0 | 0 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
1e5096b464eff408ce038f3aed50218014cc1dc7 | 24 | py | Python | Lib/test/test_compiler/testcorpus/03_dict_ex.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | [
"CNRI-Python-GPL-Compatible"
] | 1,886 | 2021-05-03T23:58:43.000Z | 2022-03-31T19:15:58.000Z | Lib/test/test_compiler/testcorpus/03_dict_ex.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | [
"CNRI-Python-GPL-Compatible"
] | 70 | 2021-05-04T23:25:35.000Z | 2022-03-31T18:42:08.000Z | Lib/test/test_compiler/testcorpus/03_dict_ex.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | [
"CNRI-Python-GPL-Compatible"
] | 52 | 2021-05-04T21:26:03.000Z | 2022-03-08T18:02:56.000Z | {1: 2, **a, 3: 4, 5: 6}
| 12 | 23 | 0.291667 | 7 | 24 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.352941 | 0.291667 | 24 | 1 | 24 | 24 | 0.058824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1e64065778ca71fce0e200ed1b7b2c88aa897c15 | 8,383 | py | Python | undine/setup/generator/__setup_env__.py | Sungup/Undine | 8b130b86bab8ae2a1662191d3352ea11987429da | [
"MIT"
] | 1 | 2018-01-01T07:50:04.000Z | 2018-01-01T07:50:04.000Z | undine/setup/generator/__setup_env__.py | Sungup/Undine | 8b130b86bab8ae2a1662191d3352ea11987429da | [
"MIT"
] | null | null | null | undine/setup/generator/__setup_env__.py | Sungup/Undine | 8b130b86bab8ae2a1662191d3352ea11987429da | [
"MIT"
] | null | null | null | from collections import namedtuple
__OptItem = namedtuple('__OptItem',
('name', 'type', 'default', 'metavar', 'visible',
'help'))
class __Options:
def __init__(self, name, *options):
self.__name = name
self.__options = options
def __optname(self, name, prefix=''):
return '{}{}.{}'.format(prefix, self.__name, name)
def add_argument(self, parser):
for opt in self.__options:
if not opt.visible:
continue
parser.add_argument('--{}'.format(self.__optname(opt.name)),
dest=self.__optname(opt.name),
type=opt.type, default=opt.default,
metavar=opt.metavar, action='store',
help=opt.help)
def parse_args(self, **kwargs):
return {
o.name: kwargs[self.__optname(o.name)] if o.visible else o.default
for o in self.__options
}
@property
def name(self):
return self.__name
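# Usage sketch (hypothetical example, not part of the module): wire one
# __Options group into an argparse parser and read the grouped values back.
#
#   import argparse
#   opts = __Options('database',
#                    __OptItem('host', str, 'localhost', 'ADDRESS', True,
#                              'Database host address'))
#   parser = argparse.ArgumentParser()
#   opts.add_argument(parser)               # registers --database.host
#   args = parser.parse_args(['--database.host', 'db.example.com'])
#   values = opts.parse_args(**vars(args))  # -> {'host': 'db.example.com'}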
__DEFAULT_CONFIG = {
'mariadb-cli': [
__Options(
'database',
__OptItem('type', str, 'mariadb', 'DB_TYPE', False,
"Database type. Don't use this option."),
__OptItem('host', str, '<DB_HOST_ADDRESS>', 'ADDRESS', True,
'Your database host address'),
__OptItem('database', str, '<DB_NAME>', 'DB_NAME', True,
'Database name'),
__OptItem('user', str, '<DB_USER_ID>', 'ID', True,
'Database account id'),
__OptItem('password', str, '<DB_USER_PWD>', 'PASSWORD', True,
'Database account password')
),
__Options(
'task_queue',
__OptItem('host', str, '<RABBITMQ_HOST_ADDRESS>', 'ADDRESS', True,
'Your RabbitMQ host address for the global task queue'),
__OptItem('vhost', str, '<RABBITMQ_VHOST_NAME>', 'NAME', True,
'RabbitMQ vhost name'),
__OptItem('queue', str, '<RABBITMQ_QUEUE_NAME>', 'QUEUE', True,
'RabbitMQ task queue name'),
__OptItem('user', str, '<RABBITMQ_USER_ID>', 'ID', True,
'RabbitMQ account id'),
__OptItem('password', str, '<RABBITMQ_USER_PWD>', 'PASSWORD', True,
'RabbitMQ account password')
),
__Options(
'rpc',
__OptItem('host', str, '<RABBITMQ_HOST_ADDRESS>', 'ADDRESS', True,
'Your RabbitMQ host address for the RPC server'),
__OptItem('vhost', str, '<RABBITMQ_VHOST_NAME>', 'NAME', True,
'RabbitMQ vhost name for the RPC'),
__OptItem('user', str, '<RABBITMQ_USER_ID>', 'ID', True,
'RabbitMQ account id for the RPC'),
__OptItem('password', str, '<RABBITMQ_USER_PWD>', 'PASSWORD', True,
'RabbitMQ account password for the RPC')
)
],
'mariadb-svr': [
__Options(
'manager',
__OptItem('config_dir', str, '/tmp/undine/config', 'DIR', True,
'Config directory for the temporary task config file.'),
__OptItem('result_dir', str, '/tmp/undine/result', 'DIR', True,
'Result directory for the temporary task result file.'),
__OptItem('result_ext', str, '.log', 'EXT', True,
'Result file extension'),
__OptItem('input_dir', str, '<INPUT_FILE_HOME_PATH>', 'DIR', True,
"Input files' home directory")
),
__Options(
'scheduler',
__OptItem('worker_max', int, 16, 'WORKERS', True,
'Number of total workers on this system.'),
__OptItem('log_file', str, '/tmp/undine/sched.log', 'PATH', True,
'Scheduler log file path'),
__OptItem('log_level', str, 'info', 'LEVEL', True,
'Scheduler log inform level'),
__OptItem('task_interval', int, 1, 'SEC', True,
'Sleep interval between tasks')
),
__Options(
'driver',
__OptItem('type', str, 'mariadb', 'DB_TYPE', False,
"Database type. Don't use this option."),
__OptItem('config_ext', str, '.json', 'EXT', True,
'File extension of the temporary config file'),
__OptItem('log_file', str, '/tmp/undine/driver.log', 'PATH', True,
'Task driver log file path'),
__OptItem('log_level', str, 'info', 'LEVEL', True,
'Task driver log inform level'),
__OptItem('host', str, '<DB_HOST_ADDRESS>', 'ADDRESS', True,
'Your database host address'),
__OptItem('database', str, '<DB_NAME>', 'DB_NAME', True,
'Database name'),
__OptItem('user', str, '<DB_USER_ID>', 'ID', True,
'Database account id'),
__OptItem('password', str, '<DB_USER_PWD>', 'PASSWORD', True,
'Database account password')
),
__Options(
'task_queue',
__OptItem('host', str, '<RABBITMQ_HOST_ADDRESS>', 'ADDRESS', True,
'Your RabbitMQ host address for the global task queue'),
__OptItem('vhost', str, '<RABBITMQ_VHOST_NAME>', 'NAME', True,
'RabbitMQ vhost name'),
__OptItem('queue', str, '<RABBITMQ_QUEUE_NAME>', 'QUEUE', True,
'RabbitMQ task queue name'),
__OptItem('user', str, '<RABBITMQ_USER_ID>', 'ID', True,
'RabbitMQ account id'),
__OptItem('password', str, '<RABBITMQ_USER_PWD>', 'PASSWORD', True,
'RabbitMQ account password')
),
__Options(
'rpc',
__OptItem('host', str, '<RABBITMQ_HOST_ADDRESS>', 'ADDRESS', True,
'Your RabbitMQ host address for the RPC server'),
__OptItem('vhost', str, '<RABBITMQ_VHOST_NAME>', 'NAME', True,
'RabbitMQ vhost name for the RPC'),
__OptItem('user', str, '<RABBITMQ_USER_ID>', 'ID', True,
'RabbitMQ account id for the RPC'),
__OptItem('password', str, '<RABBITMQ_USER_PWD>', 'PASSWORD', True,
'RabbitMQ account password for the RPC')
)
],
'sqlite3': [
__Options(
'manager',
__OptItem('config_dir', str, '/tmp/undine/config', 'DIR', True,
'Config directory for the temporary task config file.'),
__OptItem('result_dir', str, '/tmp/undine/result', 'DIR', True,
'Result directory for the temporary task result file.'),
__OptItem('result_ext', str, '.log', 'EXT', True,
'Result file extension'),
__OptItem('input_dir', str, '<INPUT_FILE_HOME_PATH>', 'DIR', True,
"Input files' home directory")
),
__Options(
'scheduler',
__OptItem('worker_max', int, 16, 'WORKERS', True,
'Number of total workers on this system.'),
__OptItem('log_file', str, '/tmp/undine/sched.log', 'PATH', True,
'Scheduler log file path'),
__OptItem('log_level', str, 'info', 'LEVEL', True,
'Scheduler log inform level')
),
__Options(
'driver',
__OptItem('type', str, 'sqlite', 'DB_TYPE', False,
"Database type. Don't use this option."),
__OptItem('config_ext', str, '.json', 'EXT', True,
'File extension of the temporary config file'),
__OptItem('log_file', str, '/tmp/undine/driver.log', 'PATH', True,
'Task driver log file path'),
__OptItem('log_level', str, 'info', 'LEVEL', True,
'Task driver log inform level'),
__OptItem('db_file', str, 'missions.sqlite3', 'PATH', True,
'SQLite3 DB file containing all configurations to run')
)
]
}
| 46.314917 | 79 | 0.505308 | 814 | 8,383 | 4.902948 | 0.140049 | 0.049612 | 0.038086 | 0.033074 | 0.819093 | 0.80907 | 0.80907 | 0.80907 | 0.80907 | 0.80907 | 0 | 0.001494 | 0.361207 | 8,383 | 180 | 80 | 46.572222 | 0.743791 | 0 | 0 | 0.741176 | 0 | 0 | 0.370989 | 0.041513 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029412 | false | 0.070588 | 0.005882 | 0.017647 | 0.058824 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
94b339616d7890298adf2c606482d9584c452f38 | 18,498 | py | Python | test/user1_time.py | time-track-tool/time-track-tool | a1c280f32a7766e460c862633b748fa206256f24 | [
"MIT"
] | null | null | null | test/user1_time.py | time-track-tool/time-track-tool | a1c280f32a7766e460c862633b748fa206256f24 | [
"MIT"
] | 1 | 2019-07-03T13:32:38.000Z | 2019-07-03T13:32:38.000Z | test/user1_time.py | time-track-tool/time-track-tool | a1c280f32a7766e460c862633b748fa206256f24 | [
"MIT"
] | 1 | 2019-05-15T16:01:31.000Z | 2019-05-15T16:01:31.000Z | from roundup import date
def import_data_1 (db, user) :
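    # The explicit blocks below create one daily_record per date; the same
    # fixture could be generated with a loop (equivalent sketch, date list
    # elided):
    #   for d in ('2006-01-23', '2006-01-24', ...):
    #       dr = db.daily_record.create (user = user, date = date.Date (d))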
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-01-23')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-01-24')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-01-25')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-01-26')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-01-27')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-01-28')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-01-29')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-02-06')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-02-07')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-02-08')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-02-09')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-02-10')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-02-11')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2006-02-12')
)
    daily_dates = \
        ( ['2006-02-%02d' % d for d in range (13, 20)]
        + ['2006-03-%02d' % d for d in range (20, 27)]
        + ['2010-01-%02d' % d for d in range (12, 15)]
        + ['2009-11-30']
        + ['2009-12-%02d' % d for d in range (1, 31)]
        + ['2009-11-%02d' % d for d in range (23, 30)]
        + ['2009-11-%02d' % d for d in range (1, 23)]
        + ['2009-10-%02d' % d for d in range (1, 30)]
        + ['2010-02-%02d' % d for d in range (3, 6)]
        + ['2010-01-%02d' % d for d in range (4, 11)]
        + ['2010-01-01', '2010-01-02', '2010-01-03', '2010-01-11']
        + ['2010-01-%02d' % d for d in range (15, 25)]
        + ['2010-02-%02d' % d for d in range (7, 10)]
        + ['2010-01-31', '2010-02-01', '2010-02-02', '2010-02-06']
        + ['2010-01-%02d' % d for d in range (25, 31)]
        )
    durations = \
        { '2009-12-08' : 2.0
        , '2009-12-24' : 1.0
        , '2009-12-25' : 2.0
        , '2009-10-26' : 2.0
        , '2010-01-01' : 2.0
        }
    for d in daily_dates :
        dr = db.daily_record.create \
            ( user = user
            , date = date.Date (d)
            )
        if d in durations :
            db.time_record.create \
                ( daily_record  = dr
                , duration      = durations [d]
                , work_location = '5'
                , wp            = '1'
                )
db.commit ()
# end def import_data_1
| 27.858434 | 42 | 0.444372 | 2,133 | 18,498 | 3.771214 | 0.026723 | 0.312282 | 0.175659 | 0.292765 | 0.98993 | 0.98993 | 0.98993 | 0.98993 | 0.985082 | 0.985082 | 0 | 0.11806 | 0.414802 | 18,498 | 663 | 43 | 27.900452 | 0.625035 | 0.001135 | 0 | 0.511346 | 0 | 0 | 0.085521 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.001513 | false | 0 | 0.003026 | 0 | 0.004539 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
94ce6e778bfe4977634e59c967dce9f4d9afa0fb | 299 | py | Python | rfvision/components/__init__.py | mvig-robotflow/rfvision | cc662f213dfe5a3e8864a6b5685a668a4436e397 | [
"Apache-2.0"
] | 6 | 2021-09-25T03:53:06.000Z | 2022-02-19T03:25:11.000Z | rfvision/components/__init__.py | mvig-robotflow/rfvision | cc662f213dfe5a3e8864a6b5685a668a4436e397 | [
"Apache-2.0"
] | 1 | 2021-07-21T13:14:54.000Z | 2021-07-21T13:14:54.000Z | rfvision/components/__init__.py | mvig-robotflow/rfvision | cc662f213dfe5a3e8864a6b5685a668a4436e397 | [
"Apache-2.0"
] | 2 | 2021-07-16T03:25:04.000Z | 2021-11-22T06:04:01.000Z | from .backbones import * # noqa: F401,F403
from .dense_heads import * # noqa: F401,F403
from .losses import * # noqa: F401,F403
from .losses_pose import *  # noqa: F401,F403
from .necks import * # noqa: F401,F403
from .roi_heads import * # noqa: F401,F403
from .fusion_layers import *  # noqa: F401,F403
from .keypoint_head import * | 37.375 | 45 | 0.725753 | 44 | 299 | 4.818182 | 0.363636 | 0.235849 | 0.330189 | 0.424528 | 0.622642 | 0.415094 | 0 | 0 | 0 | 0 | 0 | 0.120968 | 0.170569 | 299 | 8 | 46 | 37.375 | 0.733871 | 0.264214 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
bf6b5b5a19a70ae6c187f08033ac89de4967fbc9 | 37,787 | py | Python | kuryr_libnetwork/tests/unit/test_kuryr.py | celebdor/kuryr-libnetwork | 75f15770bc22ae6b55bb1d35437b5c9f8b964b67 | [
"Apache-2.0"
] | null | null | null | kuryr_libnetwork/tests/unit/test_kuryr.py | celebdor/kuryr-libnetwork | 75f15770bc22ae6b55bb1d35437b5c9f8b964b67 | [
"Apache-2.0"
] | null | null | null | kuryr_libnetwork/tests/unit/test_kuryr.py | celebdor/kuryr-libnetwork | 75f15770bc22ae6b55bb1d35437b5c9f8b964b67 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import uuid
import ddt
from oslo_serialization import jsonutils
from kuryr.lib import constants as lib_const
from kuryr.lib import utils as lib_utils
from kuryr_libnetwork import app
from kuryr_libnetwork import config
from kuryr_libnetwork import constants
from kuryr_libnetwork.tests.unit import base
from kuryr_libnetwork import utils
@ddt.ddt
class TestKuryr(base.TestKuryrBase):
"""Basic unitests for libnetwork remote driver URI endpoints.
This test class covers the following HTTP methods and URIs as described in
the remote driver specification as below:
https://github.com/docker/libnetwork/blob/3c8e06bc0580a2a1b2440fe0792fbfcd43a9feca/docs/remote.md # noqa
- POST /Plugin.Activate
- POST /NetworkDriver.GetCapabilities
- POST /NetworkDriver.CreateNetwork
- POST /NetworkDriver.DeleteNetwork
- POST /NetworkDriver.CreateEndpoint
- POST /NetworkDriver.EndpointOperInfo
- POST /NetworkDriver.DeleteEndpoint
- POST /NetworkDriver.Join
- POST /NetworkDriver.Leave
- POST /NetworkDriver.DiscoverNew
- POST /NetworkDriver.DiscoverDelete
"""
@ddt.data(('/Plugin.Activate', constants.SCHEMA['PLUGIN_ACTIVATE']),
('/NetworkDriver.GetCapabilities',
{'Scope': config.CONF.capability_scope}),
('/NetworkDriver.DiscoverNew', constants.SCHEMA['SUCCESS']),
('/NetworkDriver.DiscoverDelete', constants.SCHEMA['SUCCESS']))
@ddt.unpack
def test_remote_driver_endpoint(self, endpoint, expected):
response = self.app.post(endpoint)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(expected, decoded_json)
def test_network_driver_create_network(self):
docker_network_id = lib_utils.get_hash()
self.mox.StubOutWithMock(app.neutron, "create_network")
fake_request = {
"network": {
"name": utils.make_net_name(docker_network_id),
"admin_state_up": True
}
}
# The following fake response is retrieved from the Neutron doc:
# http://developer.openstack.org/api-ref-networking-v2.html#createNetwork # noqa
fake_neutron_net_id = "4e8e5957-649f-477b-9e5b-f1f75b21c03c"
fake_response = {
"network": {
"status": "ACTIVE",
"subnets": [],
"name": utils.make_net_name(docker_network_id),
"admin_state_up": True,
"tenant_id": "9bacb3c5d39d41a79512987f338cf177",
"router:external": False,
"segments": [],
"shared": False,
"id": fake_neutron_net_id
}
}
app.neutron.create_network(fake_request).AndReturn(fake_response)
self.mox.StubOutWithMock(app.neutron, "add_tag")
tags = utils.create_net_tags(docker_network_id)
for tag in tags:
app.neutron.add_tag('networks', fake_neutron_net_id, tag)
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
fake_existing_subnets_response = {
"subnets": []
}
fake_cidr_v4 = '192.168.42.0/24'
app.neutron.list_subnets(
network_id=fake_neutron_net_id,
cidr=fake_cidr_v4).AndReturn(fake_existing_subnets_response)
self.mox.StubOutWithMock(app.neutron, 'create_subnet')
fake_subnet_request = {
"subnets": [{
'name': fake_cidr_v4,
'network_id': fake_neutron_net_id,
'ip_version': 4,
'cidr': fake_cidr_v4,
'enable_dhcp': app.enable_dhcp,
'gateway_ip': '192.168.42.1',
}]
}
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
fake_neutron_net_id, subnet_v4_id,
name=fake_cidr_v4, cidr=fake_cidr_v4)
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
app.neutron.create_subnet(
fake_subnet_request).AndReturn(fake_subnet_response)
self.mox.ReplayAll()
network_request = {
'NetworkID': docker_network_id,
'IPv4Data': [{
'AddressSpace': 'foo',
'Pool': '192.168.42.0/24',
'Gateway': '192.168.42.1/24',
}],
'IPv6Data': [{
'AddressSpace': 'bar',
'Pool': 'fe80::/64',
'Gateway': 'fe80::f816:3eff:fe20:57c3/64',
}],
'Options': {}
}
response = self.app.post('/NetworkDriver.CreateNetwork',
content_type='application/json',
data=jsonutils.dumps(network_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
def test_network_driver_create_network_with_net_name_option(self):
docker_network_id = lib_utils.get_hash()
fake_neutron_net_id = "4e8e5957-649f-477b-9e5b-f1f75b21c03c"
self.mox.StubOutWithMock(app.neutron, "list_networks")
fake_neutron_net_name = 'my_network_name'
fake_existing_networks_response = {
"networks": [{
"status": "ACTIVE",
"subnets": [],
"admin_state_up": True,
"tenant_id": "9bacb3c5d39d41a79512987f338cf177",
"router:external": False,
"segments": [],
"shared": False,
"id": fake_neutron_net_id,
"name": "my_network_name"
}]
}
app.neutron.list_networks(
name=fake_neutron_net_name).AndReturn(
fake_existing_networks_response)
self.mox.StubOutWithMock(app.neutron, "add_tag")
tags = utils.create_net_tags(docker_network_id)
for tag in tags:
app.neutron.add_tag('networks', fake_neutron_net_id, tag)
app.neutron.add_tag(
'networks', fake_neutron_net_id, 'kuryr.net.existing')
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
fake_existing_subnets_response = {
"subnets": []
}
fake_cidr_v4 = '192.168.42.0/24'
app.neutron.list_subnets(
network_id=fake_neutron_net_id,
cidr=fake_cidr_v4).AndReturn(fake_existing_subnets_response)
self.mox.StubOutWithMock(app.neutron, 'create_subnet')
fake_subnet_request = {
"subnets": [{
'name': fake_cidr_v4,
'network_id': fake_neutron_net_id,
'ip_version': 4,
'cidr': fake_cidr_v4,
'enable_dhcp': app.enable_dhcp,
'gateway_ip': '192.168.42.1',
}]
}
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
fake_neutron_net_id, subnet_v4_id,
name=fake_cidr_v4, cidr=fake_cidr_v4)
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
app.neutron.create_subnet(
fake_subnet_request).AndReturn(fake_subnet_response)
self.mox.ReplayAll()
network_request = {
'NetworkID': docker_network_id,
'IPv4Data': [{
'AddressSpace': 'foo',
'Pool': '192.168.42.0/24',
'Gateway': '192.168.42.1/24',
}],
'IPv6Data': [{
'AddressSpace': 'bar',
'Pool': 'fe80::/64',
'Gateway': 'fe80::f816:3eff:fe20:57c3/64',
}],
'Options': {
'com.docker.network.enable_ipv6': False,
'com.docker.network.generic': {
'neutron.net.name': 'my_network_name'
}
}
}
response = self.app.post('/NetworkDriver.CreateNetwork',
content_type='application/json',
data=jsonutils.dumps(network_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
def test_network_driver_create_network_with_netid_option(self):
docker_network_id = lib_utils.get_hash()
fake_neutron_net_id = "4e8e5957-649f-477b-9e5b-f1f75b21c03c"
self.mox.StubOutWithMock(app.neutron, "list_networks")
fake_existing_networks_response = {
"networks": [{
"status": "ACTIVE",
"subnets": [],
"admin_state_up": True,
"tenant_id": "9bacb3c5d39d41a79512987f338cf177",
"router:external": False,
"segments": [],
"shared": False,
"id": fake_neutron_net_id,
}]
}
app.neutron.list_networks(
id=fake_neutron_net_id).AndReturn(
fake_existing_networks_response)
self.mox.StubOutWithMock(app.neutron, "add_tag")
tags = utils.create_net_tags(docker_network_id)
for tag in tags:
app.neutron.add_tag('networks', fake_neutron_net_id, tag)
app.neutron.add_tag(
'networks', fake_neutron_net_id, 'kuryr.net.existing')
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
fake_existing_subnets_response = {
"subnets": []
}
fake_cidr_v4 = '192.168.42.0/24'
app.neutron.list_subnets(
network_id=fake_neutron_net_id,
cidr=fake_cidr_v4).AndReturn(fake_existing_subnets_response)
self.mox.StubOutWithMock(app.neutron, 'create_subnet')
fake_subnet_request = {
"subnets": [{
'name': fake_cidr_v4,
'network_id': fake_neutron_net_id,
'ip_version': 4,
'cidr': fake_cidr_v4,
'enable_dhcp': app.enable_dhcp,
'gateway_ip': '192.168.42.1',
}]
}
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
fake_neutron_net_id, subnet_v4_id,
name=fake_cidr_v4, cidr=fake_cidr_v4)
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
app.neutron.create_subnet(
fake_subnet_request).AndReturn(fake_subnet_response)
self.mox.ReplayAll()
network_request = {
'NetworkID': docker_network_id,
'IPv4Data': [{
'AddressSpace': 'foo',
'Pool': '192.168.42.0/24',
'Gateway': '192.168.42.1/24',
}],
'IPv6Data': [{
'AddressSpace': 'bar',
'Pool': 'fe80::/64',
'Gateway': 'fe80::f816:3eff:fe20:57c3/64',
}],
'Options': {
'com.docker.network.enable_ipv6': False,
'com.docker.network.generic': {
'neutron.net.uuid': '4e8e5957-649f-477b-9e5b-f1f75b21c03c'
}
}
}
response = self.app.post('/NetworkDriver.CreateNetwork',
content_type='application/json',
data=jsonutils.dumps(network_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
def test_network_driver_create_network_with_pool_name_option(self):
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = "fake_pool_name"
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, name=fake_name)
app.neutron.list_subnetpools(name=fake_name).AndReturn(
{'subnetpools': kuryr_subnetpools['subnetpools']})
docker_network_id = lib_utils.get_hash()
self.mox.StubOutWithMock(app.neutron, "create_network")
fake_request = {
"network": {
"name": utils.make_net_name(docker_network_id),
"admin_state_up": True
}
}
# The following fake response is retrieved from the Neutron doc:
# http://developer.openstack.org/api-ref-networking-v2.html#createNetwork # noqa
fake_neutron_net_id = "4e8e5957-649f-477b-9e5b-f1f75b21c03c"
fake_response = {
"network": {
"status": "ACTIVE",
"subnets": [],
"name": utils.make_net_name(docker_network_id),
"admin_state_up": True,
"tenant_id": "9bacb3c5d39d41a79512987f338cf177",
"router:external": False,
"segments": [],
"shared": False,
"id": fake_neutron_net_id
}
}
app.neutron.create_network(fake_request).AndReturn(fake_response)
self.mox.StubOutWithMock(app.neutron, "add_tag")
tags = utils.create_net_tags(docker_network_id)
for tag in tags:
app.neutron.add_tag('networks', fake_neutron_net_id, tag)
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
fake_existing_subnets_response = {
"subnets": []
}
fake_cidr_v4 = '192.168.42.0/24'
app.neutron.list_subnets(
network_id=fake_neutron_net_id,
cidr=fake_cidr_v4).AndReturn(fake_existing_subnets_response)
self.mox.StubOutWithMock(app.neutron, 'create_subnet')
fake_subnet_request = {
"subnets": [{
'name': fake_cidr_v4,
'network_id': fake_neutron_net_id,
'ip_version': 4,
'cidr': fake_cidr_v4,
'enable_dhcp': app.enable_dhcp,
'gateway_ip': '192.168.42.1',
'subnetpool_id': fake_kuryr_subnetpool_id,
}]
}
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
fake_neutron_net_id, subnet_v4_id,
name=fake_cidr_v4, cidr=fake_cidr_v4)
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
app.neutron.create_subnet(
fake_subnet_request).AndReturn(fake_subnet_response)
self.mox.ReplayAll()
network_request = {
'NetworkID': docker_network_id,
'IPv4Data': [{
'AddressSpace': 'foo',
'Pool': '192.168.42.0/24',
'Gateway': '192.168.42.1/24',
}],
'IPv6Data': [{
'AddressSpace': 'bar',
'Pool': 'fe80::/64',
'Gateway': 'fe80::f816:3eff:fe20:57c3/64',
}],
'Options': {
'com.docker.network.enable_ipv6': False,
'com.docker.network.generic': {
'neutron.pool.name': 'fake_pool_name'
}
}
}
response = self.app.post('/NetworkDriver.CreateNetwork',
content_type='application/json',
data=jsonutils.dumps(network_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
def test_network_driver_create_network_wo_gw(self):
docker_network_id = lib_utils.get_hash()
self.mox.StubOutWithMock(app.neutron, "create_network")
fake_request = {
"network": {
"name": utils.make_net_name(docker_network_id),
"admin_state_up": True
}
}
# The following fake response is retrieved from the Neutron doc:
# http://developer.openstack.org/api-ref-networking-v2.html#createNetwork # noqa
fake_neutron_net_id = "4e8e5957-649f-477b-9e5b-f1f75b21c03c"
fake_response = {
"network": {
"status": "ACTIVE",
"subnets": [],
"name": utils.make_net_name(docker_network_id),
"admin_state_up": True,
"tenant_id": "9bacb3c5d39d41a79512987f338cf177",
"router:external": False,
"segments": [],
"shared": False,
"id": fake_neutron_net_id
}
}
app.neutron.create_network(fake_request).AndReturn(fake_response)
self.mox.StubOutWithMock(app.neutron, "add_tag")
tags = utils.create_net_tags(docker_network_id)
for tag in tags:
app.neutron.add_tag('networks', fake_neutron_net_id, tag)
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
fake_existing_subnets_response = {
"subnets": []
}
fake_cidr_v4 = '192.168.42.0/24'
app.neutron.list_subnets(
network_id=fake_neutron_net_id,
cidr=fake_cidr_v4).AndReturn(fake_existing_subnets_response)
self.mox.StubOutWithMock(app.neutron, 'create_subnet')
fake_subnet_request = {
"subnets": [{
'name': fake_cidr_v4,
'network_id': fake_neutron_net_id,
'ip_version': 4,
'cidr': fake_cidr_v4,
'enable_dhcp': app.enable_dhcp,
}]
}
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
fake_neutron_net_id, subnet_v4_id,
name=fake_cidr_v4, cidr=fake_cidr_v4)
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
app.neutron.create_subnet(
fake_subnet_request).AndReturn(fake_subnet_response)
self.mox.ReplayAll()
network_request = {
'NetworkID': docker_network_id,
'IPv4Data': [{
'AddressSpace': 'foo',
'Pool': '192.168.42.0/24',
}],
'IPv6Data': [{
'AddressSpace': 'bar',
'Pool': 'fe80::/64',
'Gateway': 'fe80::f816:3eff:fe20:57c3/64',
}],
'Options': {}
}
response = self.app.post('/NetworkDriver.CreateNetwork',
content_type='application/json',
data=jsonutils.dumps(network_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
def test_network_driver_create_network_with_network_id_not_exist(self):
docker_network_id = lib_utils.get_hash()
self.mox.StubOutWithMock(app.neutron, "list_networks")
fake_neutron_net_id = str(uuid.uuid4())
fake_existing_networks_response = {
"networks": []
}
app.neutron.list_networks(
id=fake_neutron_net_id).AndReturn(
fake_existing_networks_response)
self.mox.ReplayAll()
network_request = {
'NetworkID': docker_network_id,
'IPv4Data': [{
'AddressSpace': 'foo',
'Pool': '192.168.42.0/24',
}],
'IPv6Data': [{
'AddressSpace': 'bar',
'Pool': 'fe80::/64',
'Gateway': 'fe80::f816:3eff:fe20:57c3/64',
}],
'Options': {
constants.NETWORK_GENERIC_OPTIONS: {
constants.NEUTRON_UUID_OPTION: fake_neutron_net_id
}
}
}
response = self.app.post('/NetworkDriver.CreateNetwork',
content_type='application/json',
data=jsonutils.dumps(network_request))
self.assertEqual(500, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertIn('Err', decoded_json)
err_message = ("Specified network id/name({0}) does not "
"exist.").format(fake_neutron_net_id)
self.assertEqual({'Err': err_message}, decoded_json)
def test_network_driver_create_network_with_network_name_not_exist(self):
docker_network_id = lib_utils.get_hash()
self.mox.StubOutWithMock(app.neutron, "list_networks")
fake_neutron_network_name = "fake_network"
fake_existing_networks_response = {
"networks": []
}
app.neutron.list_networks(
name=fake_neutron_network_name).AndReturn(
fake_existing_networks_response)
self.mox.ReplayAll()
network_request = {
'NetworkID': docker_network_id,
'IPv4Data': [{
'AddressSpace': 'foo',
'Pool': '192.168.42.0/24',
}],
'IPv6Data': [{
'AddressSpace': 'bar',
'Pool': 'fe80::/64',
'Gateway': 'fe80::f816:3eff:fe20:57c3/64',
}],
'Options': {
constants.NETWORK_GENERIC_OPTIONS: {
constants.NEUTRON_NAME_OPTION: fake_neutron_network_name
}
}
}
response = self.app.post('/NetworkDriver.CreateNetwork',
content_type='application/json',
data=jsonutils.dumps(network_request))
self.assertEqual(500, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertIn('Err', decoded_json)
err_message = ("Specified network id/name({0}) does not "
"exist.").format(fake_neutron_network_name)
self.assertEqual({'Err': err_message}, decoded_json)
def test_network_driver_delete_network(self):
docker_network_id = lib_utils.get_hash()
fake_neutron_net_id = str(uuid.uuid4())
self._mock_out_network(fake_neutron_net_id, docker_network_id,
check_existing=True)
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
fake_neutron_subnets_response = {"subnets": []}
app.neutron.list_subnets(network_id=fake_neutron_net_id).AndReturn(
fake_neutron_subnets_response)
self.mox.StubOutWithMock(app.neutron, 'delete_network')
app.neutron.delete_network(fake_neutron_net_id).AndReturn(None)
self.mox.ReplayAll()
data = {'NetworkID': docker_network_id}
response = self.app.post('/NetworkDriver.DeleteNetwork',
content_type='application/json',
data=jsonutils.dumps(data))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
def test_network_driver_delete_network_with_subnets(self):
docker_network_id = lib_utils.get_hash()
docker_endpoint_id = lib_utils.get_hash()
fake_neutron_net_id = str(uuid.uuid4())
self._mock_out_network(fake_neutron_net_id, docker_network_id,
check_existing=True)
# The following fake response is retrieved from the Neutron doc:
# http://developer.openstack.org/api-ref-networking-v2.html#createSubnet # noqa
subnet_v4_id = "9436e561-47bf-436a-b1f1-fe23a926e031"
subnet_v6_id = "64dd4a98-3d7a-4bfd-acf4-91137a8d2f51"
fake_v4_subnet = self._get_fake_v4_subnet(
docker_network_id, docker_endpoint_id, subnet_v4_id)
fake_v6_subnet = self._get_fake_v6_subnet(
docker_network_id, docker_endpoint_id, subnet_v6_id)
fake_subnets_response = {
"subnets": [
fake_v4_subnet['subnet'],
fake_v6_subnet['subnet']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(network_id=fake_neutron_net_id).AndReturn(
fake_subnets_response)
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_subnetpools_response = {"subnetpools": []}
app.neutron.list_subnetpools(name='kuryr').AndReturn(
fake_subnetpools_response)
app.neutron.list_subnetpools(name='kuryr6').AndReturn(
fake_subnetpools_response)
self.mox.StubOutWithMock(app.neutron, 'delete_subnet')
app.neutron.delete_subnet(subnet_v4_id).AndReturn(None)
app.neutron.delete_subnet(subnet_v6_id).AndReturn(None)
self.mox.StubOutWithMock(app.neutron, 'delete_network')
app.neutron.delete_network(fake_neutron_net_id).AndReturn(None)
self.mox.ReplayAll()
data = {'NetworkID': docker_network_id}
response = self.app.post('/NetworkDriver.DeleteNetwork',
content_type='application/json',
data=jsonutils.dumps(data))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
def test_network_driver_create_endpoint(self):
docker_network_id = lib_utils.get_hash()
docker_endpoint_id = lib_utils.get_hash()
fake_neutron_net_id = str(uuid.uuid4())
self._mock_out_network(fake_neutron_net_id, docker_network_id)
# The following fake response is retrieved from the Neutron doc:
# http://developer.openstack.org/api-ref-networking-v2.html#createSubnet # noqa
subnet_v4_id = "9436e561-47bf-436a-b1f1-fe23a926e031"
subnet_v6_id = "64dd4a98-3d7a-4bfd-acf4-91137a8d2f51"
fake_v4_subnet = self._get_fake_v4_subnet(
docker_network_id, docker_endpoint_id, subnet_v4_id)
fake_v6_subnet = self._get_fake_v6_subnet(
docker_network_id, docker_endpoint_id, subnet_v6_id)
fake_subnetv4_response = {
"subnets": [
fake_v4_subnet['subnet']
]
}
fake_subnetv6_response = {
"subnets": [
fake_v6_subnet['subnet']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(network_id=fake_neutron_net_id,
cidr='192.168.1.0/24').AndReturn(fake_subnetv4_response)
app.neutron.list_subnets(
network_id=fake_neutron_net_id,
cidr='fe80::/64').AndReturn(fake_subnetv6_response)
fake_ipv4cidr = '192.168.1.2/24'
fake_ipv6cidr = 'fe80::f816:3eff:fe20:57c4/64'
fake_port_id = str(uuid.uuid4())
fake_port = self._get_fake_port(
docker_endpoint_id, fake_neutron_net_id,
fake_port_id, lib_const.PORT_STATUS_ACTIVE,
subnet_v4_id, subnet_v6_id)
fake_fixed_ips = ['subnet_id=%s' % subnet_v4_id,
'ip_address=192.168.1.2',
'subnet_id=%s' % subnet_v6_id,
'ip_address=fe80::f816:3eff:fe20:57c4']
fake_port_response = {
"ports": [
fake_port['port']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_ports')
app.neutron.list_ports(fixed_ips=fake_fixed_ips).AndReturn(
fake_port_response)
fake_updated_port = fake_port['port']
fake_updated_port['name'] = '-'.join([docker_endpoint_id, 'port'])
self.mox.StubOutWithMock(app.neutron, 'update_port')
app.neutron.update_port(fake_updated_port['id'], {'port': {
'name': fake_updated_port['name'],
'device_owner': lib_const.DEVICE_OWNER,
'device_id': docker_endpoint_id}}).AndReturn(fake_port)
self.mox.ReplayAll()
data = {
'NetworkID': docker_network_id,
'EndpointID': docker_endpoint_id,
'Options': {},
'Interface': {
'Address': fake_ipv4cidr,
'AddressIPv6': fake_ipv6cidr,
'MacAddress': "fa:16:3e:20:57:c3"
}
}
response = self.app.post('/NetworkDriver.CreateEndpoint',
content_type='application/json',
data=jsonutils.dumps(data))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
expected = {'Interface': {}}
self.assertEqual(expected, decoded_json)
def test_network_driver_endpoint_operational_info_with_no_port(self):
docker_network_id = lib_utils.get_hash()
docker_endpoint_id = lib_utils.get_hash()
fake_port_response = {"ports": []}
with mock.patch.object(app.neutron, 'list_ports') as mock_list_ports:
data = {
'NetworkID': docker_network_id,
'EndpointID': docker_endpoint_id,
}
mock_list_ports.return_value = fake_port_response
response = self.app.post('/NetworkDriver.EndpointOperInfo',
content_type='application/json',
data=jsonutils.dumps(data))
decoded_json = jsonutils.loads(response.data)
self.assertEqual(200, response.status_code)
port_name = utils.get_neutron_port_name(docker_endpoint_id)
mock_list_ports.assert_called_once_with(name=port_name)
self.assertEqual({}, decoded_json['Value'])
def test_network_driver_endpoint_operational_info(self):
docker_network_id = lib_utils.get_hash()
docker_endpoint_id = lib_utils.get_hash()
fake_neutron_net_id = str(uuid.uuid4())
fake_port_id = str(uuid.uuid4())
fake_port = self._get_fake_port(
docker_endpoint_id, fake_neutron_net_id,
fake_port_id, lib_const.PORT_STATUS_ACTIVE)
fake_port_response = {
"ports": [
fake_port['port']
]
}
with mock.patch.object(app.neutron, 'list_ports') as mock_list_ports:
data = {
'NetworkID': docker_network_id,
'EndpointID': docker_endpoint_id,
}
mock_list_ports.return_value = fake_port_response
response = self.app.post('/NetworkDriver.EndpointOperInfo',
content_type='application/json',
data=jsonutils.dumps(data))
decoded_json = jsonutils.loads(response.data)
self.assertEqual(200, response.status_code)
port_name = utils.get_neutron_port_name(docker_endpoint_id)
mock_list_ports.assert_called_once_with(name=port_name)
self.assertEqual(fake_port_response['ports'][0]['status'],
decoded_json['Value']['status'])
def test_network_driver_delete_endpoint(self):
docker_network_id = lib_utils.get_hash()
docker_endpoint_id = lib_utils.get_hash()
data = {
'NetworkID': docker_network_id,
'EndpointID': docker_endpoint_id,
}
response = self.app.post('/NetworkDriver.DeleteEndpoint',
content_type='application/json',
data=jsonutils.dumps(data))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
@ddt.data(
(False), (True))
def test_network_driver_join(self, vif_plug_is_fatal):
if vif_plug_is_fatal:
self.mox.StubOutWithMock(app, "vif_plug_is_fatal")
app.vif_plug_is_fatal = True
fake_docker_net_id = lib_utils.get_hash()
fake_docker_endpoint_id = lib_utils.get_hash()
fake_container_id = lib_utils.get_hash()
fake_neutron_net_id = str(uuid.uuid4())
fake_neutron_network = self._mock_out_network(
fake_neutron_net_id, fake_docker_net_id)
fake_neutron_port_id = str(uuid.uuid4())
self.mox.StubOutWithMock(app.neutron, 'list_ports')
neutron_port_name = utils.get_neutron_port_name(
fake_docker_endpoint_id)
fake_neutron_v4_subnet_id = str(uuid.uuid4())
fake_neutron_v6_subnet_id = str(uuid.uuid4())
fake_neutron_ports_response = self._get_fake_ports(
fake_docker_endpoint_id, fake_neutron_net_id,
fake_neutron_port_id, lib_const.PORT_STATUS_DOWN,
fake_neutron_v4_subnet_id, fake_neutron_v6_subnet_id)
app.neutron.list_ports(name=neutron_port_name).AndReturn(
fake_neutron_ports_response)
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
fake_neutron_subnets_response = self._get_fake_subnets(
fake_docker_endpoint_id, fake_neutron_net_id,
fake_neutron_v4_subnet_id, fake_neutron_v6_subnet_id)
app.neutron.list_subnets(network_id=fake_neutron_net_id).AndReturn(
fake_neutron_subnets_response)
fake_neutron_port = fake_neutron_ports_response['ports'][0]
fake_neutron_subnets = fake_neutron_subnets_response['subnets']
_, fake_peer_name, _ = self._mock_out_binding(
fake_docker_endpoint_id, fake_neutron_port,
fake_neutron_subnets, fake_neutron_network['networks'][0])
if vif_plug_is_fatal:
self.mox.StubOutWithMock(app.neutron, 'show_port')
fake_neutron_ports_response_2 = self._get_fake_port(
fake_docker_endpoint_id, fake_neutron_net_id,
fake_neutron_port_id, lib_const.PORT_STATUS_ACTIVE,
fake_neutron_v4_subnet_id, fake_neutron_v6_subnet_id)
app.neutron.show_port(fake_neutron_port_id).AndReturn(
fake_neutron_ports_response_2)
self.mox.ReplayAll()
fake_subnets_dict_by_id = {subnet['id']: subnet
for subnet in fake_neutron_subnets}
join_request = {
'NetworkID': fake_docker_net_id,
'EndpointID': fake_docker_endpoint_id,
'SandboxKey': utils.get_sandbox_key(fake_container_id),
'Options': {},
}
response = self.app.post('/NetworkDriver.Join',
content_type='application/json',
data=jsonutils.dumps(join_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
fake_neutron_v4_subnet = fake_subnets_dict_by_id[
fake_neutron_v4_subnet_id]
fake_neutron_v6_subnet = fake_subnets_dict_by_id[
fake_neutron_v6_subnet_id]
expected_response = {
'Gateway': fake_neutron_v4_subnet['gateway_ip'],
'GatewayIPv6': fake_neutron_v6_subnet['gateway_ip'],
'InterfaceName': {
'DstPrefix': config.CONF.binding.veth_dst_prefix,
'SrcName': fake_peer_name,
},
'StaticRoutes': []
}
self.assertEqual(expected_response, decoded_json)
def test_network_driver_leave(self):
fake_docker_net_id = lib_utils.get_hash()
fake_docker_endpoint_id = lib_utils.get_hash()
fake_neutron_net_id = str(uuid.uuid4())
self._mock_out_network(fake_neutron_net_id, fake_docker_net_id)
fake_neutron_port_id = str(uuid.uuid4())
self.mox.StubOutWithMock(app.neutron, 'list_ports')
neutron_port_name = utils.get_neutron_port_name(
fake_docker_endpoint_id)
fake_neutron_v4_subnet_id = str(uuid.uuid4())
fake_neutron_v6_subnet_id = str(uuid.uuid4())
fake_neutron_ports_response = self._get_fake_ports(
fake_docker_endpoint_id, fake_neutron_net_id,
fake_neutron_port_id, lib_const.PORT_STATUS_ACTIVE,
fake_neutron_v4_subnet_id, fake_neutron_v6_subnet_id)
app.neutron.list_ports(name=neutron_port_name).AndReturn(
fake_neutron_ports_response)
fake_neutron_port = fake_neutron_ports_response['ports'][0]
self._mock_out_unbinding(fake_docker_endpoint_id, fake_neutron_port)
        self.mox.ReplayAll()
        leave_request = {
            'NetworkID': fake_docker_net_id,
            'EndpointID': fake_docker_endpoint_id,
        }
        response = self.app.post('/NetworkDriver.Leave',
                                 content_type='application/json',
                                 data=jsonutils.dumps(leave_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(constants.SCHEMA['SUCCESS'], decoded_json)
| 40.500536 | 111 | 0.594331 | 4,032 | 37,787 | 5.208333 | 0.076637 | 0.05919 | 0.042 | 0.046476 | 0.84281 | 0.820333 | 0.807 | 0.782714 | 0.767333 | 0.748 | 0 | 0.035854 | 0.303967 | 37,787 | 932 | 112 | 40.543991 | 0.762595 | 0.050123 | 0 | 0.734668 | 0 | 0 | 0.139708 | 0.039865 | 0 | 0 | 0 | 0 | 0.043805 | 1 | 0.020025 | false | 0 | 0.013767 | 0 | 0.035044 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bf763afc12e1c250f953415b19ff1e3f15a2f29d | 16,497 | py | Python | Code.py | Amirsorouri00/linear-regression | a98f8a799f427ed9f971c9c4d4130cd1c19ad41c | [
"Apache-2.0"
] | null | null | null | Code.py | Amirsorouri00/linear-regression | a98f8a799f427ed9f971c9c4d4130cd1c19ad41c | [
"Apache-2.0"
] | null | null | null | Code.py | Amirsorouri00/linear-regression | a98f8a799f427ed9f971c9c4d4130cd1c19ad41c | [
"Apache-2.0"
] | null | null | null | import copy
import math
from random import random

import matplotlib.pyplot as plt
import numpy as np
from sklearn import linear_model
from sklearn.linear_model import LinearRegression
from sklearn.svm import SVR
def evaluate(l, prob, approach):
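    """Cross-validate a weekly-seasonal regressor on the 732x8x8 grid ``l``.

    For every (hour-of-week, x, y) cell the observations at hour,
    hour + 168, ... are hidden with probability ``prob``; a regressor chosen
    by ``approach`` ('lr', 'ridge', 'rbfsvr', 'lsvr' or 'polysvr') is fit on
    the rest, and each hidden value is predicted, clamped at 0 and blended
    with the mean of the adjacent hours when that mean is nonzero (a
    spatial-neighbour term is computed but currently disabled).  Returns the
    square root of the summed squared error over the hidden cells.
    """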
rmse = 0
wl = copy.deepcopy(l)
h = 0
while h < 168:
x = 0
while x < 8:
y = 0
while y < 8:
if approach == "lr":
reg = LinearRegression()
elif approach == 'ridge':
reg = linear_model.Ridge(alpha=.5)
elif approach == 'rbfsvr':
reg = SVR(kernel='rbf', C=1e3, gamma=0.1)
elif approach == 'lsvr':
reg = SVR(kernel='linear', C=1e3)
elif approach == 'polysvr':
reg = SVR(kernel='poly', C=1e3, degree=2)
hour = h
i = 0
xTrain = []
yTrain = []
while hour < 732:
r = random()
if r < prob:
wl[hour][x][y] = -1
else:
xTrain.append(hour)
yTrain.append(wl[hour][x][y])
hour += 168
i += 1
if len(xTrain) != 0:
t = 0
while t < len(xTrain):
xTrain[t] = ([xTrain[t]])
t += 1
xTrain = np.array(xTrain)
yTrain = np.array(yTrain)
reg.fit(xTrain, yTrain)
hour = h
while hour < 732:
if wl[hour][x][y] == -1:
wl[hour][x][y] = int(reg.predict(np.array([[hour]]))[0])
if wl[hour][x][y] < 0:
wl[hour][x][y] = 0
areaMid = 0
num = 0
if x != 0:
if wl[hour][x - 1][y] != -1:
areaMid += wl[hour][x - 1][y]
num += 1
if x != 7:
if wl[hour][x + 1][y] != -1:
areaMid += wl[hour][x + 1][y]
num += 1
if y != 0:
if wl[hour][x][y - 1] != -1:
areaMid += wl[hour][x][y - 1]
num += 1
if y != 7:
if wl[hour][x][y + 1] != -1:
areaMid += wl[hour][x][y + 1]
num += 1
if num != 0:
areaMid = areaMid / num
hourMid = 0
num = 0
if hour != 0:
if wl[hour - 1][x][y] != -1:
hourMid += wl[hour - 1][x][y]
num += 1
if hour != 731:
if wl[hour + 1][x][y] != -1:
hourMid += wl[hour + 1][x][y]
num += 1
if num != 0:
hourMid = hourMid / num
num = 1
# if areaMid != 0:
# wl[hour][x][y] += areaMid
# num += 1
if hourMid != 0:
wl[hour][x][y] += hourMid
num += 1
wl[hour][x][y] = wl[hour][x][y] / num
#eval
# print (l[hour][x][y] - wl[hour][x][y])
rmse += ((l[hour][x][y] - wl[hour][x][y]) * (l[hour][x][y] - wl[hour][x][y]))
hour += 168
y += 1
x += 1
h += 1
return math.sqrt(rmse)
def evaluateDay(l, prob, approach):
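    """Cross-validate a daily-seasonal regressor on the 732x8x8 grid ``l``.

    Same masking scheme as ``evaluate`` but with a 24-hour period and no
    neighbour smoothing.  Also tallies (and prints) the total workload that
    was hidden.  Returns the square root of the summed squared error.
    """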
    masked_total = 0
rmse = 0
wl = copy.deepcopy(l)
h = 0
while h < 24:
x = 0
while x < 8:
y = 0
while y < 8:
if approach == "lr":
reg = LinearRegression()
elif approach == 'ridge':
reg = linear_model.Ridge(alpha=.5)
elif approach == 'rbfsvr':
reg = SVR(kernel='rbf', C=1e3, gamma=0.1)
elif approach == 'lsvr':
reg = SVR(kernel='linear', C=1e3)
elif approach == 'polysvr':
reg = SVR(kernel='poly', C=1e3, degree=2)
hour = h
i = 0
xTrain = []
yTrain = []
while hour < 732:
r = random()
if r < prob:
                        masked_total += wl[hour][x][y]
wl[hour][x][y] = -1
else:
xTrain.append(hour)
yTrain.append(wl[hour][x][y])
hour += 24
i += 1
if len(xTrain) != 0:
t = 0
while t < len(xTrain):
xTrain[t] = ([xTrain[t]])
t += 1
xTrain = np.array(xTrain)
yTrain = np.array(yTrain)
reg.fit(xTrain, yTrain)
hour = h
while hour < 732:
if wl[hour][x][y] == -1:
wl[hour][x][y] = int(reg.predict(np.array([[hour]]))[0])
if wl[hour][x][y] < 0:
wl[hour][x][y] = 0
#eval
# print (l[hour][x][y] - wl[hour][x][y])
rmse += ((l[hour][x][y] - wl[hour][x][y]) * (l[hour][x][y] - wl[hour][x][y]))
hour += 24
y += 1
x += 1
h += 1
    print('masked workload total:', masked_total)
return math.sqrt(rmse)
def evalu(l, rl, wl, prob):
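    """Score the ensemble of two pre-computed imputations against ``l``.

    Cells are hidden with probability ``prob`` (daily period, as in
    ``evaluateDay``) and each hidden cell is filled with the mean of the
    corresponding predictions in ``rl`` and ``wl``.  Returns the square
    root of the summed squared error.
    """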
rmse = 0
vl = copy.deepcopy(l)
h = 0
while h < 24:
x = 0
while x < 8:
y = 0
while y < 8:
hour = h
i = 0
while hour < 732:
r = random()
if r < prob:
vl[hour][x][y] = -1
hour += 24
i += 1
                hour = h
                while hour < 732:
if vl[hour][x][y] == -1:
vl[hour][x][y] = (rl[hour][x][y] + wl[hour][x][y])/2
if vl[hour][x][y] < 0:
vl[hour][x][y] = 0
# eval
# print (l[hour][x][y] - wl[hour][x][y])
rmse += ((l[hour][x][y] - vl[hour][x][y]) * (l[hour][x][y] - vl[hour][x][y]))
hour += 24
y += 1
x += 1
h += 1
return math.sqrt(rmse)
def predict(wl, approach):
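    """Impute the cells of ``wl`` marked -1, in place, with a weekly model.

    For every (hour-of-week, x, y) cell a regressor chosen by ``approach``
    is fit on the observed values at hour, hour + 168, ...; each missing
    value is predicted, clamped at 0 and blended with the mean of the
    adjacent hours when that mean is nonzero.  Cells with no observations
    at all fall back to a damped mean of the adjacent hours.
    """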
h = 0
while h < 168:
x = 0
while x < 8:
y = 0
while y < 8:
if approach == "lr":
reg = LinearRegression()
elif approach == 'ridge':
reg = linear_model.Ridge(alpha=.5)
elif approach == 'rbfsvr':
reg = SVR(kernel='rbf', C=1e3, gamma=0.1)
elif approach == 'lsvr':
reg = SVR(kernel='linear', C=1e3)
elif approach == 'polysvr':
reg = SVR(kernel='poly', C=1e3, degree=2)
hour = h
i = 0
xTrain = []
yTrain = []
while hour < 732:
if wl[hour][x][y] != -1:
xTrain.append(hour)
yTrain.append(wl[hour][x][y])
hour += 168
i += 1
if len(xTrain) != 0:
t = 0
while t < len(xTrain):
xTrain[t] = ([xTrain[t]])
t += 1
xTrain = np.array(xTrain)
yTrain = np.array(yTrain)
reg.fit(xTrain, yTrain)
hour = h
while hour < 732:
if wl[hour][x][y] == -1:
wl[hour][x][y] = int(reg.predict(np.array([[hour]]))[0])
if wl[hour][x][y] < 0:
wl[hour][x][y] = 0
areaMid = 0
num = 0
if x != 0:
if wl[hour][x-1][y] != -1:
areaMid += wl[hour][x-1][y]
num +=1
if x != 7:
if wl[hour][x+1][y] != -1:
areaMid += wl[hour][x+1][y]
num += 1
if y != 0:
if wl[hour][x][y-1] != -1:
areaMid += wl[hour][x][y-1]
num += 1
if y != 7:
if wl[hour][x][y+1] != -1:
areaMid += wl[hour][x][y+1]
num += 1
if num != 0:
areaMid = areaMid/num
hourMid = 0
num = 0
if hour != 0:
if wl[hour-1][x][y] != -1:
hourMid += wl[hour-1][x][y]
num += 1
if hour != 731:
if wl[hour+1][x][y] != -1:
hourMid += wl[hour+1][x][y]
num += 1
if num != 0:
hourMid = hourMid / num
num = 1
# if areaMid != 0:
# wl[hour][x][y] += areaMid
# num += 1
if hourMid != 0:
wl[hour][x][y] += hourMid
num += 1
wl[hour][x][y] = wl[hour][x][y]/num
hour += 168
else:
hour = h
while hour < 732:
if wl[hour][x][y] == -1:
hourMid = 0
num = 0
if hour != 0:
if wl[hour - 1][x][y] != -1:
hourMid += wl[hour - 1][x][y]
num += 1
if hour != 731:
if wl[hour + 1][x][y] != -1:
hourMid += wl[hour + 1][x][y]
num += 1
if num != 0:
hourMid = hourMid / num
wl[hour][x][y] = 0
num = 1
if hourMid != 0:
wl[hour][x][y] += 2*hourMid
num += 2
wl[hour][x][y] = wl[hour][x][y] / num
hour += 168
y += 1
x += 1
h += 1
def predictDay(wl, approach):
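    """Impute the cells of ``wl`` marked -1, in place, with a daily model.

    Like ``predict`` but with a 24-hour period and no neighbour smoothing:
    each missing value is simply the clamped regression prediction.
    """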
h = 0
while h < 24:
x = 0
while x < 8:
y = 0
while y < 8:
if approach == "lr":
reg = LinearRegression()
elif approach == 'ridge':
reg = linear_model.Ridge(alpha=.5)
elif approach == 'rbfsvr':
reg = SVR(kernel='rbf', C=1e3, gamma=0.1)
elif approach == 'lsvr':
reg = SVR(kernel='linear', C=1e3)
elif approach == 'polysvr':
reg = SVR(kernel='poly', C=1e3, degree=2)
hour = h
i = 0
xTrain = []
yTrain = []
while hour < 732:
if wl[hour][x][y] != -1:
xTrain.append(hour)
yTrain.append(wl[hour][x][y])
hour += 24
i += 1
if len(xTrain) != 0:
t = 0
while t < len(xTrain):
xTrain[t] = ([xTrain[t]])
t += 1
xTrain = np.array(xTrain)
yTrain = np.array(yTrain)
reg.fit(xTrain, yTrain)
hour = h
while hour < 732:
if wl[hour][x][y] == -1:
wl[hour][x][y] = int(reg.predict(np.array([[hour]]))[0])
if wl[hour][x][y] < 0:
wl[hour][x][y] = 0
hour += 24
y += 1
x += 1
h += 1
def weekPlot(l, h, name):
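    """Plot l[hour][x][5] for rows x = 1..7 at hour ``h`` of each week.

    Samples hours h, h + 168, ... across the 732-hour span and saves the
    figure as ``<name><h>_Week_Plot.png``.
    """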
num = [[0 for x in range(5)] for x in range(8)]
time = [[0 for x in range(5)] for x in range(8)]
x = 0
while x < 8:
hour = h
i = 0
while hour < 732:
time[x][i] = hour
num[x][i] = l[hour][x][5]
hour += 168
i += 1
x += 1
fig = plt.figure()
cnt = 1
while cnt < 8:
plt.plot(time[cnt], num[cnt])
cnt += 1
plt.title("Week Plot")
plt.xlabel("Time")
plt.ylabel("Frequency")
# plt.show()
fig.savefig(name + str(h) + "_Week_Plot.png")
def dayPlot(l, h, name):
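    """Plot l[hour][x][5] for rows x = 1..7 at hour ``h`` of each day.

    Samples hours h, h + 24, ... across the 732-hour span and saves the
    figure as ``<name><h>_Day_Plot.png``.
    """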
num = [[0 for x in range(31)] for x in range(8)]
time = [[0 for x in range(31)] for x in range(8)]
x = 0
while x < 8:
hour = h
i = 0
while hour < 732:
time[x][i] = hour
num[x][i] = l[hour][x][5]
hour += 24
i += 1
x += 1
fig = plt.figure()
cnt = 1
while cnt < 8:
plt.plot(time[cnt], num[cnt])
cnt += 1
plt.title("Week Plot")
plt.xlabel("Time")
plt.ylabel("Frequency")
# plt.show()
fig.savefig(name + str(h) + "_Day_Plot.png")
fin = open('input.txt', 'r')
f = open('data.txt', 'r')
fout = open("output.txt","w+")
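# data.txt is expected to hold two header lines followed by one
# space-separated 8x8 integer grid per hour, for 732 hourly snapshots;
# l[hour][x][y] receives the parsed counts.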
l = [[[0 for x in range(8)]for x in range(8)]for x in range(732)]
i = 0
while i < 732:
if i == 0:
cnt = 1
else:
cnt = 3
for line in f:
if cnt == 1 or cnt == 2:
print ("first lines")
else:
l[i][cnt - 3] = [int(num) for num in line.split(' ')]
if cnt >= 10:
break
cnt += 1
i += 1
print('imputing missing cells with a weekly linear model...')
wl = copy.deepcopy(l)
predict(wl, "lr")
print('imputing missing cells with a daily RBF SVR...')
rl = copy.deepcopy(l)
predictDay(rl, "rbfsvr")
print('imputing missing cells with a weekly RBF SVR (not used below)...')
rwl = copy.deepcopy(l)
predict(rwl, "rbfsvr")
# evaluate each approach on 10% randomly hidden cells
print("Weekly linear regression RMSE")
print(evaluate(l, 0.1, 'lr'))
print("Daily RBF SVR RMSE")
print(evaluateDay(l, 0.1, 'rbfsvr'))
print("Weekly RBF SVR RMSE")
print(evaluate(l, 0.1, 'rbfsvr'))
print("Averaged daily RBF SVR + weekly linear RMSE")
print(evalu(l, rl, wl, 0.1))
h = 0
for h in range (4):
weekPlot(l, h, "Before_Prediction_")
dayPlot(l, h, "Before_Prediction_")
h = 0
for h in range (2):
weekPlot(wl, h, "After_Linear_Prediction_")
weekPlot(rl, h, "After_PolySVR_Prediction_")
dayPlot(wl, h, "After_Linear_Prediction_")
dayPlot(rl, h, "After_Linear_Prediction_")
for line in fin:
input = [int(num) for num in line.split(' ')]
fout.write(str(rl[input[2]][input[0]][input[1]])+'\n') | 31.603448 | 105 | 0.312966 | 1,712 | 16,497 | 3.000584 | 0.078855 | 0.032315 | 0.082928 | 0.084096 | 0.818376 | 0.791318 | 0.782363 | 0.757835 | 0.742457 | 0.727857 | 0 | 0.053991 | 0.561011 | 16,497 | 522 | 106 | 31.603448 | 0.655344 | 0.018488 | 0 | 0.827423 | 0 | 0 | 0.032641 | 0.005997 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016548 | false | 0 | 0.018913 | 0 | 0.042553 | 0.030733 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
bf943c12e57419b3ae6d13b399efad140c744e2b | 10,175 | py | Python | tests/test_mixture.py | andriygav/MixtureLib | 959678aff2d04bf79e9937bd3884ed4061d25927 | [
"MIT"
] | 4 | 2019-12-08T13:09:50.000Z | 2022-03-31T04:41:27.000Z | tests/test_mixture.py | andriygav/MixtureLib | 959678aff2d04bf79e9937bd3884ed4061d25927 | [
"MIT"
] | 13 | 2019-11-04T13:22:59.000Z | 2020-03-31T20:20:09.000Z | tests/test_mixture.py | andriygav/MixtureLib | 959678aff2d04bf79e9937bd3884ed4061d25927 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import torch
from mixturelib.mixture import Mixture
from mixturelib.mixture import MixtureEM
from mixturelib.local_models import EachModelLinear
from mixturelib.hyper_models import HyperModelDirichlet
from mixturelib.hyper_models import HyperExpertNN
from mixturelib.regularizers import RegularizeFunc
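# The tests below all follow the same pattern: build local linear models and
# a hyper (gating) model, wrap them in MixtureEM, then fit/predict.  A rough
# usage sketch mirroring the tests (shapes and values are illustrative):
#
#   models = [EachModelLinear(input_dim=2) for _ in range(2)]
#   gate = HyperExpertNN(input_dim=2, hidden_dim=2, output_dim=2,
#                        epochs=10, device='cpu')
#   mix = MixtureEM(HyperParameters={'beta': 1.}, HyperModel=gate,
#                   ListOfModels=models, model_type='sample', device='cpu')
#   mix.fit(torch.randn(20, 2), torch.randn(20, 1))
#   answers, weights = mix.predict(torch.randn(20, 2))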
def test_Mixture():
model = Mixture()
with pytest.raises(NotImplementedError):
model.fit(None, None)
with pytest.raises(NotImplementedError):
model.predict(None)
def test_MixtureEM_sample_init():
with pytest.raises(ValueError):
mixture = MixtureEM()
with pytest.raises(ValueError):
mixture = MixtureEM(ListOfModels=[])
torch.random.manual_seed(42)
HyperParameters = {'beta': 1.}
hyper_model = HyperExpertNN(input_dim = 2,
hidden_dim = 2,
output_dim = 2,
epochs=10,
device = 'cpu')
first_model = EachModelLinear(input_dim=2)
    second_model = EachModelLinear(input_dim=2)
    list_of_models = [first_model, second_model]
list_regulizer = [RegularizeFunc(ListOfModels=list_of_models)]
with pytest.raises(ValueError):
mixture = MixtureEM(HyperParameters=HyperParameters,
HyperModel=hyper_model,
ListOfModels=list_of_models,
ListOfRegularizeModel=list_regulizer,
model_type='test',
device='cpu')
mixture = MixtureEM(HyperParameters=HyperParameters,
HyperModel=hyper_model,
ListOfModels=list_of_models,
ListOfRegularizeModel=list_regulizer,
model_type='sample',
device='cpu')
assert mixture.K == 2
assert mixture.device == 'cpu'
assert mixture.HyperParameters['beta'] == torch.tensor(1.)
assert mixture.HyperModel == hyper_model
assert mixture.ListOfRegularizeModel[0] == list_regulizer[0]
assert len(mixture.ListOfModels) == len(list_of_models)
assert mixture.pZ is None
def test_MixtureEM_sample_E_step():
torch.random.manual_seed(42)
HyperParameters = {'beta': 1.}
hyper_model = HyperExpertNN(input_dim = 2,
hidden_dim = 2,
output_dim = 2,
epochs=10,
device = 'cpu')
first_model = EachModelLinear(
input_dim=2,
w=torch.tensor([.0, 0.]),
A=torch.tensor([1., 1.]))
    second_model = EachModelLinear(
        input_dim=2,
        w=torch.tensor([.0, 0.]),
        A=torch.tensor([1., 1.]))
    list_of_models = [first_model, second_model]
list_regulizer = [RegularizeFunc(ListOfModels=list_of_models)]
mixture = MixtureEM(HyperParameters=HyperParameters,
HyperModel=hyper_model,
ListOfModels=list_of_models,
model_type='sample',
device='cpu')
X = torch.randn(2, 2)
Y = torch.randn(2, 1)
mixture.E_step(X, Y)
assert mixture.pZ.long().sum().item() == 0
assert mixture.temp2_proba.long().sum().item() == 0
assert mixture.indexes.long().sum().item() == 0
assert mixture.lerning_indexes[0][0].item() == 0
assert mixture.lerning_indexes[0][1].item() == 1
assert mixture.lerning_indexes[1][0].item() == 0
assert mixture.lerning_indexes[0][1].item() == 1
def test_MixtureEM_sample_EM_steps():
torch.random.manual_seed(42)
HyperParameters = {'beta': 1.}
hyper_model = HyperExpertNN(input_dim = 2,
hidden_dim = 2,
output_dim = 2,
epochs=10,
device = 'cpu')
first_model = EachModelLinear(
input_dim=2,
w=torch.tensor([.0, 0.]),
A=torch.tensor([1., 1.]))
    second_model = EachModelLinear(
        input_dim=2,
        w=torch.tensor([.0, 0.]),
        A=torch.tensor([1., 1.]))
    list_of_models = [first_model, second_model]
list_regulizer = [RegularizeFunc(ListOfModels=list_of_models)]
mixture = MixtureEM(HyperParameters=HyperParameters,
HyperModel=hyper_model,
ListOfModels=list_of_models,
ListOfRegularizeModel=list_regulizer,
model_type='sample',
device='cpu')
X = torch.randn(200, 2)
Y = torch.randn(200, 1)
mixture.E_step(X, Y)
mixture.M_step(X, Y)
def test_MixtureEM_sample_fit_predict():
torch.random.manual_seed(42)
HyperParameters = {'beta': 1.}
hyper_model = HyperExpertNN(input_dim = 2,
hidden_dim = 2,
output_dim = 2,
epochs=10,
device = 'cpu')
first_model = EachModelLinear(
input_dim=2,
w=torch.tensor([.0, 0.]),
A=torch.tensor([1., 1.]))
    second_model = EachModelLinear(
        input_dim=2,
        w=torch.tensor([.0, 0.]),
        A=torch.tensor([1., 1.]))
    list_of_models = [first_model, second_model]
list_regulizer = [RegularizeFunc(ListOfModels=list_of_models)]
mixture = MixtureEM(HyperParameters=HyperParameters,
HyperModel=hyper_model,
ListOfModels=list_of_models,
ListOfRegularizeModel=list_regulizer,
model_type='sample',
device='cpu')
X = torch.randn(20, 2)
Y = torch.randn(20, 1)
mixture.fit(X, Y, progress=enumerate)
answ, pi = mixture.predict(X)
assert answ.sum().long() == 0
assert pi.sum() == 20
assert mixture.fit(None, Y) is None
assert mixture.fit(X, None) is None
def test_MixtureEM_init():
torch.random.manual_seed(42)
HyperParameters = {'beta': 1.}
hyper_model = HyperModelDirichlet(
output_dim = 2, device = 'cpu')
first_model = EachModelLinear(input_dim=2)
    second_model = EachModelLinear(input_dim=2)
    list_of_models = [first_model, second_model]
list_regulizer = [RegularizeFunc(ListOfModels=list_of_models)]
mixture = MixtureEM(HyperParameters=HyperParameters,
HyperModel=hyper_model,
ListOfModels=list_of_models,
ListOfRegularizeModel=list_regulizer,
device='cpu')
assert mixture.K == 2
assert mixture.device == 'cpu'
assert mixture.HyperParameters['beta'] == torch.tensor(1.)
assert mixture.HyperModel == hyper_model
assert mixture.ListOfRegularizeModel[0] == list_regulizer[0]
assert len(mixture.ListOfModels) == len(list_of_models)
assert mixture.pZ is None
def test_MixtureEM_E_step():
torch.random.manual_seed(42)
HyperParameters = {'beta': 1.}
hyper_model = HyperModelDirichlet(
output_dim = 2, device = 'cpu')
first_model = EachModelLinear(
input_dim=2,
w=torch.tensor([.0, 0.]),
A=torch.tensor([1., 1.]))
    second_model = EachModelLinear(
        input_dim=2,
        w=torch.tensor([.0, 0.]),
        A=torch.tensor([1., 1.]))
    list_of_models = [first_model, second_model]
list_regulizer = [RegularizeFunc(ListOfModels=list_of_models)]
mixture = MixtureEM(HyperParameters=HyperParameters,
HyperModel=hyper_model,
ListOfModels=list_of_models,
device='cpu')
X = torch.randn(2, 2)
Y = torch.randn(2, 1)
mixture.E_step(X, Y)
assert mixture.pZ.long().sum().item() == 0
assert mixture.temp2_proba.long().sum().item() == 0
assert mixture.indexes.long().sum().item() == 0
assert mixture.lerning_indexes[0][0].item() == 0
assert mixture.lerning_indexes[0][1].item() == 1
assert mixture.lerning_indexes[1][0].item() == 0
assert mixture.lerning_indexes[0][1].item() == 1
def test_MixtureEM_EM_steps():
torch.random.manual_seed(42)
HyperParameters = {'beta': 1.}
hyper_model = HyperModelDirichlet(
output_dim = 2, device = 'cpu')
first_model = EachModelLinear(
input_dim=2,
w=torch.tensor([.0, 0.]),
A=torch.tensor([1., 1.]))
    second_model = EachModelLinear(
        input_dim=2,
        w=torch.tensor([.0, 0.]),
        A=torch.tensor([1., 1.]))
    list_of_models = [first_model, second_model]
list_regulizer = [RegularizeFunc(ListOfModels=list_of_models)]
mixture = MixtureEM(HyperParameters=HyperParameters,
HyperModel=hyper_model,
ListOfModels=list_of_models,
ListOfRegularizeModel=list_regulizer,
device='cpu')
X = torch.randn(200, 2)
Y = torch.randn(200, 1)
mixture.E_step(X, Y)
mixture.M_step(X, Y)
def test_MixtureEM_fit_predict():
torch.random.manual_seed(42)
HyperParameters = {'beta': 1.}
hyper_model = HyperModelDirichlet(
output_dim = 2, device = 'cpu')
    first_model = EachModelLinear(
        input_dim=2,
        w=torch.tensor([0., 0.]),
        A=torch.tensor([1., 1.]))
    second_model = EachModelLinear(
        input_dim=2,
        w=torch.tensor([0., 0.]),
        A=torch.tensor([1., 1.]))
    list_of_models = [first_model, second_model]
    list_regularizer = [RegularizeFunc(ListOfModels=list_of_models)]
    mixture = MixtureEM(HyperParameters=HyperParameters,
                        HyperModel=hyper_model,
                        ListOfModels=list_of_models,
                        ListOfRegularizeModel=list_regularizer,
                        device='cpu')
X = torch.randn(20, 2)
Y = torch.randn(20, 1)
mixture.fit(X, Y)
answ, pi = mixture.predict(X)
assert answ.sum().long() == 0
assert pi.sum() == 20
# === src/openprocurement/tender/belowthreshold/tests/auction_blanks.py ===
# === repo: ProzorroUKR/openprocurement.api (Apache-2.0) ===
# -*- coding: utf-8 -*-
from datetime import timedelta
from openprocurement.tender.core.tests.base import change_auth
from openprocurement.tender.belowthreshold.tests.base import test_cancellation, test_draft_claim
from openprocurement.api.constants import RELEASE_2020_04_19
from openprocurement.api.utils import get_now
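# Helper: extend patch_data["bids"] with auction results ("value" or "lotValues",
# optionally including "weightedValue") for a slice of the initial bids.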
def update_patch_data(self, patch_data, key=None, start=0, interval=None, with_weighted_value=False):
if start:
iterator = list(range(self.min_bids_number))[start::interval]
else:
iterator = list(range(self.min_bids_number))[::interval]
bid_patch_data_value = {
"value": {
"amount": 489,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
if with_weighted_value:
bid_patch_data_value.update({
"weightedValue": {
"amount": 479,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
})
for x in iterator:
bid_patch_data = {"id": self.initial_bids[x]["id"]}
if key == "value":
bid_patch_data.update(bid_patch_data_value)
elif key == "lotValues":
bid_patch_data.update({"lotValues": [bid_patch_data_value]})
patch_data["bids"].append(bid_patch_data)
# TenderAuctionResourceTest
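# GET, PATCH and POST against a non-existent tender id must all return 404.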
def get_tender_auction_not_found(self):
response = self.app.get("/tenders/some_id/auction", status=404)
self.assertEqual(response.status, "404 Not Found")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["status"], "error")
self.assertEqual(
response.json["errors"], [{"description": "Not Found", "location": "url", "name": "tender_id"}]
)
response = self.app.patch_json("/tenders/some_id/auction", {"data": {}}, status=404)
self.assertEqual(response.status, "404 Not Found")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["status"], "error")
self.assertEqual(
response.json["errors"], [{"description": "Not Found", "location": "url", "name": "tender_id"}]
)
response = self.app.post_json("/tenders/some_id/auction", {"data": {}}, status=404)
self.assertEqual(response.status, "404 Not Found")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["status"], "error")
self.assertEqual(
response.json["errors"], [{"description": "Not Found", "location": "url", "name": "tender_id"}]
)
def get_tender_auction(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.get("/tenders/{}/auction".format(self.tender_id))
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
auction = response.json["data"]
self.assertNotEqual(auction, self.initial_data)
self.assertIn("dateModified", auction)
self.assertIn("minimalStep", auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(auction["bids"][0]["value"]["amount"], self.initial_bids[0]["value"]["amount"])
self.assertEqual(auction["bids"][1]["value"]["amount"], self.initial_bids[1]["value"]["amount"])
# self.assertEqual(self.initial_data["auctionPeriod"]['startDate'], auction["auctionPeriod"]['startDate'])
response = self.app.get("/tenders/{}/auction?opt_jsonp=callback".format(self.tender_id))
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/javascript")
self.assertIn('callback({"data": {"', response.body.decode()) # PY3_TRICK
response = self.app.get("/tenders/{}/auction?opt_pretty=1".format(self.tender_id))
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
self.assertIn('{\n "data": {\n "', response.body.decode())
self.set_status("active.qualification")
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current (active.qualification) tender status",
)
def post_tender_auction(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": {}}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't report auction results in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.post_json(
"/tenders/{}/auction".format(self.tender_id),
{"data": {"bids": [{"invalid_field": "invalid_value"}]}},
status=422,
)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"],
[{"description": {"invalid_field": "Rogue field"}, "location": "body", "name": "bids"}],
)
patch_data = {
"bids": [
{
"id": self.initial_bids[-1]["id"],
"value": {"amount": 409, "currency": "UAH", "valueAddedTaxIncluded": True},
}
]
}
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
["Number of auction results did not match the number of tender bids"]
)
update_patch_data(self, patch_data, key="value", start=-2, interval=-1)
patch_data["bids"][-1]["id"] = "some_id"
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], {"id": ["Hash value is wrong length."]})
patch_data["bids"][-1]["id"] = "00000000000000000000000000000000"
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], ["Auction bids should be identical to the tender bids"])
patch_data["bids"] = [{"value": {"amount": n}}
for n, b in enumerate(self.initial_bids)]
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
for n, b in enumerate(tender["bids"]):
self.assertEqual(b["value"]["amount"], n)
self.assertEqual("active.qualification", tender["status"])
self.assertIn("tenderers", tender["bids"][0])
self.assertIn("name", tender["bids"][0]["tenderers"][0])
self.assertEqual(tender["awards"][0]["bid_id"], self.initial_bids[0]["id"])
self.assertEqual(tender["awards"][0]["value"]["amount"], 0)
self.assertEqual(tender["awards"][0]["suppliers"], self.initial_bids[0]["tenderers"])
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't report auction results in current (active.qualification) tender status",
)
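# weightedValue in auction results is only supported by the openua, openeu and
# simple.defense procedures; other tender types skip this test.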
def post_tender_auction_weighted_value(self):
if self.tender_class.procurementMethodType.default not in ("openua", "openeu", "simple.defense"):
self.skipTest("weightedValue is not implemented")
self.app.authorization = ("Basic", ("auction", ""))
self.set_status("active.auction")
patch_data = {"bids": []}
update_patch_data(self, patch_data, key="value", start=0, interval=1, with_weighted_value=True)
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
first_bid_weighted_amount = tender["bids"][0]["weightedValue"]["amount"]
last_bid_weighted_amount = tender["bids"][-1]["weightedValue"]["amount"]
first_bid_patch_weighted_amount = patch_data["bids"][0]["weightedValue"]["amount"]
last_bid_patch_weighted_amount = patch_data["bids"][-1]["weightedValue"]["amount"]
self.assertEqual(first_bid_weighted_amount, last_bid_patch_weighted_amount)
self.assertEqual(last_bid_weighted_amount, first_bid_patch_weighted_amount)
self.assertEqual("active.qualification", tender["status"])
self.assertEqual(tender["awards"][0]["weightedValue"]["amount"], first_bid_patch_weighted_amount)
def patch_tender_auction(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.patch_json("/tenders/{}/auction".format(self.tender_id), {"data": {}}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't update auction urls in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.patch_json(
"/tenders/{}/auction".format(self.tender_id),
{"data": {"bids": [{"invalid_field": "invalid_value"}]}},
status=422,
)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"],
[{"description": {"invalid_field": "Rogue field"}, "location": "body", "name": "bids"}],
)
patch_data = {
"auctionUrl": "http://auction-sandbox.openprocurement.org/tenders/{}".format(self.tender_id),
"bids": [
{
"id": self.initial_bids[-1]["id"],
"participationUrl": "http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}".format(
self.tender_id, self.initial_bids[-1]["id"]
),
}
],
}
response = self.app.patch_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"], ["Number of bids did not match the number of tender bids"]
)
for x in list(range(self.min_bids_number))[-2::-1]:
patch_data["bids"].append(
{
"id": self.initial_bids[x]["id"],
"participationUrl": "http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}".format(
self.tender_id, self.initial_bids[x]["id"]
),
}
)
patch_data["bids"][-1]["id"] = "some_id"
response = self.app.patch_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], {"id": ["Hash value is wrong length."]})
patch_data["bids"][-1]["id"] = "00000000000000000000000000000000"
response = self.app.patch_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], ["Auction bids should be identical to the tender bids"])
patch_data["bids"] = [{"participationUrl": f"http://auction.prozorro.gov.ua/{b['id']}"}
for b in self.initial_bids]
response = self.app.patch_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
for b in tender["bids"]:
self.assertEqual(b["participationUrl"], f"http://auction.prozorro.gov.ua/{b['id']}")
self.set_status("complete")
response = self.app.patch_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"], "Can't update auction urls in current (complete) tender status"
)
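# Auction documents can be added during active.auction and re-uploaded afterwards,
# but not in a forbidden status or once the tender is complete.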
def post_tender_auction_document(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")], status=403
)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't add document in current ({}) tender status".format(
self.forbidden_auction_document_create_actions_status
),
)
self.set_status("active.auction")
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")]
)
self.assertEqual(response.status, "201 Created")
self.assertEqual(response.content_type, "application/json")
doc_id = response.json["data"]["id"]
key = response.json["data"]["url"].split("?")[-1].split("=")[-1]
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id),
{"data": {"bids": [{"id": b["id"]} for b in self.initial_bids]}})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["data"]["status"], "active.qualification")
response = self.app.put(
"/tenders/{}/documents/{}".format(self.tender_id, doc_id),
upload_files=[("file", "name.doc", b"content_with_names")],
)
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(doc_id, response.json["data"]["id"])
key2 = response.json["data"]["url"].split("?")[-1].split("=")[-1]
self.assertNotEqual(key, key2)
self.set_status("complete")
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")], status=403
)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"], "Can't add document in current (complete) tender status"
)
# TenderSameValueAuctionResourceTest
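# With identical bid amounts the earlier bid wins, so reversing bid dates flips the award.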
def post_tender_auction_not_changed(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id),
{"data": {"bids": [
{"id": b["id"], "value": b["value"]}
for b in self.initial_bids]}})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
self.assertEqual("active.qualification", tender["status"])
self.assertEqual(tender["awards"][0]["bid_id"], self.initial_bids[0]["id"])
self.assertEqual(tender["awards"][0]["value"]["amount"], self.initial_bids[0]["value"]["amount"])
self.assertEqual(tender["awards"][0]["suppliers"], self.initial_bids[0]["tenderers"])
def post_tender_auction_reversed(self):
self.app.authorization = ("Basic", ("auction", ""))
now = get_now()
patch_data = {
"bids": [
{"id": b["id"], "date": (now - timedelta(seconds=i)).isoformat(), "value": b["value"]}
for i, b in enumerate(self.initial_bids)
]
}
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
self.assertEqual("active.qualification", tender["status"])
self.assertEqual(tender["awards"][0]["bid_id"], self.initial_bids[-1]["id"])
self.assertEqual(tender["awards"][0]["value"]["amount"], self.initial_bids[-1]["value"]["amount"])
self.assertEqual(tender["awards"][0]["suppliers"], self.initial_bids[-1]["tenderers"])
# TenderLotAuctionResourceTest
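# Single-lot variants: auction info is read from /auction, while results are
# posted per lot via /auction/{lot_id}.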
def get_tender_lot_auction(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.get("/tenders/{}/auction".format(self.tender_id))
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
auction = response.json["data"]
self.assertNotEqual(auction, self.initial_data)
self.assertIn("dateModified", auction)
self.assertIn("minimalStep", auction)
self.assertIn("lots", auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(
auction["bids"][0]["lotValues"][0]["value"]["amount"], self.initial_bids[0]["lotValues"][0]["value"]["amount"]
)
self.assertEqual(
auction["bids"][1]["lotValues"][0]["value"]["amount"], self.initial_bids[1]["lotValues"][0]["value"]["amount"]
)
self.set_status("active.qualification")
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current (active.qualification) tender status",
)
def post_tender_lot_auction_weighted_value(self):
if self.tender_class.procurementMethodType.default not in ("openua", "openeu", "simple.defense"):
self.skipTest("weightedValue is not implemented")
self.app.authorization = ("Basic", ("auction", ""))
self.set_status("active.auction")
patch_data = {
"bids": [
{
"id": self.initial_bids[-1]["id"],
"lotValues": [{
"value": {
"amount": 409,
"currency": "UAH",
"valueAddedTaxIncluded": True
},
"weightedValue": {
"amount": 399,
"currency": "UAH",
"valueAddedTaxIncluded": True
},
}],
}
]
}
update_patch_data(self, patch_data, key="lotValues", start=-2, interval=-1, with_weighted_value=True)
for lot in self.initial_lots:
response = self.app.post_json("/tenders/{}/auction/{}".format(self.tender_id, lot["id"]), {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
first_bid_weighted_amount = tender["bids"][0]["lotValues"][0]["weightedValue"]["amount"]
last_bid_weighted_amount = tender["bids"][-1]["lotValues"][0]["weightedValue"]["amount"]
first_bid_patch_weighted_amount = patch_data["bids"][0]["lotValues"][0]["weightedValue"]["amount"]
last_bid_patch_weighted_amount = patch_data["bids"][-1]["lotValues"][0]["weightedValue"]["amount"]
self.assertEqual(first_bid_weighted_amount, last_bid_patch_weighted_amount)
self.assertEqual(last_bid_weighted_amount, first_bid_patch_weighted_amount)
self.assertEqual("active.qualification", tender["status"])
self.assertEqual(tender["awards"][0]["weightedValue"]["amount"], first_bid_patch_weighted_amount)
def post_tender_lot_auction_document(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")], status=403
)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't add document in current ({}) tender status".format(
self.forbidden_auction_document_create_actions_status
),
)
self.set_status("active.auction")
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")]
)
self.assertEqual(response.status, "201 Created")
self.assertEqual(response.content_type, "application/json")
doc_id = response.json["data"]["id"]
key = response.json["data"]["url"].split("?")[-1].split("=")[-1]
response = self.app.patch_json(
"/tenders/{}/documents/{}".format(self.tender_id, doc_id),
{"data": {"documentOf": "lot", "relatedItem": self.initial_lots[0]["id"]}},
)
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["data"]["documentOf"], "lot")
self.assertEqual(response.json["data"]["relatedItem"], self.initial_lots[0]["id"])
patch_data = {"bids": [{"id": b["id"], "lotValues": [{"relatedLot": l["id"]} for l in self.initial_lots]}
for b in self.initial_bids]}
lot_id = self.initial_lots[0]["id"]
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
response = self.app.put(
"/tenders/{}/documents/{}".format(self.tender_id, doc_id),
upload_files=[("file", "name.doc", b"content_with_names")],
)
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(doc_id, response.json["data"]["id"])
key2 = response.json["data"]["url"].split("?")[-1].split("=")[-1]
self.assertNotEqual(key, key2)
self.set_status("complete")
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")], status=403
)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"], "Can't add document in current (complete) tender status"
)
# TenderMultipleLotAuctionResourceTest
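# Multi-lot variants: each bid must carry one lotValue per lot, and URLs and
# results are validated and applied lot by lot.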
def get_tender_lots_auction(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.get("/tenders/{}/auction".format(self.tender_id))
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
auction = response.json["data"]
self.assertNotEqual(auction, self.initial_data)
self.assertIn("dateModified", auction)
self.assertIn("minimalStep", auction)
self.assertIn("lots", auction)
self.assertIn("items", auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(
auction["bids"][0]["lotValues"][0]["value"]["amount"], self.initial_bids[0]["lotValues"][0]["value"]["amount"]
)
self.assertEqual(
auction["bids"][1]["lotValues"][0]["value"]["amount"], self.initial_bids[1]["lotValues"][0]["value"]["amount"]
)
self.assertEqual(
auction["bids"][0]["lotValues"][1]["value"]["amount"], self.initial_bids[0]["lotValues"][1]["value"]["amount"]
)
self.assertEqual(
auction["bids"][1]["lotValues"][1]["value"]["amount"], self.initial_bids[1]["lotValues"][1]["value"]["amount"]
)
self.set_status("active.qualification")
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current (active.qualification) tender status",
)
def post_tender_lots_auction(self):
self.app.authorization = ("Basic", ("auction", ""))
lot_id = self.initial_lots[0]["id"]
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": {}}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't report auction results in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
    # posting a draft claim here should not affect the status change below
if self.initial_data["procurementMethodType"] in ("belowThreshold", "simple.defense"):
with change_auth(self.app, ("Basic", ("token", ""))):
self.app.post_json(
f"/tenders/{self.tender_id}/complaints",
{"data": test_draft_claim},
)
self.set_status("active.auction")
response = self.app.post_json(
"/tenders/{}/auction".format(self.tender_id),
{"data": {"bids": [{"invalid_field": "invalid_value"}]}},
status=422,
)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"],
[{"description": {"invalid_field": "Rogue field"}, "location": "body", "name": "bids"}],
)
patch_data = {
"bids": [
{
"id": self.initial_bids[-1]["id"],
"lotValues": [{"value": {"amount": 409, "currency": "UAH", "valueAddedTaxIncluded": True}}],
}
]
}
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
["Number of auction results did not match the number of tender bids"]
)
update_patch_data(self, patch_data, key="lotValues", start=-2, interval=-1)
patch_data["bids"][-1]["id"] = "some_id"
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], {"id": ["Hash value is wrong length."]})
patch_data["bids"][-1]["id"] = "00000000000000000000000000000000"
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], ["Auction bids should be identical to the tender bids"])
# patch_data["bids"][-1]["id"] = self.initial_bids[0]["id"]
patch_data["bids"] = [{"lotValues": [{}, {}, {}]} for b in self.initial_bids]
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
["Number of lots of auction results did not match the number of tender lots"],
)
patch_data["bids"] = [{"lotValues": [{"relatedLot": lot_id}, {"relatedLot": lot_id}]}
for b in self.initial_bids]
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
# self.assertEqual(response.json['errors'][0]["description"], [{u'lotValues': [{u'relatedLot': [u'relatedLot should be one of lots of bid']}]}])
self.assertEqual(
response.json["errors"][0]["description"],
['Auction bid.lotValues should be identical to the tender bid.lotValues']
)
    for num, lot in enumerate(self.initial_lots):
        patch_data["bids"] = [{"lotValues": [{"value": {"amount": 10 ** num}} for _ in b["lotValues"]]}
                              for b in self.initial_bids]
        response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot['id']}", {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
for b in tender["bids"]:
self.assertEqual(b["lotValues"][0]["value"]["amount"], 1)
self.assertEqual(b["lotValues"][1]["value"]["amount"], 10)
self.assertEqual("active.qualification", tender["status"])
self.assertIn("tenderers", tender["bids"][0])
self.assertIn("name", tender["bids"][0]["tenderers"][0])
# self.assertIn(tender["awards"][0]["id"], response.headers['Location'])
self.assertEqual(tender["awards"][0]["bid_id"], self.initial_bids[0]["id"])
self.assertEqual(tender["awards"][0]["value"]["amount"], 1)
self.assertEqual(tender["awards"][0]["suppliers"], self.initial_bids[0]["tenderers"])
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't report auction results in current (active.qualification) tender status",
)
def post_tender_lots_auction_weighted_value(self):
if self.tender_class.procurementMethodType.default not in ("openua", "openeu", "simple.defense"):
self.skipTest("weightedValue is not implemented")
self.app.authorization = ("Basic", ("auction", ""))
self.set_status("active.auction")
patch_data = {"bids": []}
update_patch_data(self, patch_data, key="lotValues", with_weighted_value=True)
for bid in patch_data["bids"]:
bid["lotValues"] = [bid["lotValues"][0].copy() for i in self.initial_lots]
for lot in self.initial_lots:
response = self.app.post_json(
"/tenders/{}/auction/{}".format(self.tender_id, lot["id"]),
{"data": patch_data}
)
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
first_bid_weighted_amount = tender["bids"][0]["lotValues"][0]["weightedValue"]["amount"]
last_bid_weighted_amount = tender["bids"][-1]["lotValues"][0]["weightedValue"]["amount"]
first_bid_patch_weighted_amount = patch_data["bids"][0]["lotValues"][0]["weightedValue"]["amount"]
last_bid_patch_weighted_amount = patch_data["bids"][-1]["lotValues"][0]["weightedValue"]["amount"]
self.assertEqual(first_bid_weighted_amount, last_bid_patch_weighted_amount)
self.assertEqual(last_bid_weighted_amount, first_bid_patch_weighted_amount)
self.assertEqual("active.qualification", tender["status"])
self.assertEqual(tender["awards"][0]["weightedValue"]["amount"], first_bid_patch_weighted_amount)
def patch_tender_lots_auction(self):
self.app.authorization = ("Basic", ("auction", ""))
lot_id = self.initial_lots[0]["id"]
response = self.app.patch_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": {}}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't update auction urls in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
self.check_chronograph()
response = self.app.patch_json(
f"/tenders/{self.tender_id}/auction/{lot_id}",
{"data": {"bids": [{"invalid_field": "invalid_value"}]}},
status=422,
)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"],
[{"description": {"invalid_field": "Rogue field"}, "location": "body", "name": "bids"}],
)
patch_data = {
"auctionUrl": "http://auction-sandbox.openprocurement.org/tenders/{}".format(self.tender_id),
"bids": [
{
"id": b["id"],
"participationUrl": "http://auction-sandbox.openprocurement.org/tenders/id",
}
for b in self.initial_bids
],
}
response = self.app.patch_json(f"/tenders/{self.tender_id}/auction", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"],
[
{
"description": [{"participationUrl": ["url should be posted for each lot of bid"]}],
"location": "body",
"name": "bids",
}
],
)
del patch_data["bids"][0]["participationUrl"]
patch_data["bids"][0]["lotValues"] = [
{
"participationUrl": "http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}".format(
self.tender_id, self.initial_bids[0]["id"]
)
}
]
patch_data = {
"lots": [{"auctionUrl": "http://auction.openprocurement.org/tenders/id"}],
"bids": [
{"lotValues": [{"participationUrl": "http://auction.openprocurement.org/id"} for v in b["lotValues"]]}
for b in self.initial_bids
],
}
response = self.app.patch_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"],
[{'location': 'body', 'name': 'lots',
'description': ['Number of lots did not match the number of tender lots']}]
)
patch_data["lots"].append({})
patch_data["bids"][1]["id"] = "some_id"
response = self.app.patch_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], {"id": ["Hash value is wrong length."]})
patch_data["bids"][1]["id"] = "00000000000000000000000000000000"
response = self.app.patch_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], ["Auction bids should be identical to the tender bids"])
patch_data["bids"][1]["id"] = self.initial_bids[0]["id"]
patch_data["lots"][1]["id"] = "00000000000000000000000000000000"
response = self.app.patch_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], ["Auction lots should be identical to the tender lots"])
patch_data = {
"lots": [{"auctionUrl": "http://auction.openprocurement.org/tenders/id"}, {}],
"bids": [
{"lotValues": [{"participationUrl": "http://auction.openprocurement.org/id"}, {}, {}]}
for b in self.initial_bids
],
}
response = self.app.patch_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
["Number of lots of auction results did not match the number of tender lots"],
)
for bid in patch_data["bids"]:
bid["lotValues"] = [bid["lotValues"][0].copy() for i in self.initial_lots]
response = self.app.patch_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(
response.json["errors"][0],
{"location": "body", "name": "bids", "description": [
{"participationUrl": ["url should be posted for each lot of bid"]}]}
)
for lot in self.initial_lots:
patch_data = {
"lots": [
{"auctionUrl": f"http://auction.prozorro.gov.ua/{l['id']}"} if l["id"] == lot["id"] else {}
for l in self.initial_lots
],
"bids": [
{"lotValues": [
{"participationUrl": f"http://auction.prozorro.gov.ua/{v['relatedLot']}"}
if v["relatedLot"] == lot["id"] else {}
for v in b["lotValues"]
]}
for b in self.initial_bids
]
}
response = self.app.patch_json("/tenders/{}/auction/{}".format(self.tender_id, lot["id"]), {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
resp = response.json["data"]
for bid in resp["bids"]:
for l in bid["lotValues"]:
self.assertEqual(l["participationUrl"], f"http://auction.prozorro.gov.ua/{l['relatedLot']}")
for l in resp["lots"]:
self.assertEqual(l["auctionUrl"], f"http://auction.prozorro.gov.ua/{l['id']}")
self.app.authorization = ("Basic", ("token", ""))
cancellation = dict(**test_cancellation)
cancellation.update({
"status": "active",
"cancellationOf": "lot",
"relatedLot": self.initial_lots[0]["id"],
})
if RELEASE_2020_04_19 > get_now():
response = self.app.post_json("/tenders/{}/cancellations".format(self.tender_id), {"data": cancellation})
self.assertEqual(response.status, "201 Created")
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.patch_json(
"/tenders/{}/auction/{}".format(self.tender_id, self.initial_lots[0]["id"]), {"data": patch_data},
status=403
)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], "Can update auction urls only in active lot status")
def post_tender_lots_auction_document(self):
self.app.authorization = ("Basic", ("auction", ""))
lot_id = self.initial_lots[0]["id"]
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")], status=403
)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't add document in current ({}) tender status".format(
self.forbidden_auction_document_create_actions_status
),
)
self.set_status("active.auction")
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")]
)
self.assertEqual(response.status, "201 Created")
self.assertEqual(response.content_type, "application/json")
doc_id = response.json["data"]["id"]
key = response.json["data"]["url"].split("?")[-1].split("=")[-1]
response = self.app.patch_json(
"/tenders/{}/documents/{}".format(self.tender_id, doc_id),
{"data": {"documentOf": "lot", "relatedItem": self.initial_lots[0]["id"]}},
)
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["data"]["documentOf"], "lot")
self.assertEqual(response.json["data"]["relatedItem"], self.initial_lots[0]["id"])
patch_data = {"bids": [
{
"lotValues": [
{"relatedLot": i["id"]}
for i in self.initial_lots
],
} for b in self.initial_bids
]}
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
response = self.app.put(
"/tenders/{}/documents/{}".format(self.tender_id, doc_id),
upload_files=[("file", "name.doc", b"content_with_names")],
)
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(doc_id, response.json["data"]["id"])
key2 = response.json["data"]["url"].split("?")[-1].split("=")[-1]
self.assertNotEqual(key, key2)
self.set_status("complete")
response = self.app.post(
"/tenders/{}/documents".format(self.tender_id), upload_files=[("file", "name.doc", b"content")], status=403
)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"], "Can't add document in current (complete) tender status"
)
# TenderFeaturesAuctionResourceTest
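# Feature tenders expose "features" and bid "parameters" through the auction
# endpoint, and awards take feature-adjusted values into account.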
def get_tender_auction_feature(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.get("/tenders/{}/auction".format(self.tender_id))
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
auction = response.json["data"]
self.assertNotEqual(auction, self.initial_data)
self.assertIn("dateModified", auction)
self.assertIn("minimalStep", auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(auction["bids"][0]["value"]["amount"], self.initial_bids[0]["value"]["amount"])
self.assertEqual(auction["bids"][1]["value"]["amount"], self.initial_bids[1]["value"]["amount"])
self.assertIn("features", auction)
self.assertIn("parameters", auction["bids"][0])
def post_tender_auction_feature(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.patch_json("/tenders/{}/auction".format(self.tender_id), {"data": {}}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't update auction urls in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.post_json(
"/tenders/{}/auction".format(self.tender_id),
{"data": {"bids": [{"invalid_field": "invalid_value"}]}},
status=422,
)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"],
[{"description": {"invalid_field": "Rogue field"}, "location": "body", "name": "bids"}],
)
patch_data = {
"bids": [
{
"id": self.initial_bids[-1]["id"],
"value": {"amount": 459, "currency": "UAH", "valueAddedTaxIncluded": True},
}
]
}
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"], ["Number of auction results did not match the number of tender bids"]
)
update_patch_data(self, patch_data, key="value", start=-2, interval=-1)
patch_data["bids"][-1]["id"] = "some_id"
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], {"id": ["Hash value is wrong length."]})
patch_data["bids"][-1]["id"] = "00000000000000000000000000000000"
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], ["Auction bids should be identical to the tender bids"])
patch_data = {"bids": [
{"value": {"amount": 11111}},
{"value": {"amount": 2222}},
]}
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
self.assertIn("features", tender)
self.assertIn("parameters", tender["bids"][0])
self.assertEqual(tender["bids"][0]["value"]["amount"], patch_data["bids"][0]["value"]["amount"])
self.assertEqual(tender["bids"][1]["value"]["amount"], patch_data["bids"][1]["value"]["amount"])
self.assertEqual("active.qualification", tender["status"])
self.assertIn("tenderers", tender["bids"][0])
self.assertIn("name", tender["bids"][0]["tenderers"][0])
    # the second bid, with the lower amount, wins the award
self.assertEqual(tender["awards"][0]["bid_id"], tender["bids"][1]["id"])
self.assertEqual(tender["awards"][0]["value"]["amount"], tender["bids"][1]["value"]["amount"])
self.assertEqual(tender["awards"][0]["suppliers"], self.initial_bids[1]["tenderers"])
response = self.app.post_json("/tenders/{}/auction".format(self.tender_id), {"data": patch_data}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't report auction results in current (active.qualification) tender status",
)
# TenderFeaturesMultilotAuctionResourceTest
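# Features combined with multiple lots: when lot amounts are equal, the bid with
# better parameters wins the award in each lot.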
def get_tender_lots_auction_features(self):
self.app.authorization = ("Basic", ("auction", ""))
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.get("/tenders/{}/auction".format(self.tender_id))
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
auction = response.json["data"]
self.assertNotEqual(auction, self.initial_data)
self.assertIn("dateModified", auction)
self.assertIn("minimalStep", auction)
self.assertIn("lots", auction)
self.assertIn("items", auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(
auction["bids"][0]["lotValues"][0]["value"]["amount"], self.initial_bids[0]["lotValues"][0]["value"]["amount"]
)
self.assertEqual(
auction["bids"][1]["lotValues"][0]["value"]["amount"], self.initial_bids[1]["lotValues"][0]["value"]["amount"]
)
self.assertEqual(
auction["bids"][0]["lotValues"][1]["value"]["amount"], self.initial_bids[0]["lotValues"][1]["value"]["amount"]
)
self.assertEqual(
auction["bids"][1]["lotValues"][1]["value"]["amount"], self.initial_bids[1]["lotValues"][1]["value"]["amount"]
)
self.assertEqual(auction["bids"][0]["parameters"][0]["code"], self.initial_bids[0]["parameters"][0]["code"])
self.assertEqual(auction["bids"][0]["parameters"][0]["value"], self.initial_bids[0]["parameters"][0]["value"])
self.assertEqual(auction["bids"][0]["parameters"][1]["code"], self.initial_bids[0]["parameters"][1]["code"])
self.assertEqual(auction["bids"][0]["parameters"][1]["value"], self.initial_bids[0]["parameters"][1]["value"])
self.assertIn("features", auction)
self.assertIn("parameters", auction["bids"][0])
self.set_status("active.qualification")
response = self.app.get("/tenders/{}/auction".format(self.tender_id), status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't get auction info in current (active.qualification) tender status",
)
def post_tender_lots_auction_features(self):
self.app.authorization = ("Basic", ("auction", ""))
lot_id = self.initial_lots[0]["id"]
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": {}}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't report auction results in current ({}) tender status".format(self.forbidden_auction_actions_status),
)
self.set_status("active.auction")
response = self.app.post_json(
"/tenders/{}/auction".format(self.tender_id),
{"data": {"bids": [{"invalid_field": "invalid_value"}]}},
status=422,
)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"],
[{"description": {"invalid_field": "Rogue field"}, "location": "body", "name": "bids"}],
)
patch_data = {
"bids": [
{
"id": self.initial_bids[-1]["id"],
"lotValues": [{"value": {"amount": 409, "currency": "UAH", "valueAddedTaxIncluded": True}}],
}
]
}
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"], ["Number of auction results did not match the number of tender bids"]
)
update_patch_data(self, patch_data, key="lotValues", start=-2, interval=-1)
patch_data["bids"][-1]["id"] = "some_id"
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], {"id": ["Hash value is wrong length."]})
patch_data["bids"][-1]["id"] = "00000000000000000000000000000000"
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(response.json["errors"][0]["description"], ["Auction bids should be identical to the tender bids"])
patch_data = {
"bids": [
{"lotValues": [{}, {}, {}]}
for b in self.initial_bids
]
}
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
["Number of lots of auction results did not match the number of tender lots"],
)
patch_data = {
"bids": [
{"lotValues": [
{"relatedLot": b["lotValues"][0]["relatedLot"]},
{"relatedLot": b["lotValues"][0]["relatedLot"]},
]}
for b in self.initial_bids
]
}
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=422)
self.assertEqual(response.status, "422 Unprocessable Entity")
self.assertEqual(response.content_type, "application/json")
# self.assertEqual(response.json['errors'][0]["description"], [{u'lotValues': [{u'relatedLot': [u'relatedLot should be one of lots of bid']}]}])
self.assertEqual(
response.json["errors"][0]["description"],
["Auction bid.lotValues should be identical to the tender bid.lotValues"]
)
patch_data = {
"bids": [
{"lotValues": [
{"value": {"amount": 1 + n}}
for n, l in enumerate(b["lotValues"])
]}
for b in self.initial_bids
]
}
for lot in self.initial_lots:
response = self.app.post_json("/tenders/{}/auction/{}".format(self.tender_id, lot["id"]), {"data": patch_data})
self.assertEqual(response.status, "200 OK")
self.assertEqual(response.content_type, "application/json")
tender = response.json["data"]
self.assertIn("features", tender)
self.assertIn("parameters", tender["bids"][0])
for b in tender["bids"]:
self.assertEqual(b["lotValues"][0]["value"]["amount"], 1)
self.assertEqual(b["lotValues"][1]["value"]["amount"], 2)
self.assertEqual("active.qualification", tender["status"])
self.assertIn("tenderers", tender["bids"][0])
self.assertIn("name", tender["bids"][0]["tenderers"][0])
# self.assertIn(tender["awards"][0]["id"], response.headers['Location'])
self.assertEqual(tender["awards"][0]["bid_id"], self.initial_bids[1]["id"])
self.assertEqual(tender["awards"][0]["value"]["amount"], patch_data["bids"][0]["lotValues"][0]["value"]["amount"])
self.assertEqual(tender["awards"][0]["suppliers"], self.initial_bids[0]["tenderers"])
response = self.app.post_json(f"/tenders/{self.tender_id}/auction/{lot_id}", {"data": patch_data}, status=403)
self.assertEqual(response.status, "403 Forbidden")
self.assertEqual(response.content_type, "application/json")
self.assertEqual(
response.json["errors"][0]["description"],
"Can't report auction results in current (active.qualification) tender status",
)
# === Artificial Neural Network/and,_or,_nor_gate_implementation_using_ann.py ===
# === repo: shyammarjit/Deep-Learning (MIT) ===
# -*- coding: utf-8 -*-
"""AND, OR, NOR gate implementation using ANN.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/github/shyammarjit/Deep-Learning/blob/main/AND%2C_OR%2C_NOR_gate_implementation_using_ANN.ipynb
#Implementation of Artificial Neural Network for NOR Logic Gate with 2-bit Binary Input.
"""
# Import Python Libraries
import numpy as np
from matplotlib import pyplot as plt
# Sigmoid Function Implementation
def sigmoid(z):
return 1 / (1 + np.exp(-z))
# Initialization of the neural network parameters:
# weights are drawn from a standard normal distribution (np.random.randn),
# biases are initialized to 0
def initializeParameters(inputFeatures, neuronsInHiddenLayers, outputFeatures):
W1 = np.random.randn(neuronsInHiddenLayers, inputFeatures)
W2 = np.random.randn(outputFeatures, neuronsInHiddenLayers)
b1 = np.zeros((neuronsInHiddenLayers, 1))
b2 = np.zeros((outputFeatures, 1))
parameters = {"W1" : W1, "b1": b1,
"W2" : W2, "b2": b2}
return parameters
# Forward Propagation
def forwardPropagation(X, Y, parameters):
m = X.shape[1]
W1 = parameters["W1"]
W2 = parameters["W2"]
b1 = parameters["b1"]
b2 = parameters["b2"]
Z1 = np.dot(W1, X) + b1
A1 = sigmoid(Z1)
Z2 = np.dot(W2, A1) + b2
A2 = sigmoid(Z2)
cache = (Z1, A1, W1, b1, Z2, A2, W2, b2)
    # binary cross-entropy: J = -(1/m) * sum(Y*log(A2) + (1-Y)*log(1-A2))
    logprobs = np.multiply(np.log(A2), Y) + np.multiply(np.log(1 - A2), (1 - Y))
    cost = -np.sum(logprobs) / m
return cost, cache, A2
# Backward Propagation
def backwardPropagation(X, Y, cache):
m = X.shape[1]
(Z1, A1, W1, b1, Z2, A2, W2, b2) = cache
    dZ2 = A2 - Y
    dW2 = np.dot(dZ2, A1.T) / m
    db2 = np.sum(dZ2, axis=1, keepdims=True) / m
    dA1 = np.dot(W2.T, dZ2)
    dZ1 = np.multiply(dA1, A1 * (1 - A1))  # sigmoid'(Z1) = A1 * (1 - A1)
    dW1 = np.dot(dZ1, X.T) / m
    db1 = np.sum(dZ1, axis=1, keepdims=True) / m
gradients = {"dZ2": dZ2, "dW2": dW2, "db2": db2,
"dZ1": dZ1, "dW1": dW1, "db1": db1}
return gradients
# Updating the weights based on the negative gradients
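# Gradient-descent update rule: theta <- theta - learningRate * dJ/dtheta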
def updateParameters(parameters, gradients, learningRate):
parameters["W1"] = parameters["W1"] - learningRate * gradients["dW1"]
parameters["W2"] = parameters["W2"] - learningRate * gradients["dW2"]
parameters["b1"] = parameters["b1"] - learningRate * gradients["db1"]
parameters["b2"] = parameters["b2"] - learningRate * gradients["db2"]
return parameters
# Model to learn the NOR truth table
X = np.array([[0, 0, 1, 1], [0, 1, 0, 1]]) # NOR input
Y = np.array([[1, 0, 0, 0]]) # NOR output
# Define model parameters
neuronsInHiddenLayers = 2 # number of hidden layer neurons (2)
inputFeatures = X.shape[0] # number of input features (2)
outputFeatures = Y.shape[0] # number of output features (1)
parameters = initializeParameters(inputFeatures, neuronsInHiddenLayers, outputFeatures)
epoch = 100000
learningRate = 0.01
losses = np.zeros((epoch, 1))
for i in range(epoch):
losses[i, 0], cache, A2 = forwardPropagation(X, Y, parameters)
gradients = backwardPropagation(X, Y, cache)
parameters = updateParameters(parameters, gradients, learningRate)
# Evaluating the performance
plt.figure()
plt.plot(losses)
plt.title("Implementation of Artificial Neural Network for NOR Logic Gate with 2-bit Binary Input.")
plt.xlabel("EPOCHS")
plt.ylabel("Loss value")
plt.show()
# Testing
X = np.array([[1, 1, 0, 0], [0, 1, 0, 1]]) # NOR input
cost, _, A2 = forwardPropagation(X, Y, parameters)  # Y still holds the training labels, so this cost is not meaningful here; only A2 is used
prediction = (A2 > 0.5) * 1.0
# print(A2)
print("INPUT: \n"+str(X))
print("OUTPUT: " + str(prediction))
"""#Implementation of Artificial Neural Network for AND Logic Gate with 2-bit Binary Input.
"""
# Import Python Libraries
import numpy as np
from matplotlib import pyplot as plt
# Sigmoid Function
def sigmoid(z):
return 1 / (1 + np.exp(-z))
# Initialization of the neural network parameters
# Weights are drawn from a standard normal distribution via np.random.randn
# Bias values are initialized to 0
def initializeParameters(inputFeatures, neuronsInHiddenLayers, outputFeatures):
W1 = np.random.randn(neuronsInHiddenLayers, inputFeatures)
W2 = np.random.randn(outputFeatures, neuronsInHiddenLayers)
b1 = np.zeros((neuronsInHiddenLayers, 1))
b2 = np.zeros((outputFeatures, 1))
parameters = {"W1" : W1, "b1": b1,
"W2" : W2, "b2": b2}
return parameters
# Forward Propagation
def forwardPropagation(X, Y, parameters):
m = X.shape[1]
W1 = parameters["W1"]
W2 = parameters["W2"]
b1 = parameters["b1"]
b2 = parameters["b2"]
Z1 = np.dot(W1, X) + b1
A1 = sigmoid(Z1)
Z2 = np.dot(W2, A1) + b2
A2 = sigmoid(Z2)
cache = (Z1, A1, W1, b1, Z2, A2, W2, b2)
logprobs = np.multiply(np.log(A2), Y) + np.multiply(np.log(1 - A2), (1 - Y))
cost = -np.sum(logprobs) / m
return cost, cache, A2
# Backward Propagation
def backwardPropagation(X, Y, cache):
m = X.shape[1]
(Z1, A1, W1, b1, Z2, A2, W2, b2) = cache
dZ2 = A2 - Y
dW2 = np.dot(dZ2, A1.T) / m
    db2 = np.sum(dZ2, axis = 1, keepdims = True) / m  # averaged over the m examples, matching db1
    dA1 = np.dot(W2.T, dZ2)
    dZ1 = np.multiply(dA1, A1 * (1 - A1))
dW1 = np.dot(dZ1, X.T) / m
db1 = np.sum(dZ1, axis = 1, keepdims = True) / m
gradients = {"dZ2": dZ2, "dW2": dW2, "db2": db2,
"dZ1": dZ1, "dW1": dW1, "db1": db1}
return gradients
# Updating the weights based on the negative gradients
def updateParameters(parameters, gradients, learningRate):
parameters["W1"] = parameters["W1"] - learningRate * gradients["dW1"]
parameters["W2"] = parameters["W2"] - learningRate * gradients["dW2"]
parameters["b1"] = parameters["b1"] - learningRate * gradients["db1"]
parameters["b2"] = parameters["b2"] - learningRate * gradients["db2"]
return parameters
# Model to learn the AND truth table
X = np.array([[0, 0, 1, 1], [0, 1, 0, 1]]) # AND input
Y = np.array([[0, 0, 0, 1]]) # AND output
# Define model parameters
neuronsInHiddenLayers = 2 # number of hidden layer neurons (2)
inputFeatures = X.shape[0] # number of input features (2)
outputFeatures = Y.shape[0] # number of output features (1)
parameters = initializeParameters(inputFeatures, neuronsInHiddenLayers, outputFeatures)
epoch = 100000
learningRate = 0.01
losses = np.zeros((epoch, 1))
for i in range(epoch):
losses[i, 0], cache, A2 = forwardPropagation(X, Y, parameters)
gradients = backwardPropagation(X, Y, cache)
parameters = updateParameters(parameters, gradients, learningRate)
# Evaluating the performance
plt.figure()
plt.plot(losses)
plt.title("Implementation of Artificial Neural Network for AND Logic Gate with 2-bit Binary Input")
plt.xlabel("EPOCHS")
plt.ylabel("Loss value")
plt.show()
# Testing
X = np.array([[1, 1, 0, 0], [0, 1, 0, 1]]) # AND input
cost, _, A2 = forwardPropagation(X, Y, parameters)  # Y still holds the training labels, so this cost is not meaningful here; only A2 is used
prediction = (A2 > 0.5) * 1.0
# print(A2)
print("INPUT: \n"+str(X))
print("OUTPUT: " + str(prediction))
"""#Implementation of Artificial Neural Network for OR Logic Gate with 2-bit Binary Input"""
# Import Python Libraries
import numpy as np
from matplotlib import pyplot as plt
# Sigmoid Function
def sigmoid(z):
return 1 / (1 + np.exp(-z))
# Initialization of the neural network parameters
# Weights are drawn from a standard normal distribution via np.random.randn
# Bias values are initialized to 0
def initializeParameters(inputFeatures, neuronsInHiddenLayers, outputFeatures):
W1 = np.random.randn(neuronsInHiddenLayers, inputFeatures)
W2 = np.random.randn(outputFeatures, neuronsInHiddenLayers)
b1 = np.zeros((neuronsInHiddenLayers, 1))
b2 = np.zeros((outputFeatures, 1))
parameters = {"W1" : W1, "b1": b1,
"W2" : W2, "b2": b2}
return parameters
# Forward Propagation
def forwardPropagation(X, Y, parameters):
m = X.shape[1]
W1 = parameters["W1"]
W2 = parameters["W2"]
b1 = parameters["b1"]
b2 = parameters["b2"]
Z1 = np.dot(W1, X) + b1
A1 = sigmoid(Z1)
Z2 = np.dot(W2, A1) + b2
A2 = sigmoid(Z2)
cache = (Z1, A1, W1, b1, Z2, A2, W2, b2)
logprobs = np.multiply(np.log(A2), Y) + np.multiply(np.log(1 - A2), (1 - Y))
cost = -np.sum(logprobs) / m
return cost, cache, A2
# Backward Propagation
def backwardPropagation(X, Y, cache):
m = X.shape[1]
(Z1, A1, W1, b1, Z2, A2, W2, b2) = cache
dZ2 = A2 - Y
dW2 = np.dot(dZ2, A1.T) / m
    db2 = np.sum(dZ2, axis = 1, keepdims = True) / m  # averaged over the m examples, matching db1
    dA1 = np.dot(W2.T, dZ2)
    dZ1 = np.multiply(dA1, A1 * (1 - A1))
dW1 = np.dot(dZ1, X.T) / m
db1 = np.sum(dZ1, axis = 1, keepdims = True) / m
gradients = {"dZ2": dZ2, "dW2": dW2, "db2": db2,
"dZ1": dZ1, "dW1": dW1, "db1": db1}
return gradients
# Updating the weights based on the negative gradients
def updateParameters(parameters, gradients, learningRate):
parameters["W1"] = parameters["W1"] - learningRate * gradients["dW1"]
parameters["W2"] = parameters["W2"] - learningRate * gradients["dW2"]
parameters["b1"] = parameters["b1"] - learningRate * gradients["db1"]
parameters["b2"] = parameters["b2"] - learningRate * gradients["db2"]
return parameters
# Model to learn the OR truth table
X = np.array([[0, 0, 1, 1], [0, 1, 0, 1]]) # OR input
Y = np.array([[0, 1, 1, 1]]) # OR output
# Define model parameters
neuronsInHiddenLayers = 2 # number of hidden layer neurons (2)
inputFeatures = X.shape[0] # number of input features (2)
outputFeatures = Y.shape[0] # number of output features (1)
parameters = initializeParameters(inputFeatures, neuronsInHiddenLayers, outputFeatures)
epoch = 100000
learningRate = 0.01
losses = np.zeros((epoch, 1))
for i in range(epoch):
losses[i, 0], cache, A2 = forwardPropagation(X, Y, parameters)
gradients = backwardPropagation(X, Y, cache)
parameters = updateParameters(parameters, gradients, learningRate)
# Evaluating the performance
plt.figure()
plt.plot(losses)
plt.title("Implementation of Artificial Neural Network for OR Logic Gate with 2-bit Binary Input")
plt.xlabel("EPOCHS")
plt.ylabel("Loss value")
plt.show()
# Testing
X = np.array([[1, 1, 0, 0], [0, 1, 0, 1]]) # OR input
cost, _, A2 = forwardPropagation(X, Y, parameters)  # Y still holds the training labels, so this cost is not meaningful here; only A2 is used
prediction = (A2 > 0.5) * 1.0
# print(A2)
print("INPUT: \n"+str(X))
print("OUTPUT: " + str(prediction)) | 34.035256 | 133 | 0.642245 | 1,440 | 10,619 | 4.729861 | 0.109028 | 0.004992 | 0.026428 | 0.039642 | 0.97137 | 0.968727 | 0.954192 | 0.954192 | 0.954192 | 0.954192 | 0 | 0.055596 | 0.21744 | 10,619 | 312 | 134 | 34.035256 | 0.764019 | 0.183256 | 0 | 0.971014 | 1 | 0 | 0.066781 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.072464 | false | 0 | 0.028986 | 0.014493 | 0.173913 | 0.028986 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
78546e2f6c2b66ebe2312bf43ac429fa20c9bf17 | 162 | py | Python | gym_muller_brown/envs/__init__.py | jjgoings/gym-muller_brown | 41467f4137008045c759a499cdcbb3aadf04f70c | [
"MIT"
] | 1 | 2022-01-11T17:47:40.000Z | 2022-01-11T17:47:40.000Z | gym_muller_brown/envs/__init__.py | jjgoings/gym-muller_brown | 41467f4137008045c759a499cdcbb3aadf04f70c | [
"MIT"
] | null | null | null | gym_muller_brown/envs/__init__.py | jjgoings/gym-muller_brown | 41467f4137008045c759a499cdcbb3aadf04f70c | [
"MIT"
] | null | null | null | from gym_muller_brown.envs.muller_brown_discrete import MullerBrownDiscreteEnv
from gym_muller_brown.envs.muller_brown_continuous import MullerBrownContinuousEnv
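# Both re-exported classes are assumed to follow the standard gym.Env
# interface; a typical interaction loop might look like this (sketch --
# constructor arguments and the registered env id are not shown in this file):
#     env = MullerBrownDiscreteEnv()
#     obs = env.reset()
#     obs, reward, done, info = env.step(env.action_space.sample())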
| 54 | 82 | 0.925926 | 20 | 162 | 7.1 | 0.5 | 0.309859 | 0.183099 | 0.253521 | 0.464789 | 0.464789 | 0.464789 | 0 | 0 | 0 | 0 | 0 | 0.049383 | 162 | 2 | 83 | 81 | 0.922078 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
788b55359f7aeb7be96e5e37ce23709876832553 | 545 | py | Python | eval_covid20cases_timm-regnetx_002_Posterize.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | eval_covid20cases_timm-regnetx_002_Posterize.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | eval_covid20cases_timm-regnetx_002_Posterize.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | import os
ls=["python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_0_Posterize.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_1_Posterize.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_2_Posterize.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_3_Posterize.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_4_Posterize.yml",
]
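# os.system (used below) silently ignores failing commands; subprocess.run
# with check=True would raise on a non-zero exit code. A possible alternative:
#     import shlex, subprocess
#     for l in ls:
#         subprocess.run(shlex.split(l), check=True)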
for l in ls:
os.system(l) | 49.545455 | 103 | 0.847706 | 80 | 545 | 5.4 | 0.3 | 0.115741 | 0.138889 | 0.219907 | 0.898148 | 0.898148 | 0.898148 | 0.898148 | 0.898148 | 0.898148 | 0 | 0.058366 | 0.056881 | 545 | 11 | 104 | 49.545455 | 0.782101 | 0 | 0 | 0 | 0 | 0 | 0.879121 | 0.650183 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
7896fac6179524e20207804c60b7fa4621792e8c | 19,825 | py | Python | apis/nb/clients/inventory_manager_client/LicenseApi.py | CiscoDevNet/APIC-EM-Generic-Scripts- | 74211d9488f1e77cf56ef86dba20ec8e8eb49cc1 | [
"ECL-2.0",
"Apache-2.0"
] | 45 | 2016-06-09T15:41:25.000Z | 2019-08-06T17:13:11.000Z | apis/nb/clients/inventory_manager_client/LicenseApi.py | CiscoDevNet/APIC-EM-Generic-Scripts | 74211d9488f1e77cf56ef86dba20ec8e8eb49cc1 | [
"ECL-2.0",
"Apache-2.0"
] | 36 | 2016-06-12T03:03:56.000Z | 2017-03-13T18:20:11.000Z | apis/nb/clients/inventory_manager_client/LicenseApi.py | CiscoDevNet/APIC-EM-Generic-Scripts | 74211d9488f1e77cf56ef86dba20ec8e8eb49cc1 | [
"ECL-2.0",
"Apache-2.0"
] | 15 | 2016-06-22T03:51:37.000Z | 2019-07-10T10:06:02.000Z | #!/usr/bin/env python
#pylint: skip-file
# This source code is licensed under the Apache license found in the
# LICENSE file in the root directory of this project.
import sys
import os
import urllib.request, urllib.parse, urllib.error
from .models import *
class LicenseApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
def getLicenseInfo(self, **kwargs):
"""Retrieves the license info for a network device based on filters
Args:
deviceId, str: Network Device Id (required)
limit, str: limit (required)
offset, str: offset (required)
sortBy, str: sortBy (required)
order, str: order (required)
countedList, list[str]: countedList (required)
eulaStatusList, list[str]: eulaStatusList (required)
evalPeriodLeftList, list[str]: evalPeriodLeftList (required)
evalPeriodUsedList, list[str]: evalPeriodUsedList (required)
expiredDateList, list[str]: expiredDateList (required)
expiredPeriodList, list[str]: expiredPeriodList (required)
featureVersionList, list[str]: featureVersionList (required)
hostIdList, list[str]: hostIdList (required)
isCountedList, list[str]: isCountedList (required)
isEulaAcceptedList, list[str]: isEulaAcceptedList (required)
isEulaApplicableList, list[str]: isEulaApplicableList (required)
isTechnologyLicenseList, list[str]: isTechnologyLicenseList (required)
licenseFileCountList, list[str]: licenseFileCountList (required)
licenseFileNameList, list[str]: licenseFileNameList (required)
licenseIndexList, list[str]: licenseIndexList (required)
maxUsageCountList, list[str]: maxUsageCountList (required)
parentIdList, list[str]: parentIdList (required)
physicalIndexList, list[str]: physicalIndexList (required)
priorityList, list[str]: priorityList (required)
provisionStateList, list[str]: provisionStateList (required)
statusList, list[str]: statusList (required)
storedUsedList, list[str]: storedUsedList (required)
storeNameList, list[str]: storeNameList (required)
totalCountList, list[str]: totalCountList (required)
licenseTypeList, list[str]: licenseTypeList (required)
usageCountList, list[str]: usageCountList (required)
usageCountRemainingList, list[str]: usageCountRemainingList (required)
validityPeriodList, list[str]: validityPeriodList (required)
validityPeriodRemainingList, list[str]: validityPeriodRemainingList (required)
Returns: LicenseInfoListResult
"""
allParams = ['deviceId', 'limit', 'offset', 'sortBy', 'order', 'countedList', 'eulaStatusList', 'evalPeriodLeftList', 'evalPeriodUsedList', 'expiredDateList', 'expiredPeriodList', 'featureVersionList', 'hostIdList', 'isCountedList', 'isEulaAcceptedList', 'isEulaApplicableList', 'isTechnologyLicenseList', 'licenseFileCountList', 'licenseFileNameList', 'licenseIndexList', 'maxUsageCountList', 'parentIdList', 'physicalIndexList', 'priorityList', 'provisionStateList', 'statusList', 'storedUsedList', 'storeNameList', 'totalCountList', 'licenseTypeList', 'usageCountList', 'usageCountRemainingList', 'validityPeriodList', 'validityPeriodRemainingList']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getLicenseInfo" % key)
params[key] = val
del params['kwargs']
resourcePath = '/license-info/network-device/{deviceId}'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('limit' in params):
queryParams['limit'] = self.apiClient.toPathValue(params['limit'])
if ('offset' in params):
queryParams['offset'] = self.apiClient.toPathValue(params['offset'])
if ('sortBy' in params):
queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy'])
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('countedList' in params):
queryParams['countedList'] = self.apiClient.toPathValue(params['countedList'])
if ('eulaStatusList' in params):
queryParams['eulaStatusList'] = self.apiClient.toPathValue(params['eulaStatusList'])
if ('evalPeriodLeftList' in params):
queryParams['evalPeriodLeftList'] = self.apiClient.toPathValue(params['evalPeriodLeftList'])
if ('evalPeriodUsedList' in params):
queryParams['evalPeriodUsedList'] = self.apiClient.toPathValue(params['evalPeriodUsedList'])
if ('expiredDateList' in params):
queryParams['expiredDateList'] = self.apiClient.toPathValue(params['expiredDateList'])
if ('expiredPeriodList' in params):
queryParams['expiredPeriodList'] = self.apiClient.toPathValue(params['expiredPeriodList'])
if ('featureVersionList' in params):
queryParams['featureVersionList'] = self.apiClient.toPathValue(params['featureVersionList'])
if ('hostIdList' in params):
queryParams['hostIdList'] = self.apiClient.toPathValue(params['hostIdList'])
if ('isCountedList' in params):
queryParams['isCountedList'] = self.apiClient.toPathValue(params['isCountedList'])
if ('isEulaAcceptedList' in params):
queryParams['isEulaAcceptedList'] = self.apiClient.toPathValue(params['isEulaAcceptedList'])
if ('isEulaApplicableList' in params):
queryParams['isEulaApplicableList'] = self.apiClient.toPathValue(params['isEulaApplicableList'])
if ('isTechnologyLicenseList' in params):
queryParams['isTechnologyLicenseList'] = self.apiClient.toPathValue(params['isTechnologyLicenseList'])
if ('licenseFileCountList' in params):
queryParams['licenseFileCountList'] = self.apiClient.toPathValue(params['licenseFileCountList'])
if ('licenseFileNameList' in params):
queryParams['licenseFileNameList'] = self.apiClient.toPathValue(params['licenseFileNameList'])
if ('licenseIndexList' in params):
queryParams['licenseIndexList'] = self.apiClient.toPathValue(params['licenseIndexList'])
if ('maxUsageCountList' in params):
queryParams['maxUsageCountList'] = self.apiClient.toPathValue(params['maxUsageCountList'])
if ('parentIdList' in params):
queryParams['parentIdList'] = self.apiClient.toPathValue(params['parentIdList'])
if ('physicalIndexList' in params):
queryParams['physicalIndexList'] = self.apiClient.toPathValue(params['physicalIndexList'])
if ('priorityList' in params):
queryParams['priorityList'] = self.apiClient.toPathValue(params['priorityList'])
if ('provisionStateList' in params):
queryParams['provisionStateList'] = self.apiClient.toPathValue(params['provisionStateList'])
if ('statusList' in params):
queryParams['statusList'] = self.apiClient.toPathValue(params['statusList'])
if ('storedUsedList' in params):
queryParams['storedUsedList'] = self.apiClient.toPathValue(params['storedUsedList'])
if ('storeNameList' in params):
queryParams['storeNameList'] = self.apiClient.toPathValue(params['storeNameList'])
if ('totalCountList' in params):
queryParams['totalCountList'] = self.apiClient.toPathValue(params['totalCountList'])
if ('licenseTypeList' in params):
queryParams['licenseTypeList'] = self.apiClient.toPathValue(params['licenseTypeList'])
if ('usageCountList' in params):
queryParams['usageCountList'] = self.apiClient.toPathValue(params['usageCountList'])
if ('usageCountRemainingList' in params):
queryParams['usageCountRemainingList'] = self.apiClient.toPathValue(params['usageCountRemainingList'])
if ('validityPeriodList' in params):
queryParams['validityPeriodList'] = self.apiClient.toPathValue(params['validityPeriodList'])
if ('validityPeriodRemainingList' in params):
queryParams['validityPeriodRemainingList'] = self.apiClient.toPathValue(params['validityPeriodRemainingList'])
if ('deviceId' in params):
replacement = str(self.apiClient.toPathValue(params['deviceId']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'deviceId' + '}',
replacement)
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'LicenseInfoListResult')
return responseObject
def getLicenseInfoCount(self, **kwargs):
"""Retrieves the number of licenses for a network device based on filters
Args:
deviceId, str: Network Device Id (required)
countedList, list[str]: countedList (required)
eulaStatusList, list[str]: eulaStatusList (required)
evalPeriodLeftList, list[str]: evalPeriodLeftList (required)
evalPeriodUsedList, list[str]: evalPeriodUsedList (required)
expiredDateList, list[str]: expiredDateList (required)
expiredPeriodList, list[str]: expiredPeriodList (required)
featureVersionList, list[str]: featureVersionList (required)
hostIdList, list[str]: hostIdList (required)
isCountedList, list[str]: isCountedList (required)
isEulaAcceptedList, list[str]: isEulaAcceptedList (required)
isEulaApplicableList, list[str]: isEulaApplicableList (required)
isTechnologyLicenseList, list[str]: isTechnologyLicenseList (required)
licenseFileCountList, list[str]: licenseFileCountList (required)
licenseFileNameList, list[str]: licenseFileNameList (required)
licenseIndexList, list[str]: licenseIndexList (required)
maxUsageCountList, list[str]: maxUsageCountList (required)
parentIdList, list[str]: parentIdList (required)
physicalIndexList, list[str]: physicalIndexList (required)
priorityList, list[str]: priorityList (required)
provisionStateList, list[str]: provisionStateList (required)
statusList, list[str]: statusList (required)
storedUsedList, list[str]: storedUsedList (required)
storeNameList, list[str]: storeNameList (required)
totalCountList, list[str]: totalCountList (required)
licenseTypeList, list[str]: licenseTypeList (required)
usageCountList, list[str]: usageCountList (required)
usageCountRemainingList, list[str]: usageCountRemainingList (required)
validityPeriodList, list[str]: validityPeriodList (required)
validityPeriodRemainingList, list[str]: validityPeriodRemainingList (required)
Returns: CountResult
"""
allParams = ['deviceId', 'countedList', 'eulaStatusList', 'evalPeriodLeftList', 'evalPeriodUsedList', 'expiredDateList', 'expiredPeriodList', 'featureVersionList', 'hostIdList', 'isCountedList', 'isEulaAcceptedList', 'isEulaApplicableList', 'isTechnologyLicenseList', 'licenseFileCountList', 'licenseFileNameList', 'licenseIndexList', 'maxUsageCountList', 'parentIdList', 'physicalIndexList', 'priorityList', 'provisionStateList', 'statusList', 'storedUsedList', 'storeNameList', 'totalCountList', 'licenseTypeList', 'usageCountList', 'usageCountRemainingList', 'validityPeriodList', 'validityPeriodRemainingList']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getLicenseInfoCount" % key)
params[key] = val
del params['kwargs']
resourcePath = '/license-info/network-device/{deviceId}/count'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('countedList' in params):
queryParams['countedList'] = self.apiClient.toPathValue(params['countedList'])
if ('eulaStatusList' in params):
queryParams['eulaStatusList'] = self.apiClient.toPathValue(params['eulaStatusList'])
if ('evalPeriodLeftList' in params):
queryParams['evalPeriodLeftList'] = self.apiClient.toPathValue(params['evalPeriodLeftList'])
if ('evalPeriodUsedList' in params):
queryParams['evalPeriodUsedList'] = self.apiClient.toPathValue(params['evalPeriodUsedList'])
if ('expiredDateList' in params):
queryParams['expiredDateList'] = self.apiClient.toPathValue(params['expiredDateList'])
if ('expiredPeriodList' in params):
queryParams['expiredPeriodList'] = self.apiClient.toPathValue(params['expiredPeriodList'])
if ('featureVersionList' in params):
queryParams['featureVersionList'] = self.apiClient.toPathValue(params['featureVersionList'])
if ('hostIdList' in params):
queryParams['hostIdList'] = self.apiClient.toPathValue(params['hostIdList'])
if ('isCountedList' in params):
queryParams['isCountedList'] = self.apiClient.toPathValue(params['isCountedList'])
if ('isEulaAcceptedList' in params):
queryParams['isEulaAcceptedList'] = self.apiClient.toPathValue(params['isEulaAcceptedList'])
if ('isEulaApplicableList' in params):
queryParams['isEulaApplicableList'] = self.apiClient.toPathValue(params['isEulaApplicableList'])
if ('isTechnologyLicenseList' in params):
queryParams['isTechnologyLicenseList'] = self.apiClient.toPathValue(params['isTechnologyLicenseList'])
if ('licenseFileCountList' in params):
queryParams['licenseFileCountList'] = self.apiClient.toPathValue(params['licenseFileCountList'])
if ('licenseFileNameList' in params):
queryParams['licenseFileNameList'] = self.apiClient.toPathValue(params['licenseFileNameList'])
if ('licenseIndexList' in params):
queryParams['licenseIndexList'] = self.apiClient.toPathValue(params['licenseIndexList'])
if ('maxUsageCountList' in params):
queryParams['maxUsageCountList'] = self.apiClient.toPathValue(params['maxUsageCountList'])
if ('parentIdList' in params):
queryParams['parentIdList'] = self.apiClient.toPathValue(params['parentIdList'])
if ('physicalIndexList' in params):
queryParams['physicalIndexList'] = self.apiClient.toPathValue(params['physicalIndexList'])
if ('priorityList' in params):
queryParams['priorityList'] = self.apiClient.toPathValue(params['priorityList'])
if ('provisionStateList' in params):
queryParams['provisionStateList'] = self.apiClient.toPathValue(params['provisionStateList'])
if ('statusList' in params):
queryParams['statusList'] = self.apiClient.toPathValue(params['statusList'])
if ('storedUsedList' in params):
queryParams['storedUsedList'] = self.apiClient.toPathValue(params['storedUsedList'])
if ('storeNameList' in params):
queryParams['storeNameList'] = self.apiClient.toPathValue(params['storeNameList'])
if ('totalCountList' in params):
queryParams['totalCountList'] = self.apiClient.toPathValue(params['totalCountList'])
if ('licenseTypeList' in params):
queryParams['licenseTypeList'] = self.apiClient.toPathValue(params['licenseTypeList'])
if ('usageCountList' in params):
queryParams['usageCountList'] = self.apiClient.toPathValue(params['usageCountList'])
if ('usageCountRemainingList' in params):
queryParams['usageCountRemainingList'] = self.apiClient.toPathValue(params['usageCountRemainingList'])
if ('validityPeriodList' in params):
queryParams['validityPeriodList'] = self.apiClient.toPathValue(params['validityPeriodList'])
if ('validityPeriodRemainingList' in params):
queryParams['validityPeriodRemainingList'] = self.apiClient.toPathValue(params['validityPeriodRemainingList'])
if ('deviceId' in params):
replacement = str(self.apiClient.toPathValue(params['deviceId']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'deviceId' + '}',
replacement)
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'CountResult')
return responseObject
def getDeviceIdByFilename(self, **kwargs):
"""Retrieves list of devices with given license file name
Args:
licenseFileName, str: License file name (required)
Returns: SuccessResultList
"""
allParams = ['licenseFileName']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getDeviceIdByFilename" % key)
params[key] = val
del params['kwargs']
resourcePath = '/network-device/license/{licenseFileName}'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('licenseFileName' in params):
replacement = str(self.apiClient.toPathValue(params['licenseFileName']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'licenseFileName' + '}',
replacement)
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'SuccessResultList')
return responseObject
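# Example usage (sketch): `apiClient` is the generated ApiClient instance this
# class expects, and the filter values shown are placeholders, not real ids.
#     licenseApi = LicenseApi(apiClient)
#     result = licenseApi.getLicenseInfo(deviceId="<device-uuid>",
#                                        limit="10", offset="1",
#                                        sortBy="licenseIndex", order="asc")
#     if result:
#         print(result)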
| 34.181034 | 660 | 0.656393 | 1,515 | 19,825 | 8.586799 | 0.09637 | 0.07295 | 0.119917 | 0.149896 | 0.90545 | 0.90545 | 0.90545 | 0.90545 | 0.89292 | 0.89292 | 0 | 0 | 0.232787 | 19,825 | 579 | 661 | 34.240069 | 0.855293 | 0.224363 | 0 | 0.850679 | 0 | 0 | 0.312877 | 0.05087 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0181 | false | 0 | 0.0181 | 0 | 0.067873 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
78aea9f17459101e9ba3f33b9cb78e94cc760a83 | 1,204 | py | Python | python/8.py | kylekanos/project-euler-1 | af7089356a4cea90f8ef331cfdc65e696def6140 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | python/8.py | kylekanos/project-euler-1 | af7089356a4cea90f8ef331cfdc65e696def6140 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | python/8.py | kylekanos/project-euler-1 | af7089356a4cea90f8ef331cfdc65e696def6140 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2019-09-17T00:55:58.000Z | 2019-09-17T00:55:58.000Z | #!/usr/bin/env python
import re
# just implementation
s = """
73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450
"""
s = re.sub('\n','',s)
print max(reduce(lambda x,y: x*y, (int(s[i+j]) for j in xrange(5))) for i in xrange(1000 - 5 + 1))  # +1 so the final 5-digit window is included
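# Under Python 3 the same one-liner needs print(), range() and
# functools.reduce; a sketch of the equivalent:
#     from functools import reduce
#     print(max(reduce(lambda x, y: x * y, (int(s[i + j]) for j in range(5)))
#               for i in range(len(s) - 4)))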
| 38.83871 | 92 | 0.921927 | 57 | 1,204 | 19.473684 | 0.789474 | 0.003604 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.87175 | 0.041528 | 1,204 | 30 | 93 | 40.133333 | 0.090121 | 0.033223 | 0 | 0 | 0 | 0 | 0.880379 | 0.860585 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0.04 | null | null | 0.04 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
78b385a786e02b4d842b65096484c1bee5057fce | 55,635 | py | Python | tests/test_metadata.py | jmakovecki/sentinel3 | b0811d9204aa276cac1e4ba2869f5dca96198452 | [
"Apache-2.0"
] | null | null | null | tests/test_metadata.py | jmakovecki/sentinel3 | b0811d9204aa276cac1e4ba2869f5dca96198452 | [
"Apache-2.0"
] | null | null | null | tests/test_metadata.py | jmakovecki/sentinel3 | b0811d9204aa276cac1e4ba2869f5dca96198452 | [
"Apache-2.0"
] | null | null | null | import unittest
import pystac
from pystac.extensions.eo import EOExtension
from pystac.extensions.sat import SatExtension
from stactools.sentinel3.metadata_links import MetadataLinks
from stactools.sentinel3.product_metadata import ProductMetadata
from stactools.sentinel3.properties import (fill_eo_properties,
fill_sat_properties)
from tests import test_data
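# These cases follow standard unittest discovery conventions, so (assuming the
# tests/ package layout implied above) they can be run with, e.g.:
#     python -m unittest tests.test_metadata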
class Sentinel3OLCIMetadataTest(unittest.TestCase):
def test_parses_olci_1_efr_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_OL_1_EFR____20211021T073827_20211021T074112_20211021T091357_"
"0164_077_334_4320_LN1_O_NR_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"brightPixels_percentage":
item.properties["s3:brightPixels_percentage"],
"invalidPixels_percentage":
item.properties["s3:invalidPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"dubiousSamples_percentage":
item.properties["s3:dubiousSamples_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [-44.0441, -83.51, 13.0151, -68.2251],
"datetime": "2021-10-21T07:39:49.724590Z",
"orbit_state": "descending",
"absolute_orbit": 29567,
"relative_orbit": 334,
"instruments": ["OLCI"],
"mode": "EO",
"productType": "OL_1_EFR___",
"salineWaterPixels_percentage": 44.0,
"coastalPixels_percentage": 0.0,
"freshInlandWaterPixels_percentage": 0.0,
"tidalRegionPixels_percentage": 0.0,
"brightPixels_percentage": 99.0,
"invalidPixels_percentage": 1.0,
"cosmeticPixels_percentage": 0.0,
"duplicatedPixels_percentage": 25.0,
"saturatedPixels_percentage": 0.0,
"dubiousSamples_percentage": 0.0,
"shape": [4865, 3749]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_olci_1_err_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3B_OL_1_ERR____20210831T200148_20210831T204600_20210902T011514_"
"2652_056_242______LN1_O_NT_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"brightPixels_percentage":
item.properties["s3:brightPixels_percentage"],
"invalidPixels_percentage":
item.properties["s3:invalidPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"dubiousSamples_percentage":
item.properties["s3:dubiousSamples_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [-179.151, -64.2325, 179.92, 89.5069],
"datetime": "2021-08-31T20:23:54.000366Z",
"orbit_state": "ascending",
"absolute_orbit": 17454,
"relative_orbit": 242,
"instruments": ["OLCI"],
"mode": "EO",
"productType": "OL_1_ERR___",
"salineWaterPixels_percentage": 90.0,
"coastalPixels_percentage": 0.0,
"freshInlandWaterPixels_percentage": 0.0,
"tidalRegionPixels_percentage": 0.0,
"brightPixels_percentage": 47.0,
"invalidPixels_percentage": 3.0,
"cosmeticPixels_percentage": 0.0,
"duplicatedPixels_percentage": 0.0,
"saturatedPixels_percentage": 8e-06,
"dubiousSamples_percentage": 0.0,
"shape": [1217, 15070]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_olci_2_lfr_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_OL_2_LFR____20210523T003029_20210523T003329_20210524T050403_"
"0179_072_102_1980_LN1_O_NT_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"],
"invalidPixels_percentage":
item.properties["s3:invalidPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"dubiousSamples_percentage":
item.properties["s3:dubiousSamples_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [138.497, 49.8938, 164.009, 62.918],
"datetime": "2021-05-23T00:31:59.485583Z",
"orbit_state": "descending",
"absolute_orbit": 27410,
"relative_orbit": 102,
"cloud_cover": 83.0,
"instruments": ["OLCI"],
"mode": "EO",
"productType": "OL_2_LFR___",
"salineWaterPixels_percentage": 4.0,
"coastalPixels_percentage": 0.0082,
"freshInlandWaterPixels_percentage": 0.0,
"tidalRegionPixels_percentage": 1.0,
"landPixels_percentage": 4.0,
"invalidPixels_percentage": 4.0,
"cosmeticPixels_percentage": 0.0,
"duplicatedPixels_percentage": 1.545942,
"saturatedPixels_percentage": 0.0,
"dubiousSamples_percentage": 0.0,
"shape": [4865, 4090]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_olci_2_lrr_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3B_OL_2_LRR____20210731T214325_20210731T222741_20210802T020007_"
"2656_055_186______LN1_O_NT_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"],
"invalidPixels_percentage":
item.properties["s3:invalidPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"dubiousSamples_percentage":
item.properties["s3:dubiousSamples_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [-179.968, -53.7609, 179.943, 89.6231],
"datetime": "2021-07-31T22:05:32.974566Z",
"orbit_state": "ascending",
"absolute_orbit": 17013,
"relative_orbit": 186,
"cloud_cover": 51.0,
"instruments": ["OLCI"],
"mode": "EO",
"productType": "OL_2_LRR___",
"salineWaterPixels_percentage": 35.0,
"coastalPixels_percentage": 0.332161,
"freshInlandWaterPixels_percentage": 0.0,
"tidalRegionPixels_percentage": 0.0,
"landPixels_percentage": 1.0,
"invalidPixels_percentage": 4.0,
"cosmeticPixels_percentage": 0.0,
"duplicatedPixels_percentage": 0.0,
"saturatedPixels_percentage": 0.0,
"dubiousSamples_percentage": 0.0,
"shape": [1217, 15092]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_olci_2_wfr_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_OL_2_WFR____20210604T001016_20210604T001316_20210604T021918_"
"0179_072_273_1440_MAR_O_NR_003.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"],
"invalidPixels_percentage":
item.properties["s3:invalidPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"dubiousSamples_percentage":
item.properties["s3:dubiousSamples_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [-176.303, 76.7724, 179.972, 88.9826],
"datetime": "2021-06-04T00:11:45.867265Z",
"orbit_state": "ascending",
"absolute_orbit": 27581,
"relative_orbit": 273,
"cloud_cover": 67.0,
"instruments": ["OLCI"],
"mode": "EO",
"productType": "OL_2_WFR___",
"salineWaterPixels_percentage": 0.0,
"coastalPixels_percentage": 0.013921,
"freshInlandWaterPixels_percentage": 0.0,
"tidalRegionPixels_percentage": 0.0,
"landPixels_percentage": 0.0,
"invalidPixels_percentage": 3.0,
"cosmeticPixels_percentage": 0.0,
"duplicatedPixels_percentage": 11.701367,
"saturatedPixels_percentage": 0.0,
"dubiousSamples_percentage": 0.0,
"shape": [4865, 4091]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_slstr_1_rbt_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SL_1_RBT____20210930T220914_20210930T221214_20211002T102150_"
"0180_077_043_5400_LN2_O_NT_004.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"outOfRangePixels_percentage":
item.properties["s3:outOfRangePixels_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [-3.34105, -39.7421, 15.4906, -25.8488],
"datetime": "2021-09-30T22:10:43.843538Z",
"orbit_state": "ascending",
"absolute_orbit": 29276,
"relative_orbit": 43,
"cloud_cover": 80.216007,
"instruments": ["SLSTR"],
"mode": "EO",
"productType": "SL_1_RBT___",
"salineWaterPixels_percentage": 100.0,
"landPixels_percentage": 0.0,
"coastalPixels_percentage": 0.0,
"freshInlandWaterPixels_percentage": 0.0,
"tidalRegionPixels_percentage": 0.0,
"cosmeticPixels_percentage": 28.085521,
"duplicatedPixels_percentage": 5.105382,
"saturatedPixels_percentage": 0.0,
"outOfRangePixels_percentage": 0.0,
"shape": [1500, 1200]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_slstr_2_frp_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SL_2_FRP____20210802T000420_20210802T000720_20210803T123912_"
"0179_074_344_2880_LN2_O_NT_004.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"outOfRangePixels_percentage":
item.properties["s3:outOfRangePixels_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [139.182, -3.03934, 154.722, 10.4264],
"datetime": "2021-08-02T00:05:49.503088Z",
"orbit_state": "descending",
"absolute_orbit": 28422,
"relative_orbit": 344,
"cloud_cover": 63.904667,
"instruments": ["SLSTR"],
"mode": "EO",
"productType": "SL_2_FRP___",
"salineWaterPixels_percentage": 99.891,
"landPixels_percentage": 0.109,
"coastalPixels_percentage": 0.017944,
"freshInlandWaterPixels_percentage": 0.000167,
"tidalRegionPixels_percentage": 0.0,
"cosmeticPixels_percentage": 21.585889,
"duplicatedPixels_percentage": 5.461111,
"saturatedPixels_percentage": 0.0,
"outOfRangePixels_percentage": 0.184722,
"shape": [1500, 1200]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_slstr_2_lst_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SL_2_LST____20210510T002955_20210510T003255_20210511T101010_"
"0179_071_301_5760_LN2_O_NT_004.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"outOfRangePixels_percentage":
item.properties["s3:outOfRangePixels_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [-41.5076, -18.6129, -25.5773, -5.01269],
"datetime": "2021-05-10T00:31:24.660731Z",
"orbit_state": "ascending",
"absolute_orbit": 27224,
"relative_orbit": 301,
"cloud_cover": 57.378222,
"instruments": ["SLSTR"],
"mode": "EO",
"productType": "SL_2_LST___",
"salineWaterPixels_percentage": 78.747222,
"landPixels_percentage": 21.252778,
"coastalPixels_percentage": 0.050167,
"freshInlandWaterPixels_percentage": 0.169778,
"tidalRegionPixels_percentage": 0.899167,
"cosmeticPixels_percentage": 21.881167,
"duplicatedPixels_percentage": 5.449222,
"saturatedPixels_percentage": 0.0,
"outOfRangePixels_percentage": 0.0,
"shape": [1500, 1200]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_slstr_2_wst_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3B_SL_2_WST____20210419T051754_20210419T065853_20210420T160434_"
"6059_051_247______MAR_O_NT_003.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"cosmeticPixels_percentage":
item.properties["s3:cosmeticPixels_percentage"],
"duplicatedPixels_percentage":
item.properties["s3:duplicatedPixels_percentage"],
"saturatedPixels_percentage":
item.properties["s3:saturatedPixels_percentage"],
"outOfRangePixels_percentage":
item.properties["s3:outOfRangePixels_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [-175.687, -85.8995, 175.031, 89.0613],
"datetime": "2021-04-19T06:08:23.709828Z",
"orbit_state": "descending",
"absolute_orbit": 15534,
"relative_orbit": 247,
"cloud_cover": 67.421502,
"instruments": ["SLSTR"],
"mode": "EO",
"productType": "SL_2_WST___",
"salineWaterPixels_percentage": 69.464947,
"landPixels_percentage": 30.535053,
"coastalPixels_percentage": 0.0,
"freshInlandWaterPixels_percentage": 0.0,
"tidalRegionPixels_percentage": 0.0,
"cosmeticPixels_percentage": 42.198716,
"duplicatedPixels_percentage": 0.0,
"saturatedPixels_percentage": 0.0,
"outOfRangePixels_percentage": 26.93685,
"shape": [1500, 40394]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_sral_2_lan_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SR_2_LAN____20210611T011438_20210611T012436_20210611T024819_"
"0598_072_373______LN3_O_NR_004.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"lrmModePercentage":
item.properties["s3:lrmModePercentage"],
"sarModePercentage":
item.properties["s3:sarModePercentage"],
"landPercentage":
item.properties["s3:landPercentage"],
"closedSeaPercentage":
item.properties["s3:closedSeaPercentage"],
"continentalIcePercentage":
item.properties["s3:continentalIcePercentage"],
"openOceanPercentage":
item.properties["s3:openOceanPercentage"],
}
expected = {
"bbox": [-19.9677, -81.3739, 110.573, -67.0245],
"datetime": "2021-06-11T01:19:37.201974Z",
"orbit_state": "descending",
"absolute_orbit": 27681,
"relative_orbit": 373,
"instruments": ["SRAL"],
"mode": "EO",
"productType": "SR_2_LAN___",
"lrmModePercentage": 0.0,
"sarModePercentage": 100.0,
"landPercentage": 0.0,
"closedSeaPercentage": 0.0,
"continentalIcePercentage": 97.0,
"openOceanPercentage": 3.0,
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_sral_2_wat_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SR_2_WAT____20210704T012815_20210704T021455_20210729T173140_"
"2800_073_316______MAR_O_NT_004.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"lrmModePercentage":
item.properties["s3:lrmModePercentage"],
"sarModePercentage":
item.properties["s3:sarModePercentage"],
"landPercentage":
item.properties["s3:landPercentage"],
"closedSeaPercentage":
item.properties["s3:closedSeaPercentage"],
"continentalIcePercentage":
item.properties["s3:continentalIcePercentage"],
"openOceanPercentage":
item.properties["s3:openOceanPercentage"],
}
expected = {
"bbox": [-153.507, -74.0588, -20.0953, 81.4226],
"datetime": "2021-07-04T01:51:35.180925Z",
"orbit_state": "descending",
"absolute_orbit": 28009,
"relative_orbit": 316,
"instruments": ["SRAL"],
"mode": "EO",
"productType": "SR_2_WAT___",
"lrmModePercentage": 0.0,
"sarModePercentage": 100.0,
"landPercentage": 8.0,
"closedSeaPercentage": 0.0,
"continentalIcePercentage": 0.0,
"openOceanPercentage": 92.0,
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_synergy_2_aod_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3B_SY_2_AOD____20210512T143315_20210512T151738_20210514T064157_"
"2663_052_196______LN2_O_NT_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"],
"shape":
item.properties["s3:shape"]
}
expected = {
"bbox": [-104.241, -54.5223, 112.209, 89.7337],
"datetime": "2021-05-12T14:55:26.593379Z",
"orbit_state": "ascending",
"absolute_orbit": 15868,
"relative_orbit": 196,
"cloud_cover": 82.147057,
"instruments": ["SYNERGY"],
"mode": "EO",
"productType": "SY_2_AOD___",
"salineWaterPixels_percentage": 72.660328,
"landPixels_percentage": 27.276878,
"shape": [324, 4035]
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_synergy_2_syn_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SY_2_SYN____20210325T005418_20210325T005718_20210325T142858_"
"0180_070_031_1620_LN2_O_ST_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"coastalPixels_percentage":
item.properties["s3:coastalPixels_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"]
}
expected = {
"bbox": [-179.619, 69.3884, 179.853, 83.7777],
"datetime": "2021-03-25T00:55:48.019583Z",
"orbit_state": "descending",
"absolute_orbit": 26569,
"relative_orbit": 31,
"cloud_cover": 8.166911,
"instruments": ["SYNERGY"],
"mode": "EO",
"productType": "SY_2_SYN___",
"salineWaterPixels_percentage": 94.483109,
"coastalPixels_percentage": 0.093193,
"freshInlandWaterPixels_percentage": 0.076276,
"tidalRegionPixels_percentage": 0.0,
"landPixels_percentage": 2.368632
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_synergy_2_v10_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SY_2_V10____20210911T000000_20210920T235959_20210928T121452_"
"EUROPE____________LN2_O_NT_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"snowOrIcePixels_percentage":
item.properties["s3:snowOrIcePixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"]
}
expected = {
"bbox": [-10.9911, 25.0, 62.0, 75.0],
"datetime": "2021-09-15T23:59:59.500000Z",
"orbit_state": "descending",
"absolute_orbit": 28848,
"relative_orbit": 145,
"cloud_cover": 3.041905,
"instruments": ["SYNERGY"],
"mode": "EO",
"productType": "SY_2_V10___",
"snowOrIcePixels_percentage": 0.154442,
"landPixels_percentage": 65.278832
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_synergy_2_vg1_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SY_2_VG1____20211013T000000_20211013T235959_20211014T203456_"
"EUROPE____________LN2_O_ST_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"snowOrIcePixels_percentage":
item.properties["s3:snowOrIcePixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"]
}
expected = {
"bbox": [-10.9911, 25.0, 62.0, 75.0],
"datetime": "2021-10-13T11:59:59.500000Z",
"orbit_state": "descending",
"absolute_orbit": 29233,
"relative_orbit": 216,
"cloud_cover": 23.811417,
"instruments": ["SYNERGY"],
"mode": "EO",
"productType": "SY_2_VG1___",
"snowOrIcePixels_percentage": 0.102883,
"landPixels_percentage": 46.680979
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
def test_parses_synergy_2_vgp_metadata_properties(self):
# Get the path of the test xml
manifest_path = test_data.get_path(
"data-files/"
"S3A_SY_2_VGP____20210703T142237_20210703T150700_20210703T211742_"
"2663_073_310______LN2_O_ST_002.SEN3")
metalinks = MetadataLinks(manifest_path)
product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
item = pystac.Item(
id=product_metadata.scene_id,
geometry=product_metadata.geometry,
bbox=product_metadata.bbox,
datetime=product_metadata.get_datetime,
properties={},
stac_extensions=[],
)
# ---- Add Extensions ----
# sat
sat = SatExtension.ext(item, add_if_missing=True)
fill_sat_properties(sat, metalinks.manifest)
# eo
eo = EOExtension.ext(item, add_if_missing=True)
fill_eo_properties(eo, metalinks.manifest)
# s3 properties
item.properties.update({**product_metadata.metadata_dict})
# Make a dictionary of the properties
s3_props = {
"bbox":
item.bbox,
"datetime":
item.datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
"orbit_state":
item.properties["sat:orbit_state"],
"absolute_orbit":
item.properties["sat:absolute_orbit"],
"relative_orbit":
item.properties["sat:relative_orbit"],
"cloud_cover":
item.properties["eo:cloud_cover"],
"instruments":
item.properties["instruments"],
"mode":
item.properties["s3:mode"],
"productType":
item.properties["s3:productType"],
"snowOrIcePixels_percentage":
item.properties["s3:snowOrIcePixels_percentage"],
"salineWaterPixels_percentage":
item.properties["s3:salineWaterPixels_percentage"],
"coastalPixelss_percentage":
item.properties["s3:coastalPixelss_percentage"],
"freshInlandWaterPixels_percentage":
item.properties["s3:freshInlandWaterPixels_percentage"],
"tidalRegionPixels_percentage":
item.properties["s3:tidalRegionPixels_percentage"],
"landPixels_percentage":
item.properties["s3:landPixels_percentage"]
}
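        # Note: "coastalPixelss_percentage" above really does have a double
        # "s"; it matches the "s3:coastalPixelss_percentage" key looked up in
        # item.properties, which presumably mirrors the spelling emitted by
        # ProductMetadata.metadata_dict, so the test keeps it verbatim.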
expected = {
"bbox": [-98.2945, -49.2134, 115.456, 89.5354],
"datetime": "2021-07-03T14:44:48.463954Z",
"orbit_state": "ascending",
"absolute_orbit": 28003,
"relative_orbit": 310,
"cloud_cover": 1.692044,
"instruments": ["SYNERGY"],
"mode": "EO",
"productType": "SY_2_VGP___",
"snowOrIcePixels_percentage": 0.436467,
"salineWaterPixels_percentage": 67.744293,
"coastalPixelss_percentage": 0.169447,
"freshInlandWaterPixels_percentage": 0.878855,
"tidalRegionPixels_percentage": 0.470567,
"landPixels_percentage": 32.227482
}
for k, v in expected.items():
self.assertIn(k, s3_props)
self.assertEqual(s3_props[k], v)
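

# The tests above repeat the same Item construction verbatim. A module-level
# builder along these lines (a sketch only; ``_build_sentinel3_item`` is not
# part of the original file, though every name it uses already appears in the
# tests above) would shrink each test to a manifest path plus an ``expected``
# dict:
def _build_sentinel3_item(manifest_rel_path):
    manifest_path = test_data.get_path("data-files/" + manifest_rel_path)
    metalinks = MetadataLinks(manifest_path)
    product_metadata = ProductMetadata(manifest_path, metalinks.manifest)
    item = pystac.Item(
        id=product_metadata.scene_id,
        geometry=product_metadata.geometry,
        bbox=product_metadata.bbox,
        datetime=product_metadata.get_datetime,
        properties={},
        stac_extensions=[],
    )
    # sat / eo extensions, exactly as each test adds them
    fill_sat_properties(SatExtension.ext(item, add_if_missing=True),
                        metalinks.manifest)
    fill_eo_properties(EOExtension.ext(item, add_if_missing=True),
                       metalinks.manifest)
    # s3-specific properties
    item.properties.update({**product_metadata.metadata_dict})
    return item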
| 36.362745 | 78 | 0.581271 | 5,062 | 55,635 | 6.111221 | 0.095417 | 0.112688 | 0.081203 | 0.086569 | 0.864328 | 0.845644 | 0.835623 | 0.812252 | 0.802909 | 0.802909 | 0 | 0.06857 | 0.30745 | 55,635 | 1,529 | 79 | 36.386527 | 0.734311 | 0.031904 | 0 | 0.784964 | 0 | 0 | 0.306717 | 0.20475 | 0 | 0 | 0 | 0 | 0.025869 | 1 | 0.012935 | false | 0 | 0.006467 | 0 | 0.02021 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
78d604a098ad104e8e2481ccb0b6e99a9bbf4c74 | 256 | pyde | Python | processing/Mod. 6/sketch_6_1_l31/sketch_6_1_l31.pyde | nanam0rgana/2019-fall-polytech-cs | 1a31acb3cf22edc930318dec17324b05dd7788d5 | [
"MIT"
] | null | null | null | processing/Mod. 6/sketch_6_1_l31/sketch_6_1_l31.pyde | nanam0rgana/2019-fall-polytech-cs | 1a31acb3cf22edc930318dec17324b05dd7788d5 | [
"MIT"
] | null | null | null | processing/Mod. 6/sketch_6_1_l31/sketch_6_1_l31.pyde | nanam0rgana/2019-fall-polytech-cs | 1a31acb3cf22edc930318dec17324b05dd7788d5 | [
"MIT"
] | null | null | null | def setup():
    size(300, 300)
    smooth()
    strokeWeight(30)
    background(0)

def draw():
    stroke(200, 20)
    line(mouseX - 50, mouseY - 50, mouseX + 50, mouseY + 50)
    line(mouseX + 50, mouseY - 50, mouseX - 50, mouseY + 50)
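
# What this draws: two thick, semi-transparent strokes forming an "X" centred
# on the mouse. background() runs only in setup(), so each frame's X is
# layered over the previous ones, leaving a translucent trail as the mouse
# moves.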
| 25.6 | 66 | 0.589844 | 37 | 256 | 4.081081 | 0.486486 | 0.211921 | 0.18543 | 0.211921 | 0.251656 | 0 | 0 | 0 | 0 | 0 | 0 | 0.219895 | 0.253906 | 256 | 9 | 67 | 28.444444 | 0.570681 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | true | 0 | 0 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
156704876f2feb80aa75947827afd85f1bb00eac | 1,034 | py | Python | config/config_en.py | jxhe/cross-lingual-struct-flow | 8fb90fef4f6fcd79c42fc6aefec5418ca5e54833 | [
"MIT"
] | 25 | 2019-06-07T13:47:43.000Z | 2020-09-26T08:23:35.000Z | config/config_en.py | jxhe/cross-lingual-struct-flow | 8fb90fef4f6fcd79c42fc6aefec5418ca5e54833 | [
"MIT"
] | 1 | 2019-11-27T08:05:29.000Z | 2020-11-29T02:10:37.000Z | config/config_en.py | jxhe/cross-lingual-struct-flow | 8fb90fef4f6fcd79c42fc6aefec5418ca5e54833 | [
"MIT"
] | 3 | 2019-07-21T09:48:27.000Z | 2021-02-28T13:56:47.000Z | params_markov={
"couple_layers": 8,
"cell_layers": 1,
"valid_nepoch": 1,
"lstm_layers": 2,
"epochs": 50,
"batch_size": 32,
"emb_dir": "fastText_data",
"train_file": "ud-treebanks-v2.2/UD_English-EWT/en_ewt-ud-train.conllu",
"val_file":"ud-treebanks-v2.2/UD_English-EWT/en_ewt-ud-dev.conllu",
"test_file":"ud-treebanks-v2.2/UD_English-EWT/en_ewt-ud-test.conllu",
"vec_file": "fastText_data/wiki.en.ewt.vec",
"align_file": "multilingual_trans/alignment_matrices/en.txt"
}
params_dmv={
"couple_layers": 8,
"cell_layers": 1,
"lstm_layers": 2,
"valid_nepoch": 1,
"epochs": 10,
"batch_size": 32,
"emb_dir": "fastText_data",
"train_file": "ud-treebanks-v2.2/UD_English-EWT/en_ewt-ud-train.conllu",
"val_file":"ud-treebanks-v2.2/UD_English-EWT/en_ewt-ud-dev.conllu",
"test_file":"ud-treebanks-v2.2/UD_English-EWT/en_ewt-ud-test.conllu",
"vec_file": "fastText_data/wiki.en.ewt.vec",
"align_file": "multilingual_trans/alignment_matrices/en.txt"
}
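

if __name__ == "__main__":
    # Illustrative sanity check (not used by the training code): the two
    # parameter sets appear to differ only in "epochs"; this prints any key
    # where they actually disagree.
    diff = {k: (params_markov[k], params_dmv[k])
            for k in params_markov
            if params_markov[k] != params_dmv[k]}
    print(diff)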
| 34.466667 | 76 | 0.674081 | 162 | 1,034 | 4.030864 | 0.259259 | 0.061256 | 0.137825 | 0.156202 | 0.872894 | 0.872894 | 0.799387 | 0.799387 | 0.799387 | 0.799387 | 0 | 0.031496 | 0.140232 | 1,034 | 29 | 77 | 35.655172 | 0.703037 | 0 | 0 | 0.785714 | 0 | 0 | 0.702128 | 0.454545 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
159f0997771c24cbe1812642416751a24a5600a0 | 9,639 | py | Python | lambdas/auto_peering/test/test_all_vpcs.py | infrablocks/terraform-aws-vpc-auto-peering-lambda | 386d1157179759f1dfe1b338ab0b1a45e05b5bf0 | [
"MIT"
] | 3 | 2018-02-09T15:53:22.000Z | 2020-01-13T12:52:26.000Z | lambdas/auto_peering/test/test_all_vpcs.py | infrablocks/terraform-aws-vpc-auto-peering-lambda | 386d1157179759f1dfe1b338ab0b1a45e05b5bf0 | [
"MIT"
] | 13 | 2020-01-20T16:02:58.000Z | 2022-03-19T20:49:35.000Z | lambdas/auto_peering/test/test_all_vpcs.py | tobyclemson/terraform-aws-vpc-auto-peering | 36e2c2fc7ec6ae00e47d55037e9543a9e80617f2 | [
"MIT"
] | 1 | 2022-02-21T22:52:31.000Z | 2022-02-21T22:52:31.000Z | import unittest
from unittest import mock
from auto_peering.all_vpcs import AllVPCs
from auto_peering.vpc import VPC
from test import randoms, mocks, builders
class TestAllVPCs(unittest.TestCase):
def test_find_by_account_id_and_vpc_id(self):
account_1_id = randoms.account_id()
account_2_id = randoms.account_id()
region_1_id = randoms.region()
region_2_id = randoms.region()
vpc_id = randoms.vpc_id()
vpc_1_response = mocks.build_vpc_response_mock(name="VPC 1")
vpc_2_response = mocks.build_vpc_response_mock(name="VPC 2")
vpc_3_response = mocks.build_vpc_response_mock(name="VPC 3", id=vpc_id)
vpc_4_response = mocks.build_vpc_response_mock(name="VPC 4")
ec2_gateway_1_1 = mocks.EC2Gateway(account_1_id, region_1_id)
ec2_gateway_1_2 = mocks.EC2Gateway(account_1_id, region_2_id)
ec2_gateway_2_1 = mocks.EC2Gateway(account_2_id, region_1_id)
ec2_gateway_2_2 = mocks.EC2Gateway(account_2_id, region_2_id)
ec2_gateways = mocks.EC2Gateways([
ec2_gateway_1_1, ec2_gateway_1_2, ec2_gateway_2_1, ec2_gateway_2_2,
])
ec2_gateway_2_1.resource().vpcs.all = \
mock.Mock(
name="Account 2 region 1 VPCs",
return_value=[vpc_1_response, vpc_2_response])
ec2_gateway_2_2.resource().vpcs.all = \
mock.Mock(
name="Account 2 region 2 VPCs",
return_value=[vpc_3_response, vpc_4_response])
all_vpcs = AllVPCs(ec2_gateways)
found_vpc = all_vpcs.find_by_account_id_and_vpc_id(account_2_id, vpc_id)
self.assertEqual(found_vpc, VPC(vpc_3_response, account_2_id, region_2_id))
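
    def _build_gateway_grid(self, account_1_id, account_2_id,
                            region_1_id, region_2_id):
        # Sketch of a shared fixture builder (hypothetical; not in the
        # original file): all four tests in this class wire up this same
        # 2x2 account/region grid of EC2 gateways before exercising AllVPCs.
        gateways = [mocks.EC2Gateway(account, region)
                    for account in (account_1_id, account_2_id)
                    for region in (region_1_id, region_2_id)]
        return gateways, mocks.EC2Gateways(gateways)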
def test_find_by_identifier(self):
account_1_id = randoms.account_id()
account_2_id = randoms.account_id()
region_1_id = randoms.region()
region_2_id = randoms.region()
vpc_identifier = "vpc-2-component-vpc-2-deployment-identifier"
vpc_1_response = mocks.build_vpc_response_mock(
name="VPC 1",
tags=builders.build_vpc_tags(
component="vpc-1-component",
deployment_identifier="vpc-1-deployment-identifier"))
vpc_2_response = mocks.build_vpc_response_mock(
name="VPC 2",
tags=builders.build_vpc_tags(
component="vpc-2-component",
deployment_identifier="vpc-2-deployment-identifier"))
vpc_3_response = mocks.build_vpc_response_mock(
name="VPC 3",
tags=builders.build_vpc_tags(
component="vpc-3-component",
deployment_identifier="vpc-3-deployment-identifier"))
vpc_4_response = mocks.build_vpc_response_mock(
name="VPC 4",
tags=builders.build_vpc_tags(
component="vpc-4-component",
deployment_identifier="vpc-4-deployment-identifier"))
ec2_gateway_1_1 = mocks.EC2Gateway(account_1_id, region_1_id)
ec2_gateway_1_2 = mocks.EC2Gateway(account_1_id, region_2_id)
ec2_gateway_2_1 = mocks.EC2Gateway(account_2_id, region_1_id)
ec2_gateway_2_2 = mocks.EC2Gateway(account_2_id, region_2_id)
ec2_gateways = mocks.EC2Gateways([
ec2_gateway_1_1, ec2_gateway_1_2, ec2_gateway_2_1, ec2_gateway_2_2,
])
ec2_gateway_1_1.resource().vpcs.all = \
mock.Mock(
name="Account 1 region 1 VPCs",
return_value=[vpc_1_response])
ec2_gateway_1_2.resource().vpcs.all = \
mock.Mock(
name="Account 1 region 2 VPCs",
return_value=[vpc_2_response])
ec2_gateway_2_1.resource().vpcs.all = \
mock.Mock(
name="Account 2 region 1 VPCs",
return_value=[vpc_3_response, vpc_4_response])
ec2_gateway_2_2.resource().vpcs.all = \
mock.Mock(
name="Account 2 region 2 VPCs",
return_value=[])
all_vpcs = AllVPCs(ec2_gateways)
found_vpc = all_vpcs.find_by_component_instance_identifier(
vpc_identifier)
self.assertEqual(found_vpc, VPC(vpc_2_response, account_1_id, region_2_id))
def test_find_dependencies_of_vpc(self):
account_1_id = randoms.account_id()
account_2_id = randoms.account_id()
region_1_id = randoms.region()
region_2_id = randoms.region()
target_vpc = VPC(mocks.build_vpc_response_mock(
name="Target VPC",
tags=builders.build_vpc_tags(
dependencies=[
"component-1-deployment-2",
"component-4-default"
])), account_1_id, region_1_id)
vpc_1_response = mocks.build_vpc_response_mock(
name="VPC 1",
tags=builders.build_vpc_tags(
component="component-1",
deployment_identifier="deployment-1"))
vpc_2_response = mocks.build_vpc_response_mock(
name="VPC 2",
tags=builders.build_vpc_tags(
component="component-1",
deployment_identifier="deployment-2"))
vpc_3_response = mocks.build_vpc_response_mock(
name="VPC 3",
tags=builders.build_vpc_tags(
component="component-2",
deployment_identifier="deployment-1"))
vpc_4_response = mocks.build_vpc_response_mock(
name="VPC 4",
tags=builders.build_vpc_tags(
component="component-4",
deployment_identifier="default"))
ec2_gateway_1_1 = mocks.EC2Gateway(account_1_id, region_1_id)
ec2_gateway_1_2 = mocks.EC2Gateway(account_1_id, region_2_id)
ec2_gateway_2_1 = mocks.EC2Gateway(account_2_id, region_1_id)
ec2_gateway_2_2 = mocks.EC2Gateway(account_2_id, region_2_id)
ec2_gateways = mocks.EC2Gateways([
ec2_gateway_1_1, ec2_gateway_1_2, ec2_gateway_2_1, ec2_gateway_2_2,
])
ec2_gateway_1_1.resource().vpcs.all = \
mock.Mock(
name="Account 1 region 1 VPCs",
return_value=[vpc_1_response])
ec2_gateway_1_2.resource().vpcs.all = \
mock.Mock(
name="Account 1 region 2 VPCs",
return_value=[vpc_2_response])
ec2_gateway_2_1.resource().vpcs.all = \
mock.Mock(
name="Account 2 region 1 VPCs",
return_value=[vpc_3_response, vpc_4_response])
ec2_gateway_2_2.resource().vpcs.all = \
mock.Mock(
name="Account 2 region 2 VPCs",
return_value=[])
all_vpcs = AllVPCs(ec2_gateways)
found_vpcs = all_vpcs.find_dependencies_of(target_vpc)
self.assertEqual(
set(found_vpcs),
{
VPC(vpc_2_response, account_1_id, region_2_id),
VPC(vpc_4_response, account_2_id, region_1_id)
}
)
def test_find_dependents_of_vpc(self):
account_1_id = randoms.account_id()
account_2_id = randoms.account_id()
region_1_id = randoms.region()
region_2_id = randoms.region()
target_vpc = VPC(mocks.build_vpc_response_mock(
name="Target VPC",
tags=builders.build_vpc_tags(
component="target",
deployment_identifier="default"
)), account_1_id, region_1_id)
vpc_1_response = mocks.build_vpc_response_mock(
name="VPC 1",
tags=builders.build_vpc_tags(
dependencies=["target-default", "other-thing"]))
vpc_2_response = mocks.build_vpc_response_mock(
name="VPC 2",
tags=builders.build_vpc_tags(
dependencies=[]))
vpc_3_response = mocks.build_vpc_response_mock(
name="VPC 3",
tags=builders.build_vpc_tags(
dependencies=[]))
vpc_4_response = mocks.build_vpc_response_mock(
name="VPC 4",
tags=builders.build_vpc_tags(
dependencies=["other-thing", "target-default"]))
ec2_gateway_1_1 = mocks.EC2Gateway(account_1_id, region_1_id)
ec2_gateway_1_2 = mocks.EC2Gateway(account_1_id, region_2_id)
ec2_gateway_2_1 = mocks.EC2Gateway(account_2_id, region_1_id)
ec2_gateway_2_2 = mocks.EC2Gateway(account_2_id, region_2_id)
ec2_gateways = mocks.EC2Gateways([
ec2_gateway_1_1, ec2_gateway_1_2, ec2_gateway_2_1, ec2_gateway_2_2,
])
ec2_gateway_1_1.resource().vpcs.all = \
mock.Mock(
name="Account 1 region 1 VPCs",
return_value=[vpc_1_response])
ec2_gateway_1_2.resource().vpcs.all = \
mock.Mock(
name="Account 1 region 2 VPCs",
return_value=[vpc_2_response])
ec2_gateway_2_1.resource().vpcs.all = \
mock.Mock(
name="Account 2 region 1 VPCs",
return_value=[vpc_3_response, vpc_4_response])
ec2_gateway_2_2.resource().vpcs.all = \
mock.Mock(
name="Account 2 region 2 VPCs",
return_value=[])
all_vpcs = AllVPCs(ec2_gateways)
found_vpcs = all_vpcs.find_dependents_of(target_vpc)
self.assertEqual(
set(found_vpcs),
{
VPC(vpc_1_response, account_1_id, region_1_id),
VPC(vpc_4_response, account_2_id, region_1_id)
}
)
| 38.556 | 83 | 0.612823 | 1,237 | 9,639 | 4.35004 | 0.044462 | 0.085486 | 0.049062 | 0.070247 | 0.879762 | 0.862107 | 0.841851 | 0.81509 | 0.813418 | 0.808957 | 0 | 0.049108 | 0.296504 | 9,639 | 249 | 84 | 38.710843 | 0.744433 | 0 | 0 | 0.747619 | 0 | 0 | 0.085694 | 0.018155 | 0 | 0 | 0 | 0 | 0.019048 | 1 | 0.019048 | false | 0 | 0.02381 | 0 | 0.047619 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ec60c44f06734b8e6c661efb4698a334d4471aa2 | 54 | py | Python | aracle/__init__.py | jiwoncpark/helio-ai | 20aabe27ce65b738b77192242dc89eda612f945e | [
"MIT"
] | 1 | 2020-02-28T23:43:27.000Z | 2020-02-28T23:43:27.000Z | aracle/__init__.py | jiwoncpark/aracle | 20aabe27ce65b738b77192242dc89eda612f945e | [
"MIT"
] | 10 | 2019-09-13T10:11:32.000Z | 2019-11-12T19:22:18.000Z | aracle/__init__.py | jiwoncpark/helio-ai | 20aabe27ce65b738b77192242dc89eda612f945e | [
"MIT"
] | 1 | 2019-11-05T22:14:54.000Z | 2019-11-05T22:14:54.000Z | from .toy_data import toy_squares, generate_toy_data
| 18 | 52 | 0.851852 | 9 | 54 | 4.666667 | 0.666667 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 54 | 2 | 53 | 27 | 0.875 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
ec730b21312e4290d1ce43b7207cc80321bfb7f4 | 2,050 | py | Python | ProjectApplication/project_core/migrations/0176_auto_20211203_1552.py | code-review-doctor/project-application | d85b40b69572efbcda24ce9c40803f76d8ffd192 | [
"MIT"
] | null | null | null | ProjectApplication/project_core/migrations/0176_auto_20211203_1552.py | code-review-doctor/project-application | d85b40b69572efbcda24ce9c40803f76d8ffd192 | [
"MIT"
] | null | null | null | ProjectApplication/project_core/migrations/0176_auto_20211203_1552.py | code-review-doctor/project-application | d85b40b69572efbcda24ce9c40803f76d8ffd192 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.9 on 2021-12-03 14:52
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('project_core', '0175_auto_20211203_1224'),
]
operations = [
migrations.AlterField(
model_name='call',
name='finance_year',
field=models.IntegerField(help_text='Finance year of this call. It is used, for example, for the project key from this call', validators=[django.core.validators.MinValueValidator(2015, 'Finance year cannot be before SPI existed'), django.core.validators.MaxValueValidator(2023, 'Finance year cannot be more than two years after the current year')]),
),
migrations.AlterField(
model_name='historicalcall',
name='finance_year',
field=models.IntegerField(help_text='Finance year of this call. It is used, for example, for the project key from this call', validators=[django.core.validators.MinValueValidator(2015, 'Finance year cannot be before SPI existed'), django.core.validators.MaxValueValidator(2023, 'Finance year cannot be more than two years after the current year')]),
),
migrations.AlterField(
model_name='historicalproject',
name='finance_year',
field=models.IntegerField(help_text='Finance year of this project', validators=[django.core.validators.MinValueValidator(2015, 'Finance year cannot be before SPI existed'), django.core.validators.MaxValueValidator(2023, 'Finance year cannot be more than two years after the current year')]),
),
migrations.AlterField(
model_name='project',
name='finance_year',
field=models.IntegerField(help_text='Finance year of this project', validators=[django.core.validators.MinValueValidator(2015, 'Finance year cannot be before SPI existed'), django.core.validators.MaxValueValidator(2023, 'Finance year cannot be more than two years after the current year')]),
),
]
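
# Note: makemigrations freezes validator arguments as literals, so the
# 2015/2023 bounds (and the "two years after the current year" wording) are a
# snapshot of the rule as of this migration's generation date, not a dynamic
# check against the current year.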
| 58.571429 | 361 | 0.70439 | 250 | 2,050 | 5.712 | 0.256 | 0.123249 | 0.12605 | 0.106443 | 0.82423 | 0.82423 | 0.82423 | 0.82423 | 0.82423 | 0.82423 | 0 | 0.038368 | 0.199024 | 2,050 | 34 | 362 | 60.294118 | 0.831303 | 0.021951 | 0 | 0.571429 | 1 | 0 | 0.387918 | 0.011483 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.178571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bf2ec247b0c7e4ab1717be3648defc5ace39ef3c | 955 | py | Python | tests/integration/team_stats_test.py | egret85/echovr-api | e135f25fb5b188e2931133d04c47c5e66e83a6c5 | [
"MIT"
] | 7 | 2018-11-02T18:12:18.000Z | 2021-03-08T10:47:59.000Z | tests/integration/team_stats_test.py | egret85/echovr-api | e135f25fb5b188e2931133d04c47c5e66e83a6c5 | [
"MIT"
] | null | null | null | tests/integration/team_stats_test.py | egret85/echovr-api | e135f25fb5b188e2931133d04c47c5e66e83a6c5 | [
"MIT"
] | 4 | 2018-11-02T18:12:08.000Z | 2020-06-19T19:42:39.000Z | import pytest
@pytest.fixture
def team_stats(standard_public_match_gamestate):
return standard_public_match_gamestate.teams[0].stats
def test_possession_time(team_stats):
assert team_stats.possession_time == 77.446526
def test_points(team_stats):
assert team_stats.points == 6
def test_assists(team_stats):
assert team_stats.assists == 0
def test_saves(team_stats):
assert team_stats.saves == 0
def test_stuns(team_stats):
assert team_stats.stuns == 17
def test_goals(team_stats):
assert team_stats.goals == 0
def test_passes(team_stats):
assert team_stats.passes == 0
def test_catches(team_stats):
assert team_stats.catches == 0
def test_steals(team_stats):
assert team_stats.steals == 0
def test_blocks(team_stats):
assert team_stats.blocks == 0
def test_interceptions(team_stats):
assert team_stats.interceptions == 0
def test_shots_taken(team_stats):
assert team_stats.shots_taken == 4
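
# The same checks, collapsed with pytest's parametrize (added purely as an
# illustration of the pattern; the values are copied from the assertions
# above):
@pytest.mark.parametrize("stat, value", [
    ("possession_time", 77.446526),
    ("points", 6),
    ("stuns", 17),
    ("shots_taken", 4),
])
def test_stats_parametrized(team_stats, stat, value):
    assert getattr(team_stats, stat) == value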
| 22.738095 | 57 | 0.763351 | 144 | 955 | 4.736111 | 0.236111 | 0.329912 | 0.26393 | 0.334311 | 0.422287 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025894 | 0.150785 | 955 | 41 | 58 | 23.292683 | 0.815043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.428571 | 1 | 0.464286 | false | 0.071429 | 0.035714 | 0.035714 | 0.535714 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
17731c73cba1834402175643850663be874a23b5 | 10,806 | py | Python | fuxi/tests/unit/common/test_state_monitor.py | xxwjj/fuxi | 8e720cfed8c9afcd2bab21d7c9e9ebb1b6f80fcd | [
"Apache-2.0"
] | null | null | null | fuxi/tests/unit/common/test_state_monitor.py | xxwjj/fuxi | 8e720cfed8c9afcd2bab21d7c9e9ebb1b6f80fcd | [
"Apache-2.0"
] | null | null | null | fuxi/tests/unit/common/test_state_monitor.py | xxwjj/fuxi | 8e720cfed8c9afcd2bab21d7c9e9ebb1b6f80fcd | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from cinderclient import exceptions as cinder_exception
from manilaclient.common.apiclient import exceptions as manila_exception
from fuxi.common import state_monitor
from fuxi import exceptions
from fuxi.tests.unit import base, fake_client, fake_object
class TestStateMonitor(base.TestCase):
def setUp(self):
super(TestStateMonitor, self).setUp()
def test_monitor_cinder_volume(self):
fake_cinder_client = fake_client.FakeCinderClient()
fake_cinder_volume = fake_object.FakeCinderVolume(status='available')
fake_desired_state = 'in-use'
fake_transient_states = ('in-use',)
fake_time_limit = 0
fake_state_monitor = state_monitor.StateMonitor(fake_cinder_client,
fake_cinder_volume,
fake_desired_state,
fake_transient_states,
fake_time_limit)
fake_desired_volume = fake_object.FakeCinderVolume(status='in-use')
with mock.patch.object(fake_client.FakeCinderClient.Volumes, 'get',
return_value=fake_desired_volume):
self.assertEqual(fake_desired_volume,
fake_state_monitor.monitor_cinder_volume())
def test_monitor_cinder_volume_get_failed(self):
fake_cinder_client = fake_client.FakeCinderClient()
fake_cinder_volume = fake_object.FakeCinderVolume(status='available')
with mock.patch('fuxi.tests.unit.fake_client.FakeCinderClient.Volumes'
'.get',
side_effect=cinder_exception.ClientException(404)):
fake_state_monitor = state_monitor.StateMonitor(fake_cinder_client,
fake_cinder_volume,
None, None, -1)
self.assertRaises(exceptions.TimeoutException,
fake_state_monitor.monitor_cinder_volume)
with mock.patch('fuxi.tests.unit.fake_client.FakeCinderClient.Volumes'
'.get',
side_effect=cinder_exception.ClientException(404)):
fake_state_monitor = state_monitor.StateMonitor(fake_cinder_client,
fake_cinder_volume,
None, None)
self.assertRaises(cinder_exception.ClientException,
fake_state_monitor.monitor_cinder_volume)
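        # The two blocks above pin down the retry contract: with a negative
        # time limit the monitor gives up immediately with TimeoutException,
        # while with the default limit the underlying ClientException is
        # re-raised to the caller.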
def test_monitor_cinder_volume_unexpected_state(self):
fake_cinder_client = fake_client.FakeCinderClient()
fake_cinder_volume = fake_object.FakeCinderVolume(status='available')
fake_desired_state = 'in-use'
fake_transient_states = ('in-use',)
fake_time_limit = 0
fake_state_monitor = state_monitor.StateMonitor(fake_cinder_client,
fake_cinder_volume,
fake_desired_state,
fake_transient_states,
fake_time_limit)
fake_desired_volume = fake_object.FakeCinderVolume(status='attaching')
with mock.patch.object(fake_client.FakeCinderClient.Volumes, 'get',
return_value=fake_desired_volume):
self.assertRaises(exceptions.UnexpectedStateException,
fake_state_monitor.monitor_cinder_volume)
def test_monitor_manila_share(self):
fake_manila_client = fake_client.FakeManilaClient()
fake_manila_share = fake_object.FakeManilaShare(status='creating')
fake_desired_state = 'available'
fake_transient_states = ('creating',)
fake_state_monitor = state_monitor.StateMonitor(fake_manila_client,
fake_manila_share,
fake_desired_state,
fake_transient_states,
0)
fake_desired_share = fake_object.FakeManilaShare(status='available')
with mock.patch.object(fake_client.FakeManilaClient.Shares, 'get',
return_value=fake_desired_share):
self.assertEqual(fake_desired_share,
fake_state_monitor.monitor_manila_share())
def test_monitor_manila_share_get_failed(self):
fake_manila_client = fake_client.FakeManilaClient()
fake_manila_share = fake_object.FakeManilaShare(status='creating')
with mock.patch('fuxi.tests.unit.fake_client'
'.FakeManilaClient.Shares.get',
side_effect=manila_exception.ClientException(404)):
fake_state_monitor = state_monitor.StateMonitor(fake_manila_client,
fake_manila_share,
None, None, -1)
self.assertRaises(exceptions.TimeoutException,
fake_state_monitor.monitor_manila_share)
with mock.patch('fuxi.tests.unit.fake_client'
'.FakeManilaClient.Shares.get',
side_effect=manila_exception.ClientException(404)):
fake_state_monitor = state_monitor.StateMonitor(fake_manila_client,
fake_manila_share,
None, None)
self.assertRaises(manila_exception.ClientException,
fake_state_monitor.monitor_manila_share)
def test_monitor_manila_share_unexpected_state(self):
fake_manila_client = fake_client.FakeManilaClient()
fake_manila_share = fake_object.FakeManilaShare(status='creating')
fake_state_monitor = state_monitor.StateMonitor(fake_manila_client,
fake_manila_share,
'available',
('creating',),
0)
        fake_desired_share = fake_object.FakeManilaShare(status='unknown')
with mock.patch.object(fake_client.FakeManilaClient.Shares, 'get',
return_value=fake_desired_share):
self.assertRaises(exceptions.UnexpectedStateException,
fake_state_monitor.monitor_manila_share)
def test_monitor_share_access(self):
fake_manila_client = fake_client.FakeManilaClient()
fake_manila_share = fake_object.FakeManilaShare()
fake_state_monitor = state_monitor.StateMonitor(fake_manila_client,
fake_manila_share,
'active',
('new',),
0)
fake_desired_sl = [fake_object.FakeShareAccess(
access_type='ip', access_to='192.168.0.1', state='active')]
with mock.patch.object(fake_client.FakeManilaClient.Shares,
'access_list',
return_value=fake_desired_sl):
self.assertEqual(fake_manila_share,
fake_state_monitor.monitor_share_access(
'ip', '192.168.0.1'))
def test_monitor_share_access_list_failed(self):
fake_manila_client = fake_client.FakeManilaClient()
fake_manila_share = fake_object.FakeManilaShare()
with mock.patch('fuxi.tests.unit.fake_client.FakeManilaClient.Shares'
'.access_list',
side_effect=manila_exception.ClientException(404)):
fake_state_monitor = state_monitor.StateMonitor(fake_manila_client,
fake_manila_share,
None, None, -1)
self.assertRaises(exceptions.TimeoutException,
fake_state_monitor.monitor_share_access,
'ip', '192.168.0.1')
with mock.patch('fuxi.tests.unit.fake_client.FakeManilaClient.Shares'
'.access_list',
side_effect=manila_exception.ClientException(404)):
fake_state_monitor = state_monitor.StateMonitor(fake_manila_client,
fake_manila_share,
None, None)
self.assertRaises(manila_exception.ClientException,
fake_state_monitor.monitor_share_access,
'ip', '192.168.0.1')
def test_monitor_share_access_unexpected_state(self):
fake_manila_client = fake_client.FakeManilaClient()
fake_manila_share = fake_object.FakeManilaShare()
fake_state_monitor = state_monitor.StateMonitor(fake_manila_client,
fake_manila_share,
'active',
('new',),
0)
fake_desired_sl = [fake_object.FakeShareAccess(
access_type='ip', access_to='192.168.0.1', state='unknown')]
with mock.patch.object(fake_client.FakeManilaClient.Shares,
'access_list', return_value=fake_desired_sl):
self.assertRaises(exceptions.UnexpectedStateException,
fake_state_monitor.monitor_share_access,
'ip', '192.168.0.1')
| 54.575758 | 79 | 0.55025 | 953 | 10,806 | 5.883526 | 0.13851 | 0.079187 | 0.068486 | 0.049938 | 0.832352 | 0.816658 | 0.792402 | 0.792402 | 0.76779 | 0.747994 | 0 | 0.01191 | 0.386174 | 10,806 | 197 | 80 | 54.852792 | 0.833409 | 0.048307 | 0 | 0.754717 | 0 | 0 | 0.061429 | 0.030763 | 0 | 0 | 0 | 0 | 0.075472 | 1 | 0.062893 | false | 0 | 0.037736 | 0 | 0.106918 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1789f12ee05a04f319168327544c8626ace38f80 | 5,038 | py | Python | tests/data/configs.py | cailholm/django-saml2-pro-auth | 7da92ad814111726cddea0c4a39f29324a5ab2b4 | [
"MIT"
] | 24 | 2017-11-06T14:08:15.000Z | 2020-01-27T14:26:57.000Z | tests/data/configs.py | cailholm/django-saml2-pro-auth | 7da92ad814111726cddea0c4a39f29324a5ab2b4 | [
"MIT"
] | 29 | 2017-11-01T14:31:00.000Z | 2020-02-06T08:33:14.000Z | tests/data/configs.py | cailholm/django-saml2-pro-auth | 7da92ad814111726cddea0c4a39f29324a5ab2b4 | [
"MIT"
] | 24 | 2017-11-01T15:17:49.000Z | 2020-01-10T17:06:28.000Z | MOCK_SAML2_CONFIG = {
"functionProvider": {
"strict": True,
"debug": True,
"sp": {
"entityId": "https://example.com/sso/saml/metadata?provider=functionProvider",
"assertionConsumerService": {
"url": "https://example.com/sso/saml/?acs&provider=functionProvider",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST",
},
"singleLogoutService": {
"url": "https://example.com/sso/saml/?sls&provider=functionProvider",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
},
"NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified",
"x509cert": open("tests/mock_certs/sp.crt", "r").read(),
"privateKey": open("tests/mock_certs/sp.key", "r").read(),
},
"idp": {
"entityId": "https://myprovider.example.com/0f3172cf",
"singleSignOnService": {
"url": "https://myprovider.example.com/applogin/appKey/0f3172cf/customerId/AA333",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
},
"singleLogoutService": {
"url": "https://myprovider.example.com/applogout",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
},
"x509cert": open("tests/mock_certs/myprovider.crt", "r").read(),
},
"organization": {
"en-US": {
"name": "example inc",
"displayname": "Example Incorporated",
"url": "example.com",
}
},
"contactPerson": {
"technical": {"givenName": "Jane Doe", "emailAddress": "jdoe@examp.com"},
"support": {"givenName": "Jane Doe", "emailAddress": "jdoe@examp.com"},
},
"security": {
"name_id_encrypted": False,
"authn_requests_signed": True,
"logout_requests_signed": False,
"logout_response_signed": False,
"sign_metadata": False,
"want_messages_signed": False,
"want_assertions_signed": True,
"want_name_id": True,
"want_name_id_encrypted": False,
"want_assertions_encrypted": True,
"signature_algorithm": "http://www.w3.org/2000/09/xmldsig#rsa-sha1",
"digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
},
},
"classProvider": {
"strict": True,
"debug": True,
"lowercase_urlencoding": False,
"idp_initiated_auth": True,
"sp": {
"entityId": "https://example.com/saml/metadata/classProvider/",
"assertionConsumerService": {
"url": "https://example.com/saml/acs/classProvider/",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST",
},
"singleLogoutService": {
"url": "https://example.com/saml/sls/classProvider/",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
},
"NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified",
"x509cert": open("tests/mock_certs/sp.crt", "r").read(),
"privateKey": open("tests/mock_certs/sp.key", "r").read(),
},
"idp": {
"entityId": "https://myprovider.example.com/0f3172cf",
"singleSignOnService": {
"url": "https://myprovider.example.com/applogin/appKey/0f3172cf/customerId/AA333",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
},
"singleLogoutService": {
"url": "https://myprovider.example.com/applogout",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect",
},
"x509cert": open("tests/mock_certs/myprovider.crt", "r").read(),
},
"organization": {
"en-US": {
"name": "example inc",
"displayname": "Example Incorporated",
"url": "example.com",
}
},
"contactPerson": {
"technical": {"givenName": "Jane Doe", "emailAddress": "jdoe@examp.com"},
"support": {"givenName": "Jane Doe", "emailAddress": "jdoe@examp.com"},
},
"security": {
"name_id_encrypted": False,
"authn_requests_signed": True,
"logout_requests_signed": False,
"logout_response_signed": False,
"sign_metadata": False,
"want_messages_signed": False,
"want_assertions_signed": True,
"want_name_id": True,
"want_name_id_encrypted": False,
"want_assertions_encrypted": True,
"signature_algorithm": "http://www.w3.org/2000/09/xmldsig#rsa-sha1",
"digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
},
},
}
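

# Each cert/key above is read via open(...).read(), which leaves closing the
# handle to the garbage collector (and triggers ResourceWarning when warnings
# are enabled). A tiny helper like this sketch (it would have to be defined
# before the dict to be usable there) keeps the config declarative while
# closing files promptly:
def read_file(path):
    with open(path, "r") as handle:
        return handle.read()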
| 43.808696 | 98 | 0.530369 | 470 | 5,038 | 5.57234 | 0.223404 | 0.053456 | 0.049637 | 0.057274 | 0.928217 | 0.897289 | 0.862925 | 0.862925 | 0.862925 | 0.862925 | 0 | 0.027066 | 0.303295 | 5,038 | 114 | 99 | 44.192982 | 0.719088 | 0 | 0 | 0.701754 | 0 | 0 | 0.546844 | 0.196308 | 0 | 0 | 0 | 0 | 0.052632 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bd713605feff05a66a8c4c64b35351862cb9058f | 194 | py | Python | pixel_table/utils.py | Spooner/pixel-table | 87ac04adbb74702bee3dcaa5c6bded7786cf73e7 | [
"MIT"
] | null | null | null | pixel_table/utils.py | Spooner/pixel-table | 87ac04adbb74702bee3dcaa5c6bded7786cf73e7 | [
"MIT"
] | null | null | null | pixel_table/utils.py | Spooner/pixel-table | 87ac04adbb74702bee3dcaa5c6bded7786cf73e7 | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function, unicode_literals
import os
def root(*path):
return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", *path))
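
# Example (illustrative): root("assets", "player.png") resolves to the
# absolute path of <project>/assets/player.png, where <project> is the
# directory one level above this module's package.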
| 24.25 | 82 | 0.752577 | 27 | 194 | 5 | 0.666667 | 0.133333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113402 | 194 | 7 | 83 | 27.714286 | 0.784884 | 0 | 0 | 0 | 0 | 0 | 0.010309 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
bd7a79c775f0bc1c631cc238532ad6732c4c8401 | 4,123 | py | Python | advent2018_day01.py | coandco/advent2018 | 5d51780cbcf425857f99c1f6b2c648a3e5852581 | [
"MIT"
] | null | null | null | advent2018_day01.py | coandco/advent2018 | 5d51780cbcf425857f99c1f6b2c648a3e5852581 | [
"MIT"
] | null | null | null | advent2018_day01.py | coandco/advent2018 | 5d51780cbcf425857f99c1f6b2c648a3e5852581 | [
"MIT"
] | null | null | null | # This originally had newlines instead of commas, but I converted it over for readability
INPUT = "-17,+14,+10,-2,-1,+6,+6,+7,+1,+9,+8,-13,-7,+17,-4,-16,-6,-11,-7,-20,+3,+2,-10,-5,+3,+5,+13,-3,-2,-4,+19,-6,+14,-4,+3,+6,+17,+4,-18,+16,+19,-3,-4,+18,-2,+7,-10,-8,+10,-6,+11,+5,-6,+14,-16,-5,+15,+2,+14,-19,+13,+1,-6,-18,+20,+16,-10,+8,-9,+3,+9,+16,-9,-3,-6,+5,+15,-1,+12,-7,-2,-14,+20,-11,+24,-12,+1,-5,+7,+14,+8,-16,-17,-24,+17,+1,+8,-12,+1,-8,-12,-10,+16,-3,-16,-7,+14,-13,-19,+8,-1,-4,-13,-2,+10,-4,-17,-23,+21,+18,-20,+16,-22,-16,-6,-16,-6,+5,+5,-19,-18,+14,-10,-15,+4,+19,-11,-15,+16,-17,-8,-15,+12,-17,+3,-13,+3,+3,-8,-15,-14,+11,+13,-3,-4,+9,+21,-9,+12,-2,+6,+6,+8,-2,+16,-6,+4,-1,+15,+1,+4,+6,-14,+2,-19,-18,+6,-3,+5,+8,-19,-4,-17,-16,-13,-16,+5,-6,-2,+7,-13,-8,-13,-19,+5,+12,-13,+19,-2,+16,+4,+18,+9,-1,-4,-16,+19,+19,+4,-5,-14,+12,-7,-1,-24,-8,-9,-18,-16,+2,-13,-7,+16,-7,+4,-12,-9,-10,+14,+18,-16,+6,+17,-6,+10,+5,+5,+10,+5,-13,-9,-1,+2,+12,-15,+6,+7,+14,+11,-7,+13,+10,+19,+17,+7,+10,-9,-10,+12,-9,+21,+26,+18,-11,-1,-11,+22,+12,-3,-5,-17,+16,+3,+24,+14,+8,+20,-6,-7,-22,+1,-2,+24,+23,-15,+25,+37,-15,+6,+40,+13,+3,+8,-18,+6,-4,+13,+18,-4,+5,-11,+4,+14,-16,+5,-11,-15,+7,+1,+4,+21,+18,+15,-18,-9,-8,+18,-4,+9,+18,-12,+1,+14,-8,+1,-11,-15,+8,+6,-20,-4,+12,-1,-3,-13,-17,+8,+20,-10,+30,+12,-7,-1,-15,+12,-4,+18,-8,+16,+4,+13,-8,-11,-5,+18,+8,+17,+14,-16,+6,+12,+5,+19,+2,+13,-19,+8,+3,-16,-20,+16,+5,-13,-14,-11,+14,+14,-5,+16,+5,+8,+13,-16,-18,-1,+15,-11,-10,-19,-10,-7,-4,+7,-11,-2,+21,-7,+5,-15,-17,-7,+18,-5,+12,+3,+8,+15,-7,-17,+18,-10,+5,+17,-8,+1,-18,-18,-6,-13,-18,+6,-11,-19,-2,-15,-8,-18,-3,-6,+3,+26,+14,+11,+10,+46,+4,+4,-2,+21,+22,+14,+8,-4,+20,-2,+4,+23,+11,+22,+16,-8,+11,+22,-17,+4,-8,-13,-15,+11,+19,-23,-5,-5,+47,+9,-12,+18,+15,+15,+8,+8,+37,-1,-15,-17,-10,+32,+22,+15,-10,+8,+128,+27,+19,+18,+18,+7,+34,+18,+10,-3,-5,+22,+4,+15,+2,+3,+9,-6,-35,-22,-1,+4,+55,-2,+80,+21,-2,+53,+33,+13,-60,-28,-44,-30,+2,-17,+12,+249,+26,-186,+72428,-2,+13,+17,+15,-18,+1,+7,+3,-14,+1,-4,-2,+3,-18,+2,+14,-13,-8,-21,-4,-2,-8,-5,+18,+12,-9,+2,+13,+6,-9,+17,+9,+8,+14,-3,+7,-3,+16,-15,+10,+18,+12,-9,+12,+7,+18,-17,+1,+14,+9,-1,+8,+13,-4,-12,-8,+10,+6,+2,+8,+2,+6,+16,-20,+10,+2,+19,-6,-14,+17,+6,+7,+3,+4,+19,+16,+17,+1,+1,+4,+19,-12,+2,+12,-6,+11,+16,-1,+10,-3,+15,+17,+2,+7,+9,-14,+16,+15,+13,+10,-12,-8,+13,+1,-11,-10,-19,+16,+2,-8,+4,-20,-18,+19,+7,+15,+11,+17,+7,-14,+6,-12,-19,-8,+6,-4,+8,+6,+3,+17,+8,+16,-1,+4,-18,+3,-10,+5,+15,-4,+8,-1,-11,-8,+14,+17,+6,-12,+17,-13,+9,-5,+19,+10,-19,+17,+13,+12,-4,-2,-11,+14,+14,+17,+12,-15,-10,-13,+2,-18,-17,+13,-1,-2,+6,-9,+13,+10,+8,+11,+14,-15,-16,-11,-18,-9,+10,-12,+4,-3,+12,+13,-28,-4,+16,+20,+12,+19,+14,-17,+11,+15,+12,-18,-2,+6,+17,+21,-22,-17,-18,-19,+4,+3,+2,+2,-6,-15,-25,-4,-12,-12,-12,+21,-12,+6,-4,-3,-10,-9,-16,-13,-19,-17,-2,-15,+9,+15,+3,-17,+10,-9,-16,+9,+9,+1,+4,-12,+6,+18,-29,-8,-16,-16,+10,-14,-4,+6,+15,+5,+7,+12,-4,-18,-10,-19,+17,-21,-2,-16,-18,-15,+13,-12,-18,-6,-3,-19,-13,-14,+10,-14,+12,-3,-17,-14,-18,-15,+4,-15,+19,-10,+3,-18,+2,+2,-9,+13,+6,-13,+6,-22,-23,-19,+18,+19,+16,-21,+18,+18,+13,-11,+7,+21,-11,+15,-1,+9,-4,-12,+15,-19,-10,+3,+1,+24,+14,+6,+17,+19,+9,+11,-12,-10,+8,+16,+13,-18,+2,+10,+16,+17,+6,+4,-1,+4,+5,+9,+4,-15,-1,+23,+21,+6,-15,-11,+10,-17,+11,+14,-2,+1,+19,-15,+18,-6,-18,+17,+13,+6,+2,+17,-23,-16,-8,+21,+62,-15,-8,+5,+24,+36,+10,+2,+19,-8,+18,+2,+13,-1,-8,+14,+35,+21,+7,-14,+19,-3,+39,+11,+12,+25,-5,-6,-7,+14,+15,+22,-19,+12,-18,+19,-17,-9,-13,+11,+45,+21,-18,+12,-14,+26,+10,+4,+9,-1,+3,+14,+5,-2,+7,-14,-2,-12,+25,+19,+3,+7,-28,-6,+15,+3,+11,+10,-22,-6,-2,+3,+21,+14,-12,+17,-6,-32,-28,+2,+19,-16,+8,-4,-20,+1,+3,+17,-36,-21,-3,+2,+1
4,-29,+20,-6,+25,-11,+34,-76,+10,+30,-42,-32,-25,-12,+8,+7,-35,+17,-5,-10,+3,+10,-23,-37,-15,+14,+18,-88,-38,+17,+13,+50,-56,-25,-26,-1,-29,+19,-73214".split(",")
INPUT = [int(x) for x in INPUT]
print("Sum of offsets: %d" % sum(INPUT))
current = 0
seen = set()
while True:
for offset in INPUT:
current += offset
if current in seen:
print("Saw %d twice!" % current)
exit()
else:
seen.add(current) | 242.529412 | 3,718 | 0.455736 | 1,085 | 4,123 | 1.731797 | 0.086636 | 0.010644 | 0.00958 | 0.007451 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.41175 | 0.033956 | 4,123 | 17 | 3,719 | 242.529412 | 0.060005 | 0.021101 | 0 | 0 | 0 | 0.076923 | 0.927594 | 0.919632 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.153846 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bdd1faabd5acd54d8332a39723ad0e1ae4388424 | 81 | py | Python | work_space/XavierWorkspace/app.py | RAXR-Capstone/project_danger_zone | ac117c91e70415346433fef8d93dd7d1a6f27a95 | [
"CECILL-B"
] | null | null | null | work_space/XavierWorkspace/app.py | RAXR-Capstone/project_danger_zone | ac117c91e70415346433fef8d93dd7d1a6f27a95 | [
"CECILL-B"
] | null | null | null | work_space/XavierWorkspace/app.py | RAXR-Capstone/project_danger_zone | ac117c91e70415346433fef8d93dd7d1a6f27a95 | [
"CECILL-B"
] | null | null | null | import streamlit
from predict_page import show_predict_page
show_predict_page() | 20.25 | 42 | 0.876543 | 12 | 81 | 5.5 | 0.5 | 0.5 | 0.454545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098765 | 81 | 4 | 43 | 20.25 | 0.90411 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
da2c92624f0c82c7013de1c58c8810eb4129c6bb | 105 | py | Python | porebuilder/__init__.py | mattwthompson/Pore-Builder | 8572919386758053a076a55d0786dcb3b5f32a3c | [
"MIT"
] | null | null | null | porebuilder/__init__.py | mattwthompson/Pore-Builder | 8572919386758053a076a55d0786dcb3b5f32a3c | [
"MIT"
] | null | null | null | porebuilder/__init__.py | mattwthompson/Pore-Builder | 8572919386758053a076a55d0786dcb3b5f32a3c | [
"MIT"
] | null | null | null | from porebuilder.porebuilder import GraphenePoreSolvent
from porebuilder.porebuilder import GraphenePore
| 35 | 55 | 0.904762 | 10 | 105 | 9.5 | 0.5 | 0.315789 | 0.547368 | 0.673684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.07619 | 105 | 2 | 56 | 52.5 | 0.979381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
da841e5ad155f365d283e0e2baa2c1afd9a5824a | 141 | py | Python | trading_bot/data/__init__.py | barberogaston/trading-bot | ce8f98f4b10f2690b578824f9a5a7eaed9ec382c | [
"MIT"
] | null | null | null | trading_bot/data/__init__.py | barberogaston/trading-bot | ce8f98f4b10f2690b578824f9a5a7eaed9ec382c | [
"MIT"
] | null | null | null | trading_bot/data/__init__.py | barberogaston/trading-bot | ce8f98f4b10f2690b578824f9a5a7eaed9ec382c | [
"MIT"
] | null | null | null | import os
def get_data_folder_path() -> str:
"""Returns the data folder path."""
return os.path.abspath(os.path.dirname(__file__))
| 20.142857 | 53 | 0.695035 | 21 | 141 | 4.333333 | 0.666667 | 0.21978 | 0.307692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.163121 | 141 | 6 | 54 | 23.5 | 0.771186 | 0.205674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
16f9aa8f812e20a7bc358b90d7315c8b5f974da8 | 8,736 | py | Python | Settings/kmp_menu_settings.py | kergalym/Korlan | cc3141969d21898842a008b49f8b42492d2cf6e4 | [
"MIT"
] | 3 | 2019-09-17T15:26:42.000Z | 2021-12-09T00:42:32.000Z | Settings/kmp_menu_settings.py | kergalym/Korlan | cc3141969d21898842a008b49f8b42492d2cf6e4 | [
"MIT"
] | null | null | null | Settings/kmp_menu_settings.py | kergalym/Korlan | cc3141969d21898842a008b49f8b42492d2cf6e4 | [
"MIT"
] | 1 | 2019-09-17T13:21:31.000Z | 2019-09-17T13:21:31.000Z | from Settings.menu_settings import MenuSettings
class Keymap(MenuSettings):
def __init__(self):
self.settings = None
MenuSettings.__init__(self)
def set_key_forward(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['forward'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
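
    # Every ``set_key_*`` method in this class repeats the same
    # load/validate/dedupe/write sequence; a single helper along these lines
    # (a sketch only -- ``_set_key`` is not part of the original class) would
    # reduce each method to one call such as ``self._set_key('forward', data)``:
    def _set_key(self, action, data):
        loaded_settings = self.load_settings()
        if loaded_settings and self.str_input_validate_keymap(data):
            deduped = self.duplicate_key_check(data, loaded_settings)
            if deduped is not None:
                data = deduped
            loaded_settings['Keymap'][action] = \
                self.str_input_validate_keymap(data)
            with open(self.cfg_path, "w") as cfg_file:
                loaded_settings.write(cfg_file)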
def set_key_backward(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['backward'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_left(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['left'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_right(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['right'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_crouch(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['crouch'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_jump(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['jump'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_use(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['use'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_attack(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['attack'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_h_attack(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['h_attack'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_f_attack(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['f_attack'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_block(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['block'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_sword(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['sword'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_bow(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['bow'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_tengri(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['tengri'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_key_umay(self, data):
if self.load_settings() and self.str_input_validate_keymap(data):
loaded_settings = self.load_settings()
if self.duplicate_key_check(data, loaded_settings) is not None:
data = self.duplicate_key_check(data, loaded_settings)
loaded_settings['Keymap']['umai'] = self.str_input_validate_keymap(data)
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file)
def set_default_keymap(self):
if self.load_settings():
loaded_settings = self.load_settings()
loaded_settings['Keymap']['forward'] = 'W'
loaded_settings['Keymap']['backward'] = 'S'
loaded_settings['Keymap']['left'] = 'A'
loaded_settings['Keymap']['right'] = 'D'
loaded_settings['Keymap']['crouch'] = 'C'
loaded_settings['Keymap']['jump'] = 'spacebar'
loaded_settings['Keymap']['use'] = 'E'
loaded_settings['Keymap']['attack'] = 'MOUSE1'
loaded_settings['Keymap']['h_attack'] = 'H'
loaded_settings['Keymap']['f_attack'] = 'F'
loaded_settings['Keymap']['block'] = 'MOUSE2'
loaded_settings['Keymap']['sword'] = '1'
loaded_settings['Keymap']['bow'] = '2'
loaded_settings['Keymap']['tengri'] = '3'
loaded_settings['Keymap']['umai'] = '4'
with open(self.cfg_path, "w") as cfg_file:
loaded_settings.write(cfg_file) | 53.595092 | 92 | 0.643201 | 1,102 | 8,736 | 4.764065 | 0.056261 | 0.245333 | 0.154286 | 0.114286 | 0.888381 | 0.862476 | 0.862476 | 0.862476 | 0.862476 | 0.862476 | 0 | 0.00092 | 0.253434 | 8,736 | 163 | 93 | 53.595092 | 0.804048 | 0 | 0 | 0.641379 | 0 | 0 | 0.044867 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117241 | false | 0 | 0.006897 | 0 | 0.131034 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e5174857638297ea7692011ae5c3e9032d1f33c0 | 8,448 | py | Python | source/tests/test_transform.py | couling/XSBE | d307fb06a01b6169a28756dbb8397184b48ec9db | [
"MIT"
] | null | null | null | source/tests/test_transform.py | couling/XSBE | d307fb06a01b6169a28756dbb8397184b48ec9db | [
"MIT"
] | 1 | 2020-12-31T08:24:16.000Z | 2020-12-31T08:24:16.000Z | source/tests/test_transform.py | couling/XSBE | d307fb06a01b6169a28756dbb8397184b48ec9db | [
"MIT"
] | 1 | 2020-12-30T19:05:49.000Z | 2020-12-30T19:05:49.000Z | import unittest
import datetime
from xsbe import simple_node
from xsbe import transform
class Transform(unittest.TestCase):
def test_flatten(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<person id="20" xsbe:type="flatten">
<name>Philip</name>
</person>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<person id="21">
<name>Alan</name>
</person>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
data = parser.transform_from_xml(doc_node)
self.assertDictEqual(
data,
{
'id': 21,
'name': 'Alan'
}
)
def test_repeating(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<people>
<person xsbe:type="repeating" xsbe:name="people">Philip</person>
</people>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<people>
<person>Alan</person>
<person>Also Alan</person>
</people>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
data = parser.transform_from_xml(doc_node)
self.assertDictEqual(
data,
{'people': ['Alan', 'Also Alan']}
)
def test_repeating_flatten(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<people>
<person id="20" xsbe:type="repeating" xsbe:name="people">
<name>Philip</name>
</person>
</people>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<people>
<person id="21">
<name>Alan</name>
</person>
<person id="22">
<name>Also Alan</name>
</person>
</people>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
data = parser.transform_from_xml(doc_node)
self.assertDictEqual(
data,
{
'people': [
{
'name': 'Alan',
'id': 21
},
{
'name': 'Also Alan',
'id': 22
}
]
}
)
def test_friendly_name(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<people>
<person name="Philip" xsbe:value-from="name"/>
</people>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<people>
<person name="Alan"/>
</people>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
data = parser.transform_from_xml(doc_node)
self.assertDictEqual(
data,
{'people': 'Alan'}
)
def test_friendly_name_duplicates_error(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<people>
<person name="Philip" xsbe:value-from="name"/>
</people>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<people>
<person name="Alan"/>
<person name="Also Alan"/>
</people>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
        # pass the callable and its argument separately so assertRaises can catch the exception
        self.assertRaises(
            transform.DuplicateElement,
            parser.transform_from_xml,
            doc_node
        )
class TransformDataTypesInference(unittest.TestCase):
def test_int(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<person xsbe:type="flatten">
<value>27</value>
</person>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<person>
<value>28</value>
</person>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
data = parser.transform_from_xml(doc_node)
self.assertDictEqual(
data,
{
'value': 28,
}
)
def test_int_catch_error(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<person xsbe:type="flatten">
<value>27</value>
</person>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<person>
<value>lorem ipsum dolor sit amet</value>
</person>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
        # pass the callable and its argument separately so assertRaises can catch the exception
        self.assertRaises(
            ValueError,
            parser.transform_from_xml,
            doc_node
        )
def test_float(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<person xsbe:type="flatten">
<value>3.14159</value>
</person>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<person>
<value>1.41421356237</value>
</person>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
data = parser.transform_from_xml(doc_node)
self.assertDictEqual(
data,
{
'value': 1.41421356237,
}
)
def test_string(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<person xsbe:type="flatten">
<value>lorem ipsum dolor sit amet</value>
</person>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<person>
<value>+44012345678910</value>
</person>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
data = parser.transform_from_xml(doc_node)
self.assertDictEqual(
data,
{
'value': '+44012345678910'
}
)
def test_date(self):
schema = """
<xsbe:schema-by-example xmlns:xsbe="http://xsbe.couling.uk">
<xsbe:root>
<person xsbe:type="flatten">
<value>2020-12-30</value>
</person>
</xsbe:root>
</xsbe:schema-by-example>
"""
document = """
<person>
<value>2020-12-31</value>
</person>
"""
parser = transform.create_transformer(
simple_node.loads(schema),
ignore_unexpected=True
)
doc_node = simple_node.loads(document)
data = parser.transform_from_xml(doc_node)
self.assertDictEqual(
data,
{
'value': datetime.date(2020, 12, 31)
}
)
if __name__ == '__main__':
unittest.main()
| 25.445783 | 78 | 0.48035 | 755 | 8,448 | 5.234437 | 0.101987 | 0.053138 | 0.060729 | 0.096154 | 0.845901 | 0.841852 | 0.811488 | 0.801872 | 0.790739 | 0.778846 | 0 | 0.020699 | 0.393821 | 8,448 | 331 | 79 | 25.522659 | 0.751025 | 0 | 0 | 0.707317 | 0 | 0 | 0.453835 | 0.08937 | 0 | 0 | 0 | 0 | 0.034843 | 1 | 0.034843 | false | 0 | 0.013937 | 0 | 0.055749 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e53fe0d9c8ce38dc3ce998bac64ae65d115500cc | 118 | py | Python | platform/hwconf_data/efm32gg11b/PythonSnippet/__init__.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | [
"Zlib"
] | null | null | null | platform/hwconf_data/efm32gg11b/PythonSnippet/__init__.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | [
"Zlib"
] | 1 | 2020-08-25T02:36:22.000Z | 2020-08-25T02:36:22.000Z | platform/hwconf_data/efm32gg11b/PythonSnippet/__init__.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | [
"Zlib"
] | 1 | 2020-08-25T01:56:04.000Z | 2020-08-25T01:56:04.000Z | from efm32gg11b.halconfig import halconfig_types as types
from efm32gg11b.halconfig import halconfig_dependency as dep | 59 | 60 | 0.889831 | 16 | 118 | 6.4375 | 0.5 | 0.271845 | 0.446602 | 0.563107 | 0.737864 | 0 | 0 | 0 | 0 | 0 | 0 | 0.074766 | 0.09322 | 118 | 2 | 60 | 59 | 0.88785 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
f91d542f8f0110bed08a91038037f7c871ee0239 | 6,627 | py | Python | bayesfit/test_checkLogspace.py | LunkRat/bayesfit | aaef3ba013e3ebaf596c2c26baf88b1112b5f73a | [
"Apache-2.0"
] | 44 | 2017-10-03T20:22:04.000Z | 2022-03-16T23:15:19.000Z | bayesfit/test_checkLogspace.py | hoechenberger/bayesfit | cc76e474dfc402c81dd9a85f31ed886350c4f491 | [
"Apache-2.0"
] | 8 | 2018-09-24T16:57:36.000Z | 2021-09-22T18:24:13.000Z | bayesfit/test_checkLogspace.py | hoechenberger/bayesfit | cc76e474dfc402c81dd9a85f31ed886350c4f491 | [
"Apache-2.0"
] | 9 | 2017-11-11T22:48:03.000Z | 2020-10-22T16:02:29.000Z | """
*******************************************************
*
* test_checkLogspace - UNIT TEST FOR TRAVIS CI
*
* License: Apache 2.0
* Written by: Michael Slugocki
* Created on: April 28, 2017
* Last updated: September 13, 2018
*
*******************************************************
"""
#################################################################
# IMPORT MODULES
#################################################################
import numpy as np
from .checkLogspace import check_logspace as _check_logspace
#################################################################
# DEFINE FUNCTIONS USED FOR UNIT TESTING
#################################################################
def _logspace_arg():
# Test cases for logspace is None
logspace = 'Not an options'
# Generate dataset
data = np.array([[0.0010, 45.0000, 90.0000],
[0.0015, 50.0000, 90.0000],
[0.0020, 44.0000, 90.0000],
[0.0025, 44.0000, 90.0000],
[0.0030, 52.0000, 90.0000],
[0.0035, 53.0000, 90.0000],
[0.0040, 62.0000, 90.0000],
[0.0045, 64.0000, 90.0000],
[0.0050, 76.0000, 90.0000],
[0.0060, 79.0000, 90.0000],
[0.0070, 88.0000, 90.0000],
[0.0080, 90.0000, 90.0000],
[0.0100, 90.0000, 90.0000]]);
# Define sigmoid type
sigmoid_type = 'weibull'
# Call function with arguments above
_check_logspace(data, logspace, sigmoid_type, batch = False)
# Update success flag
success = 1
return success
def _logspace_none(branch):
# Test cases for logspace is None
logspace = None
# Generate dataset
data = np.array([[0.0010, 45.0000, 90.0000],
[0.0015, 50.0000, 90.0000],
[0.0020, 44.0000, 90.0000],
[0.0025, 44.0000, 90.0000],
[0.0030, 52.0000, 90.0000],
[0.0035, 53.0000, 90.0000],
[0.0040, 62.0000, 90.0000],
[0.0045, 64.0000, 90.0000],
[0.0050, 76.0000, 90.0000],
[0.0060, 79.0000, 90.0000],
[0.0070, 88.0000, 90.0000],
[0.0080, 90.0000, 90.0000],
[0.0100, 90.0000, 90.0000]]);
# Run through possible error types
if branch == 0:
# Define sigmoid type
sigmoid_type = 'weibull'
elif branch == 1:
# Define sigmoid type
sigmoid_type = 'weibull'
# Add negative number to raise exception
data[0,0] = -0.1
elif branch == 2:
# Define sigmoid type
sigmoid_type = 'logistic'
# Call function with arguments above
_check_logspace(data, logspace, sigmoid_type, batch = False)
# Update success flag
success = 1
return success
def _logspace_true(branch):
# Test cases for logspace is None
logspace = True
# Generate dataset
data = np.array([[0.0010, 45.0000, 90.0000],
[0.0015, 50.0000, 90.0000],
[0.0020, 44.0000, 90.0000],
[0.0025, 44.0000, 90.0000],
[0.0030, 52.0000, 90.0000],
[0.0035, 53.0000, 90.0000],
[0.0040, 62.0000, 90.0000],
[0.0045, 64.0000, 90.0000],
[0.0050, 76.0000, 90.0000],
[0.0060, 79.0000, 90.0000],
[0.0070, 88.0000, 90.0000],
[0.0080, 90.0000, 90.0000],
[0.0100, 90.0000, 90.0000]]);
# Run through possible error types
if branch == 0:
# Define sigmoid type
sigmoid_type = 'weibull'
elif branch == 1:
# Define sigmoid type
sigmoid_type = 'weibull'
# Add negative number to raise exception
data[0,0] = -0.1
# Call function with arguments above
_check_logspace(data, logspace, sigmoid_type, batch = False)
# Update success flag
success = 1
return success
def _logspace_false():
# Test cases for logspace is None
logspace = False
# Generate dataset
data = np.array([[0.0010, 45.0000, 90.0000],
[0.0015, 50.0000, 90.0000],
[0.0020, 44.0000, 90.0000],
[0.0025, 44.0000, 90.0000],
[0.0030, 52.0000, 90.0000],
[0.0035, 53.0000, 90.0000],
[0.0040, 62.0000, 90.0000],
[0.0045, 64.0000, 90.0000],
[0.0050, 76.0000, 90.0000],
[0.0060, 79.0000, 90.0000],
[0.0070, 88.0000, 90.0000],
[0.0080, 90.0000, 90.0000],
[0.0100, 90.0000, 90.0000]]);
# Define sigmoid type
sigmoid_type = 'weibull'
# Call function with arguments above
_check_logspace(data, logspace, sigmoid_type, batch = False)
# Update success flag
success = 1
return success
#################################################################
# UNIT TESTS
#################################################################
def test_logspace_arg():
raised = False
try:
        _logspace_arg()
except:
raised = True
# Assert if exception flag is not raised
assert raised is True
def test_logspace_none_branch0():
success = _logspace_none(0)
# Assert if exception
assert success == 1
def test_logspace_none_branch1():
raised = False
try:
_logspace_none(1)
except:
raised = True
# Assert if exception flag is not raised
assert raised is True
def test_logspace_none_branch2():
success = _logspace_none(2)
# Assert if exception
assert success == 1
def test_logspace_true_branch0():
success = _logspace_true(0)
# Assert if exception
assert success == 1
def test_logspace_true_branch1():
raised = False
try:
_logspace_true(1)
except:
raised = True
# Assert if exception flag is not raised
assert raised is True
def test_logspace_false():
success = _logspace_false()
# Assert if exception
assert success == 1
| 33.639594 | 65 | 0.462955 | 728 | 6,627 | 4.126374 | 0.153846 | 0.11984 | 0.173103 | 0.175766 | 0.842543 | 0.813915 | 0.803595 | 0.780959 | 0.754328 | 0.754328 | 0 | 0.220327 | 0.36306 | 6,627 | 196 | 66 | 33.811224 | 0.491353 | 0.195564 | 0 | 0.785124 | 0 | 0 | 0.013075 | 0 | 0 | 0 | 0 | 0 | 0.057851 | 1 | 0.090909 | false | 0 | 0.016529 | 0 | 0.140496 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
dab60f7e6be6d479b745e1f1b8a8221e2ac5ea6d | 103 | py | Python | pages/regular_exp/__init__.py | robsonzagrejr/pytomato | 3da3d9557f398a7ce2f3f8741c7cd70de9bfe05f | [
"MIT"
] | 2 | 2021-02-25T14:29:13.000Z | 2021-04-12T02:53:55.000Z | pages/regular_exp/__init__.py | robsonzagrejr/pytomato | 3da3d9557f398a7ce2f3f8741c7cd70de9bfe05f | [
"MIT"
] | null | null | null | pages/regular_exp/__init__.py | robsonzagrejr/pytomato | 3da3d9557f398a7ce2f3f8741c7cd70de9bfe05f | [
"MIT"
] | null | null | null | from pages.regular_exp.callbacks import register_callbacks
from pages.regular_exp.layout import layout
| 34.333333 | 58 | 0.883495 | 15 | 103 | 5.866667 | 0.533333 | 0.204545 | 0.363636 | 0.431818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.07767 | 103 | 2 | 59 | 51.5 | 0.926316 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
dac4f28a802cfb94242b0075801ca69e2d7f4f14 | 5,894 | py | Python | src/models/dilated_cnn.py | louisenaud/stock_prediction | 2d35813c09e733acad33dc62972cd90a36d107c4 | [
"MIT"
] | 25 | 2019-04-25T13:18:00.000Z | 2022-03-26T15:18:04.000Z | src/models/dilated_cnn.py | louisenaud/stock_prediction | 2d35813c09e733acad33dc62972cd90a36d107c4 | [
"MIT"
] | null | null | null | src/models/dilated_cnn.py | louisenaud/stock_prediction | 2d35813c09e733acad33dc62972cd90a36d107c4 | [
"MIT"
] | 15 | 2018-08-09T22:15:41.000Z | 2021-09-30T15:58:52.000Z | """
Project: stock_prediction
File: dilated_cnn.py
Created by: louise
On: 20/02/18
At: 1:42 PM
"""
from torch import nn
class DilatedNet(nn.Module):
def __init__(self, num_securities=5, hidden_size=64, dilation=2, T=10):
"""
:param num_securities: int, number of stocks
:param hidden_size: int, size of hidden layers
:param dilation: int, dilation value
:param T: int, number of look back points
"""
super(DilatedNet, self).__init__()
self.dilation = dilation
self.hidden_size = hidden_size
# First Layer
# Input
self.dilated_conv1 = nn.Conv1d(num_securities, hidden_size, kernel_size=2, dilation=self.dilation)
self.relu1 = nn.ReLU()
# Layer 2
self.dilated_conv2 = nn.Conv1d(hidden_size, hidden_size, kernel_size=1, dilation=self.dilation)
self.relu2 = nn.ReLU()
# Layer 3
self.dilated_conv3 = nn.Conv1d(hidden_size, hidden_size, kernel_size=1, dilation=self.dilation)
self.relu3 = nn.ReLU()
# Layer 4
self.dilated_conv4 = nn.Conv1d(hidden_size, hidden_size, kernel_size=1, dilation=self.dilation)
self.relu4 = nn.ReLU()
# Output layer
self.conv_final = nn.Conv1d(hidden_size, num_securities, kernel_size=1)
self.T = T
def forward(self, x):
"""
:param x: Pytorch Variable, batch_size x n_stocks x T
:return:
"""
# First layer
out = self.dilated_conv1(x)
out = self.relu1(out)
# Layer 2:
out = self.dilated_conv2(out)
out = self.relu2(out)
# Layer 3:
out = self.dilated_conv3(out)
out = self.relu3(out)
# Layer 4:
out = self.dilated_conv4(out)
out = self.relu4(out)
# Final layer
out = self.conv_final(out)
out = out[:, :, -1]
return out
class DilatedNet2D(nn.Module):
def __init__(self, hidden_size=64, dilation=1, T=10):
"""
:param hidden_size: int, size of hidden layers
:param dilation: int, dilation value in the time dimension (1 for the other dimension, aka between the stocks)
:param T: int, number of look back points
"""
super(DilatedNet2D, self).__init__()
self.dilation = dilation
self.hidden_size = hidden_size
# First Layer
# Input
self.dilated_conv1 = nn.Conv2d(1, hidden_size, kernel_size=(1, 2), dilation=(1, self.dilation))
self.relu1 = nn.ReLU()
# Layer 2
self.dilated_conv2 = nn.Conv2d(hidden_size, hidden_size, kernel_size=(1, 2), dilation=(1, self.dilation))
self.relu2 = nn.ReLU()
# Layer 3
self.dilated_conv3 = nn.Conv2d(hidden_size, hidden_size, kernel_size=(1, 2), dilation=(1, self.dilation))
self.relu3 = nn.ReLU()
# Layer 4
self.dilated_conv4 = nn.Conv2d(hidden_size, hidden_size, kernel_size=(1, 2), dilation=(1, self.dilation))
self.relu4 = nn.ReLU()
# Output layer
self.conv_final = nn.Conv2d(hidden_size, 1, kernel_size=(1, 2))
self.T = T
def forward(self, x):
"""
:param x: Pytorch Variable, batch_size x 1 x n_stocks x T
:return:
"""
# First layer
out = self.dilated_conv1(x)
out = self.relu1(out)
# Layer 2:
out = self.dilated_conv2(out)
out = self.relu2(out)
# Layer 3:
out = self.dilated_conv3(out)
out = self.relu3(out)
# Layer 4:
out = self.dilated_conv4(out)
out = self.relu4(out)
# Final layer
out = self.conv_final(out)
out = out[:, :, :, -1]
return out
class DilatedNet2DMultistep(nn.Module):
def __init__(self, num_securities=5, n_in=20, n_out=3, hidden_size=64, dilation=1, T=10):
"""
:param num_securities:
:param n_in: number of time points in the input
:param n_out: number of time points in the output
:param hidden_size: int
:param dilation: int
:param T: int, length of lookback
"""
super(DilatedNet2DMultistep, self).__init__()
self.n_out = n_out
self.n_in = n_in
self.dilation = dilation
self.hidden_size = hidden_size
# First Layer
# Input
self.dilated_conv1 = nn.Conv2d(1, hidden_size, kernel_size=(1, 2), dilation=(1, self.dilation)) # dilation in
# the time dimension
self.relu1 = nn.ReLU()
# Layer 2
self.dilated_conv2 = nn.Conv2d(hidden_size, hidden_size, kernel_size=(1, 2), dilation=(1, self.dilation))
self.relu2 = nn.ReLU()
# Layer 3
self.dilated_conv3 = nn.Conv2d(hidden_size, hidden_size, kernel_size=(1, 2), dilation=(1, self.dilation))
self.relu3 = nn.ReLU()
# Layer 4
self.dilated_conv4 = nn.Conv2d(hidden_size, hidden_size, kernel_size=(1, 2), dilation=(1, self.dilation))
self.relu4 = nn.ReLU()
# Output layer
self.conv_final = nn.Conv2d(hidden_size, 1, kernel_size=(1, 2))
self.T = T
def forward(self, x):
"""
:param x: Pytorch Variable, batch_size x 1 x T x n_stocks
:return:
"""
# First layer
out = self.dilated_conv1(x)
out = self.relu1(out)
# Layer 2:
out = self.dilated_conv2(out)
out = self.relu2(out)
# Layer 3:
out = self.dilated_conv3(out)
out = self.relu3(out)
# Layer 4:
out = self.dilated_conv4(out)
out = self.relu4(out)
# Final layer
out = self.conv_final(out)
out = out[:, :, :, -self.n_out:]
return out
| 28.61165 | 120 | 0.570241 | 767 | 5,894 | 4.215124 | 0.116037 | 0.111352 | 0.047634 | 0.074234 | 0.831735 | 0.815342 | 0.801114 | 0.801114 | 0.764306 | 0.742035 | 0 | 0.038854 | 0.318799 | 5,894 | 205 | 121 | 28.75122 | 0.766376 | 0.209196 | 0 | 0.761905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.011905 | 0 | 0.154762 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
daeacce2b6d5ace3f72051db3b620638fa96aa54 | 45,310 | py | Python | Projects/Clustering-classification approach for human activity detection using smart phone dataset/classification_approach_for_human_activity_detection_using_smart_phone_dataset.py | shyammarjit/CS-306-Machine-Learning | 77d2ebbd067bb1460f115c8a7099a88218932da7 | [
"MIT"
] | 9 | 2021-10-03T06:03:50.000Z | 2021-10-31T13:42:03.000Z | Projects/Clustering-classification approach for human activity detection using smart phone dataset/classification_approach_for_human_activity_detection_using_smart_phone_dataset.py | shyammarjit/Machine-Learning | 77d2ebbd067bb1460f115c8a7099a88218932da7 | [
"MIT"
] | null | null | null | Projects/Clustering-classification approach for human activity detection using smart phone dataset/classification_approach_for_human_activity_detection_using_smart_phone_dataset.py | shyammarjit/Machine-Learning | 77d2ebbd067bb1460f115c8a7099a88218932da7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Classification approach for human activity detection using smart phone Dataset.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1k2eGF5VY4KfY8P-CDBSERVsf0XF2-LHb
# Data link
https://archive.ics.uci.edu/ml/datasets/human+activity+recognition+using+smartphones<br/>
[click](https://github.com/MadhavShashi/Human-Activity-Recognition-Using-Smartphones-Sensor-DataSet/blob/master/README.md#problem-statement)
# Data Description
Total activity classes:<br/>
1. WALKING<br/>
2. WALKING_UPSTAIRS<br/>
3. WALKING_DOWNSTAIRS<br/>
4. SITTING<br/>
5. STANDING<br/>
6. LAYING<br/>
<br/>
# Importing Python Libraries
"""
# import pandas module
import pandas as pd
import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression, SGDClassifier
from sklearn.multiclass import OneVsRestClassifier
from sklearn.utils import shuffle
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.model_selection import KFold
from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score, accuracy_score, classification_report
import warnings
warnings.filterwarnings('ignore')
import plotly.express as px
# load the dataset from the google drive
from google.colab import drive
drive.mount('/content/drive')
"""# Feature Description"""
# get the name of the features from the file (features.txt)
features = list()
with open('/content/drive/MyDrive/UCI HAR Dataset/features.txt') as f:
features = [line.split()[1] for line in f.readlines()]
print(f'No of Features: {len(features)}')
print("Name of the features:\n",features)
"""**For train (for plotting)**"""
# get the data from the txt files into a pandas dataframe
X_train = pd.read_csv('/content/drive/MyDrive/UCI HAR Dataset/train/X_train.txt', delim_whitespace=True, header=None)
X_train.columns = features
y_train = pd.read_csv('/content/drive/MyDrive/UCI HAR Dataset/train/y_train.txt', names=['Activity'], squeeze=True)
y_train_labels = y_train.map({1: 'WALKING', 2:'WALKING_UPSTAIRS',3:'WALKING_DOWNSTAIRS', 4:'SITTING', 5:'STANDING',6:'LAYING'})
# put all columns in a single dataframe
train = X_train.copy()
train['subject'] = pd.read_csv('/content/drive/MyDrive/UCI HAR Dataset/train/subject_train.txt', header=None, squeeze=True)
train['Activity'] = y_train
train['ActivityName'] = y_train_labels
"""**For Test (for plotting)**"""
# get the data from the txt files into a pandas dataframe
X_test = pd.read_csv('/content/drive/MyDrive/UCI HAR Dataset/test/X_test.txt', delim_whitespace=True, header=None)
y_test = pd.read_csv('/content/drive/MyDrive/UCI HAR Dataset/test/y_test.txt', names=['Activity'], squeeze=True)
y_test_labels = y_test.map({1: 'WALKING', 2:'WALKING_UPSTAIRS',3:'WALKING_DOWNSTAIRS', 4:'SITTING', 5:'STANDING',6:'LAYING'})
# put all columns in a single dataframe
test = X_test.copy()
test['subject'] = pd.read_csv('/content/drive/MyDrive/UCI HAR Dataset/test/subject_test.txt', header=None, squeeze=True)
test['Activity'] = y_test
test['ActivityName'] = y_test_labels
# for plotting only
columns = train.columns
columns = list(map(lambda a: a.replace('()', ''), columns))
columns = list(map(lambda a: a.replace('-',''), columns))
columns = list(map(lambda a: a.replace(',',''), columns))
train.columns = columns
test.columns = columns
train.to_csv('/content/drive/MyDrive/UCI HAR Dataset/train.csv', index=False)
test.to_csv('/content/drive/MyDrive/UCI HAR Dataset/test.csv', index=False)
"""**For doing classification**<br/>
opening CSV File
"""
try:
# creating a data frame
X_train = pd.read_csv("/content/drive/MyDrive/X_train.csv", header = None)
y_train = pd.read_csv("/content/drive/MyDrive/y_train.csv", header = None)
X_test = pd.read_csv("/content/drive/MyDrive/X_test.csv", header = None)
y_test = pd.read_csv("/content/drive/MyDrive/y_test.csv", header = None)
# add X_train and X_test in one dataframe for the K fold cross validation
X = pd.concat([X_train, X_test], ignore_index=True)
# add y_train and y_test in one CSV file for the K fold cross validation
y = pd.concat([y_train, y_test], ignore_index=True)
except Exception as e:
print("An exception occurred:")
print(e)
"""# Data Visualization"""
# No of classes
print("Different Activity labels in dataset: ", y[0].unique())
"""**Is there any class Imbalance issue in the datset ?**"""
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style('whitegrid')
plt.rcParams['font.family'] = 'Dejavu Sans'
plt.figure(figsize=(18,13))
plt.title('Data provided by each of the 30 users', fontsize=20)
sns.countplot(x='subject',hue='ActivityName', data = train)
plt.savefig("Data provided by each of 30 user.pdf")
plt.show()
plt.figure(figsize=(12,8))
sns.set()
sns.countplot(train.ActivityName, palette ='hls')
plt.xticks(rotation=90)
plt.savefig("Activity label.pdf")
plt.show()
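# A quick numeric complement to the countplot above (a minimal sketch using the
# `train` dataframe built earlier): the normalized class frequencies are all
# close to 1/6, which backs the "no class imbalance" conclusion below.
print(train['ActivityName'].value_counts(normalize=True).round(3))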
"""Class Imbalance: A issue in the dataset, it occures when our class distributions are highly imbalanced
i.e. when no of patterns present in one class is much higher/lower than no of
patterns present in another class.
As the class distributions of differnent Activity labels are nearby equally distributed,
so there is no class imbalanced issue in the UCI HAR Data Set.
**Is there any Data Scarcity issue in the dataset ?**<br/>
Note: Data scarcity is when a) there is limited amount or a complete lack of labeled training data, or b) lack of data for a given label compared to the other labels (a.k.a data imbalance).
"""
print("No of labelled training patterns/samples (in one training fold as we are using 5 fold cross validation):\n", int((X.shape[0]*4)/5))
print("No of labelled testing patterns/samples (in one testing fold as we are using 5 fold cross validation):\n", int(X.shape[0]/5))
"""No of labelled training patterns/samples (in one training fold as we are using 5 fold cross validation): 8239
Since there is no lack of labeled training data, there is no data scarcity issue in the dataset.
# Feature Scaling (Min-Max Normalization)<br/>
$X' = \frac{X - \min(X)}{\max(X) - \min(X)}$
"""
# dataframe to array
X = np.array(X)
y = np.array(y)
# Noramalize the X
scaler = MinMaxScaler()
scaler.fit(X)
X = scaler.transform(X)
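# Sanity check (a minimal sketch): MinMaxScaler implements exactly the formula
# X' = (X - min(X)) / (max(X) - min(X)) per feature column, so after fitting and
# transforming on the same data every feature value lies in [0, 1].
assert X.min() >= 0.0 and X.max() <= 1.0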
"""# Shuffel the Data Set"""
# shuffel the dataset
X, y = shuffle(X, y, random_state=100)
print("Shape of dataset(input features):", X.shape)
print("Shape of y(input features):", y.shape)
"""# K-Fold Cross Validation (K=5)
We have used 5-fold cross validation to increase the generalization of the model (in terms of a more precise accuracy claim) and also to increase the robustness of the model.<br/>
In 5-fold cross validation every sample is used exactly once for testing and 4 times for training the model; a quick sanity check of this property appears right after the `fold()` call below.
"""
def fold(features, y_actual):
"""
    INPUT
        features: 2D array containing the input of the model, shape = (no of patterns, no of features)
        y_actual: 1D array containing the actual class label of each data sample, shape = (no of patterns, )
    OUTPUT
        all_x_train: per-fold feature training data, indexed as (fold no, feature training data)
        all_x_test: per-fold feature testing data, indexed as (fold no, feature testing data)
        all_y_train: per-fold labeled training data, indexed as (fold no, labeled training data)
        all_y_test: per-fold labeled testing data, indexed as (fold no, labeled testing data)
"""
kf = KFold(n_splits = 5, random_state = 1000, shuffle = True)
kf.get_n_splits(features)
all_x_train, all_x_test, all_y_train, all_y_test = [], [], [], []
for train_index, test_index in kf.split(features):
X_train, X_test = features[train_index], features[test_index]
y_train, y_test = y_actual[train_index], y_actual[test_index]
all_x_train.append(X_train)
all_x_test.append(X_test)
all_y_train.append(y_train)
all_y_test.append(y_test)
all_x_train, all_x_test, all_y_train, all_y_test = np.array(all_x_train), np.array(all_x_test), np.array(all_y_train), np.array(all_y_test)
for i in range(0, 5):
all_y_train[i] = all_y_train[i].flatten()
return all_x_train, all_x_test, all_y_train, all_y_test
all_x_train, all_x_test, all_y_train, all_y_test = fold(X, y)
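# A quick sanity check of the 5-fold property stated above (a minimal sketch):
# the five test partitions are disjoint and together cover the whole dataset,
# so their sizes must sum to the total number of patterns.
assert sum(fold_x.shape[0] for fold_x in all_x_test) == X.shape[0]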
"""# 1. Classification"""
"""
Creating the validation set to get the best hyperparameters.
We divide the first training fold into two segments,
training and validation, in an 80:20 ratio.
The best hyperparameters are chosen based on which
combination of hyperparameters yields the maximum accuracy on the validation set.
"""
X_train, X_validation, y_train, y_validation = train_test_split(all_x_train[0], all_y_train[0], test_size=0.20, random_state=42)
"""# Using inbuild Logistic Regression (SGD) classifier<br/>
Using One vs ALL/REST
"""
# hyperparameter tuning for the logistic regression classifier
def logistic_hyperparameter_tuning(epoch, alpha, roh, n_iter_no_change, X_train, X_validation, y_train, y_validation):
"""
    INPUT
        epoch: 1D int array containing different values of epochs (hyperparameter)
        alpha: 1D float array containing different values of alpha, i.e. the learning rate (hyperparameter)
        roh: 1D float array containing different values of the tolerance, i.e. roh (hyperparameter)
        n_iter_no_change: 1D int array containing different values of
                          the number of iterations with no improvement to wait before stopping fitting (hyperparameter).
        X_train: 2D array of shape = (no of patterns, no of features)
        X_validation: 2D array of shape = (no of patterns, no of features)
        y_train: 1D array of shape = (no of patterns, )
        y_validation: 1D array of shape = (no of patterns, )
    OUTPUT
        best_hyperparameter: tuple (epoch, alpha, roh, n_iter_no_change) with the best accuracy on the validation set.
"""
val_acc = []
for i in range(0, epoch.shape[0]):
# we are taking logloss function for error calculation
logistic_reg_classifier = OneVsRestClassifier(SGDClassifier(loss = 'log', alpha = alpha[i], fit_intercept = True, max_iter = epoch[i], tol = roh[i], n_iter_no_change = n_iter_no_change[i])).fit(X_train, y_train)
predicted = logistic_reg_classifier.predict(X_validation)
val_acc.append(accuracy_score(y_validation, predicted)*100)
# Get the maximum accuracy on validation
max_value = max(val_acc)
max_index = val_acc.index(max_value)
best_hyperparameter = (epoch[max_index], alpha[max_index], roh[max_index], n_iter_no_change[max_index])
print("Best Hyperparameter:")
print("Epoch = ", epoch[max_index])
print("Alpha = ", alpha[max_index])
print("Roh = ", roh[max_index])
print("n_iter_no_change (Number of iterations with no improvement) = ", n_iter_no_change[max_index])
return best_hyperparameter
epoch = np.array([100, 150, 200, 250, 300, 350, 400, 450, 500, 550])
alpha = np.array([0.01, 0.001, 0.0001, 0.00001, 0.125, 0.15, 0.18, 0.2, 0.25, 0.3])
roh = np.array([0.00001, 0.00001, 0.000001, 0.0000001, 0.000001, 0.0001, 0.0001, 0.0001, 0.0001, 0.000001])
n_iter_no_change = np.array([8, 9, 10, 11, 12, 13, 14, 15, 16, 18])
epoch, alpha, roh, n_iter_no_change = logistic_hyperparameter_tuning(epoch, alpha, roh, n_iter_no_change, X_train, X_validation, y_train, y_validation)
for i in range(0, 5): # for 5 fold
print("For fold no:", i+1)
print("-"*100)
logistic_regression = OneVsRestClassifier(SGDClassifier(loss = 'log', alpha = alpha, fit_intercept = True,\
max_iter = epoch, tol = roh, n_iter_no_change = n_iter_no_change,\
verbose= False))
logistic_regression.fit(all_x_train[i], all_y_train[i])
print("Accuracy on training data: " + str(logistic_regression.score(all_x_train[i], all_y_train[i])*100) + "%")
predicted = logistic_regression.predict(all_x_test[i])
print("Testing Accuracy Score: " + str(accuracy_score(all_y_test[i], predicted)*100))
print("Confusion Matrix : \n" + str(confusion_matrix(all_y_test[i], predicted)))
print("Classification Report for 6-classes: ")
out_labels = [1, 2, 3, 4, 5, 6]
    print(classification_report(all_y_test[i], predicted, labels=out_labels, digits=4))
print("-"*100)
"""**Is there any overfit issue in Logistic Regrerssion?**<br/>
It's not getting overfit as we are getting good accuracy on training set as well as in testing set.<br/>
So, to assure the overfit issue we are using validation set and making the plot in which we are plotting training and validation mse (both) vs epoch during training in which we are getting one saturation point (epoch) in which training mse is decreasing but validation mse in not fllowing the same trend of training mse (either validation mse is constant or increaing).
"""
logistic_regression = OneVsRestClassifier(SGDClassifier(loss = 'log', alpha = alpha, fit_intercept = True, max_iter = epoch, tol = roh, n_iter_no_change = n_iter_no_change))
logistic_regression.fit(X_train, y_train)
print("Accuracy on training data: " + str(logistic_regression.score(X_train, y_train)*100) + "%")
print("-"*100)
test_predicted = logistic_regression.predict(all_x_test[0])
print("Testing Accuracy Score: " + str(accuracy_score(all_y_test[0], test_predicted)*100))
print("Testing Confusion Matrix : \n" + str(confusion_matrix(all_y_test[0], test_predicted)))
print("-"*100)
valid_predicted = logistic_regression.predict(X_validation)
print("Validation Accuracy Score: " + str(accuracy_score(y_validation, valid_predicted)*100))
print("Validation Confusion Matrix : \n" + str(confusion_matrix(y_validation, valid_predicted)))
print("-"*100)
"""# Single Layer Perceptron"""
# single-layer perceptron (SLP) classifier
class SingleLayerPerceptron():
def predict(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
return self.predict_(self.add_bias(X))
def predict_(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
pre_vals = np.dot(X, self.weights.T).reshape(-1,len(self.classes))
return self.softmax(pre_vals)
def softmax(self, z):
"""
z: 1D array of shape (no of patterns, )
"""
return np.exp(z) / np.sum(np.exp(z), axis=1).reshape(-1,1)
def predict_classes(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
self.probs_ = self.predict(X)
return np.vectorize(lambda c: self.classes[c])(np.argmax(self.probs_, axis=1))
def add_bias(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
return np.insert(X, 0, 1, axis=1)
def one_hot(self, y):
"""
y: 1D array of shape (no of patterns, )
"""
return np.eye(len(self.classes))[np.vectorize(lambda c: self.class_labels[c])(y).reshape(-1)]
def score(self, X, y):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
"""
return np.mean(self.predict_classes(X) == y)
def evaluate_(self, X, y):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
"""
return np.mean(np.argmax(self.predict_(X), axis=1) == np.argmax(y, axis=1))
def logloss(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return np.mean(-y*np.log(probs) - (1-y)*np.log(1-probs))
def cross_entropy(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return -1 * np.mean(y * np.log(probs))
def mse(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return (((y - probs)**2).mean())/2
def fit(self, X, y, epoch, roh, lr):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
epoch: int, convergence criteria (hyperparameter)
roh: float, convergence criteria (hyperparameter)
lr: float, learning rate (hyperparameter)
"""
self.epoch = epoch
self.roh = roh
self.lr = lr
self.classes = np.unique(y)
self.class_labels = {c:i for i,c in enumerate(self.classes)}
X = self.add_bias(X)
y = self.one_hot(y)
self.loss = []
self.weights = np.zeros(shape=(len(self.classes),X.shape[1]))*0.1
self.fit_data(X, y)
return self
def fit_data(self, X, y):
"""
        X: 2D array of shape (no of patterns, no of features + 1), bias column included
        y: 2D one-hot array of shape (no of patterns, no of classes)
"""
itr = 0
while (not self.epoch or itr < self.epoch):
self.loss.append(self.mse(y, self.predict_(X)))
            # apply the threshold function to the predicted values, i.e. here self.predict_(X)
            temp = self.predict_(X)
            # threshold activation on the predicted values of all patterns
            # (vectorized 0/1 threshold at 0.5, equivalent to the original element-wise loop)
            temp = (temp >= 0.5).astype(float)
#print("Iteration: ", itr+1, " Mse: ", self.mse(y, self.predict_(X)))
error = y - temp
update = (self.lr * np.dot(error.T, X))
self.weights += update
if np.abs(update).max() < self.roh:
print("Converged through roh criteria.")
break
itr +=1
if(itr==self.epoch):
print("Converged through maximum of Iteration:")
"""Hyperparameter Tuning for slp"""
def slp_hyperparameter_tuning(epoch, alpha, roh, X_train, X_validation, y_train, y_validation):
"""
    INPUT
        epoch: 1D int array containing different values of epochs (hyperparameter)
        alpha: 1D float array containing different values of alpha, i.e. the learning rate (hyperparameter)
        roh: 1D float array containing different values of the tolerance, i.e. roh (hyperparameter)
        X_train: 2D array of shape = (no of patterns, no of features)
        X_validation: 2D array of shape = (no of patterns, no of features)
        y_train: 1D array of shape = (no of patterns, )
        y_validation: 1D array of shape = (no of patterns, )
    OUTPUT
        best_hyperparameter: tuple (epoch, alpha, roh) with the best accuracy on the validation set.
"""
val_acc = []
for i in range(0, epoch.shape[0]):
        # fit the single-layer perceptron with the candidate hyperparameters
slp_classifier = SingleLayerPerceptron().fit(X_train, y_train, epoch = epoch[i], roh = roh[i], lr = alpha[i])
predicted = slp_classifier.predict_classes(X_validation)
val_acc.append(accuracy_score(y_validation, predicted)*100)
# Get the maximum accuracy on validation
max_value = max(val_acc)
max_index = val_acc.index(max_value)
best_hyperparameter = (epoch[max_index], alpha[max_index], roh[max_index])
print("Best Hyperparameter:")
print("Epoch = ", epoch[max_index])
print("Alpha = ", alpha[max_index])
print("Roh = ", roh[max_index])
return best_hyperparameter
epoch = np.array([100, 150, 200, 250, 300, 350, 400, 450, 500, 550])
alpha = np.array([0.0001, 0.0001, 0.00001, 0.00001, 0.00001, 0.00001, 0.00001, 0.00001, 0.0001, 0.0001])
roh = np.array([0.00001, 0.00001, 0.000001, 0.0000001, 0.000001, 0.0001, 0.0001, 0.0001, 0.0001, 0.000001])
epoch, alpha, roh = slp_hyperparameter_tuning(epoch, alpha, roh, X_train, X_validation, y_train, y_validation)
for i in range(0, 5): # for 5 fold
print("For fold no:", i+1)
print("-"*100)
slp = SingleLayerPerceptron().fit(all_x_train[i], all_y_train[i], epoch = epoch, roh = roh, lr = alpha)
print("Accuracy on training data: " + str(slp.score(all_x_train[i], all_y_train[i])*100) + "%")
predicted = slp.predict_classes(all_x_test[i])
print("Testing Accuracy Score: " + str(accuracy_score(all_y_test[i], predicted)*100))
print('Confusion Matrix : \n' + str(confusion_matrix(all_y_test[i], predicted)))
out_labels = [1, 2, 3, 4, 5, 6]
    print(classification_report(all_y_test[i], predicted, labels=out_labels, digits = 6))
print("-"*100)
# Overfit Detection
import numpy as np
import matplotlib.pyplot as plt
# SLP variant that also tracks validation loss, used for overfit detection
class Overfit_SingleLayerPerceptron():
def predict(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
return self.predict_(self.add_bias(X))
def predict_(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
        pre_vals = np.dot(X, self.weights.T).reshape(-1, len(self.classes))  # use the fitted classes, not the global y
return self.softmax(pre_vals)
def softmax(self, z):
"""
z: 1D array of shape (no of patterns, )
"""
return np.exp(z) / np.sum(np.exp(z), axis=1).reshape(-1,1)
def predict_classes(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
self.probs_ = self.predict(X)
return np.vectorize(lambda c: self.classes[c])(np.argmax(self.probs_, axis=1))
def add_bias(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
return np.insert(X, 0, 1, axis=1)
def one_hot(self, y):
"""
y: 1D array of shape (no of patterns, )
"""
return np.eye(len(self.classes))[np.vectorize(lambda c: self.class_labels[c])(y).reshape(-1)]
def score(self, X, y):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
"""
return np.mean(self.predict_classes(X) == y)
def evaluate_(self, X, y):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
"""
return np.mean(np.argmax(self.predict_(X), axis=1) == np.argmax(y, axis=1))
def logloss(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return np.mean(-y*np.log(probs) - (1-y)*np.log(1-probs))
def cross_entropy(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return -1 * np.mean(y * np.log(probs))
def mse(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return (((y - probs)**2).mean())/2
def fit(self, X, y, X_validation, y_validation, epoch, roh, lr):
"""
X: 2D array of shape (no of patterns in training set, no of features)
y: 1D array of shape (no of patterns in training set, )
X_validation: 2D array of shape (no of patterns in validation set, no of features)
y_validation: 1D array of shape (no of patterns in validation set, )
epoch: int, convergence criteria (hyperparameter)
roh: float, convergence criteria (hyperparameter)
lr: float, learning rate (hyperparameter)
"""
self.epoch = epoch
self.roh = roh
self.lr = lr
self.classes = np.unique(y)
self.class_labels = {c:i for i,c in enumerate(self.classes)}
X = self.add_bias(X)
X_validation = self.add_bias(X_validation)
y = self.one_hot(y)
y_valid = self.one_hot(y_validation)
self.loss_train = []
self.loss_valid = []
self.weights = np.zeros(shape=(len(self.classes),X.shape[1]))*0.1
self.fit_data(X, y, X_validation, y_valid)
return self
def fit_data(self, X, y, X_validation, y_valid):
"""
        X: 2D array of shape (no of patterns in training set, no of features + 1), bias column included
        y: 2D one-hot array of shape (no of patterns in training set, no of classes)
        X_validation: 2D array of shape (no of patterns in validation set, no of features + 1), bias column included
        y_valid: 2D one-hot array of shape (no of patterns in validation set, no of classes)
"""
itr = 0
while (not self.epoch or itr < self.epoch):
self.loss_train.append(self.mse(y, self.predict_(X)))
self.loss_valid.append(self.mse(y_valid, self.predict_(X_validation)))
            # apply the threshold function to the predicted values, i.e. here self.predict_(X)
            temp = self.predict_(X)
            # threshold activation on the predicted values of all patterns
            # (vectorized 0/1 threshold at 0.5, equivalent to the original element-wise loop)
            temp = (temp >= 0.5).astype(float)
#print("Iteration: ", itr+1, " Mse: ", self.mse(y, self.predict_(X)))
error = y - temp
update = (self.lr * np.dot(error.T, X))
self.weights += update
if np.abs(update).max() < self.roh:
print("Converged through roh criteria.")
break
itr +=1
if(itr==self.epoch):
print("Converged through maximum of Iteration:")
oslp = Overfit_SingleLayerPerceptron().fit(X_train, y_train, X_validation, y_validation, epoch, alpha, roh)
print("Accuracy on training data: " + str(oslp.score(X_train, y_train)*100) + "%")
print("-"*100)
test_predicted = oslp.predict_classes(all_x_test[0])
print("Testing Accuracy Score: " + str(accuracy_score(all_y_test[0], test_predicted)*100))
print("Testing Confusion Matrix : \n" + str(confusion_matrix(all_y_test[0], test_predicted)))
print("-"*100)
valid_predicted = oslp.predict_classes(X_validation)
print("Validation Accuracy Score: " + str(accuracy_score(y_validation, valid_predicted)*100))
print("Validation Confusion Matrix : \n" + str(confusion_matrix(y_validation, valid_predicted)))
print("-"*100)
loss_train_mse = oslp.loss_train
loss_valid_mse = oslp.loss_valid
def plotting(x, y_1, y_2, label_1, label_2, t):
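    """Plot two curves (e.g. training and validation MSE) against a shared x-axis,
    with legend labels label_1/label_2 and figure title t, and save the figure."""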
plt.plot(x, y_1, label = label_1)
plt.plot(x, y_2, label = label_2)
plt.xlabel("Epoch")
plt.ylabel("Training and Validation MSE")
plt.legend()
plt.title(t)
plt.savefig("slp_overfit.pdf")
plt.show()
x = []
for i in range(0, len(loss_valid_mse)):
x.append(i)
plotting(x, loss_train_mse, loss_valid_mse, "Training MSE", "Validation MSE", "Training and Validation MSE")
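# Reading the plot: if the training MSE keeps falling while the validation MSE
# flattens or rises, the epoch where the two curves diverge marks the onset of
# overfitting; per the discussion above, no such divergence is expected here.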
"""# Sigmoid Neuron"""
# sigmoid neuron classifier
class SigmoidNeuron():
def predict(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
return self.predict_(self.add_bias(X))
def predict_(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
pre_vals = np.dot(X, self.weights.T).reshape(-1,len(self.classes))
return self.softmax(pre_vals)
def softmax(self, z):
"""
z: 1D array of shape (no of patterns, )
"""
return np.exp(z) / np.sum(np.exp(z), axis=1).reshape(-1,1)
def predict_classes(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
self.probs_ = self.predict(X)
return np.vectorize(lambda c: self.classes[c])(np.argmax(self.probs_, axis=1))
def add_bias(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
return np.insert(X, 0, 1, axis=1)
def one_hot(self, y):
"""
y: 1D array of shape (no of patterns, )
"""
return np.eye(len(self.classes))[np.vectorize(lambda c: self.class_labels[c])(y).reshape(-1)]
def score(self, X, y):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
"""
return np.mean(self.predict_classes(X) == y)
def evaluate_(self, X, y):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
"""
return np.mean(np.argmax(self.predict_(X), axis=1) == np.argmax(y, axis=1))
def logloss(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return np.mean(-y*np.log(probs) - (1-y)*np.log(1-probs))
def cross_entropy(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return -1 * np.mean(y * np.log(probs))
def mse(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return (((y - probs)**2).mean())/2
def fit(self, X, y, epoch, roh, lr):
"""
X: 2D array of shape (no of patterns in training set, no of features)
y: 1D array of shape (no of patterns in training set, )
epoch: int, convergence criteria (hyperparameter)
roh: float, convergence criteria (hyperparameter)
lr: float, learning rate (hyperparameter)
"""
self.epoch = epoch
self.roh = roh
self.lr = lr
self.classes = np.unique(y)
self.class_labels = {c:i for i,c in enumerate(self.classes)}
X = self.add_bias(X)
y = self.one_hot(y)
self.loss = []
self.weights = np.zeros(shape=(len(self.classes),X.shape[1]))*0.1
self.fit_data(X, y)
return self
def fit_data(self, X, y):
"""
        X: 2D array of shape (no of patterns, no of features + 1), bias column included
        y: 2D one-hot array of shape (no of patterns, no of classes)
"""
itr = 0
while (not self.epoch or itr < self.epoch):
self.loss.append(self.mse(y, self.predict_(X)))
            # use the raw (soft) predicted values directly; unlike the perceptron, no hard threshold is applied here
temp = self.predict_(X)
#print("Iteration: ", itr+1, " Mse: ", self.mse(y, self.predict_(X)))
error = y - temp
update = (self.lr * np.dot(error.T, X))
self.weights += update
if np.abs(update).max() < self.roh:
print("Converged through roh criteria.")
break
itr +=1
if(itr==self.epoch):
print("Converged through maximum of Iteration:")
# hyperparameter tuning
def sn_hyperparameter_tuning(epoch, alpha, roh, X_train, X_validation, y_train, y_validation):
"""
    INPUT
        epoch: 1D int array containing different values of epochs (hyperparameter)
        alpha: 1D float array containing different values of alpha, i.e. the learning rate (hyperparameter)
        roh: 1D float array containing different values of the tolerance, i.e. roh (hyperparameter)
        X_train: 2D array of shape = (no of patterns, no of features)
        X_validation: 2D array of shape = (no of patterns, no of features)
        y_train: 1D array of shape = (no of patterns, )
        y_validation: 1D array of shape = (no of patterns, )
    OUTPUT
        best_hyperparameter: tuple (epoch, alpha, roh) with the best accuracy on the validation set.
"""
val_acc = []
for i in range(0, epoch.shape[0]):
        # fit the sigmoid neuron with the candidate hyperparameters
sn_classifier = SigmoidNeuron().fit(X_train, y_train, epoch = epoch[i], roh = roh[i], lr = alpha[i])
predicted = sn_classifier.predict_classes(X_validation)
val_acc.append(accuracy_score(y_validation, predicted)*100)
# Get the maximum accuracy on validation
max_value = max(val_acc)
max_index = val_acc.index(max_value)
best_hyperparameter = (epoch[max_index], alpha[max_index], roh[max_index])
print("Best Hyperparameter:")
print("Epoch = ", epoch[max_index])
print("Alpha = ", alpha[max_index])
print("Roh = ", roh[max_index])
return best_hyperparameter
epoch = np.array([100, 150, 200, 250, 300, 350, 400, 450, 500, 550])
alpha = np.array([0.0001, 0.0001, 0.00001, 0.00001, 0.00001, 0.00001, 0.00001, 0.00001, 0.0001, 0.0001])
roh = np.array([0.00001, 0.00001, 0.000001, 0.0000001, 0.000001, 0.0001, 0.0001, 0.0001, 0.0001, 0.000001])
epoch, alpha, roh = sn_hyperparameter_tuning(epoch, alpha, roh, X_train, X_validation, y_train, y_validation)
for i in range(0, 5): # for 5 fold
print("For fold no:", i+1)
print("-"*100)
sn = SigmoidNeuron().fit(all_x_train[i], all_y_train[i], epoch = epoch, roh = roh, lr = alpha)
print("Accuracy on training data: " + str(sn.score(all_x_train[i], all_y_train[i])*100) + "%")
predicted = sn.predict_classes(all_x_test[i])
print("Testing Accuracy Score: " + str(accuracy_score(all_y_test[i], predicted)*100))
print('Confusion Matrix : \n' + str(confusion_matrix(all_y_test[i], predicted)))
out_labels = [1, 2, 3, 4, 5, 6]
print(classification_report(all_y_test[i], predicted, labels = out_labels, digits = 6))
print("-"*100)
# Overfit Detection
import numpy as np
import matplotlib.pyplot as plt
# it's an SLP (single-layer perceptron), instrumented to track validation loss for overfit detection
class Overfit_SigmoidNeuron():
def predict(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
return self.predict_(self.add_bias(X))
def predict_(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
pre_vals = np.dot(X, self.weights.T).reshape(-1, len(self.classes))  # use self.classes, not the global y
return self.softmax(pre_vals)
def softmax(self, z):
"""
z: 2D array of shape (no of patterns, no of classes)
"""
z = z - z.max(axis=1, keepdims=True)  # subtract the row max for numerical stability; the result is unchanged
return np.exp(z) / np.sum(np.exp(z), axis=1).reshape(-1,1)
def predict_classes(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
self.probs_ = self.predict(X)
return np.vectorize(lambda c: self.classes[c])(np.argmax(self.probs_, axis=1))
def add_bias(self, X):
"""
X: 2D array of shape (no of patterns, no of features)
"""
return np.insert(X, 0, 1, axis=1)
def one_hot(self, y):
"""
y: 1D array of shape (no of patterns, )
"""
return np.eye(len(self.classes))[np.vectorize(lambda c: self.class_labels[c])(y).reshape(-1)]
def score(self, X, y):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
"""
return np.mean(self.predict_classes(X) == y)
def evaluate_(self, X, y):
"""
X: 2D array of shape (no of patterns, no of features)
y: 1D array of shape (no of patterns, )
"""
return np.mean(np.argmax(self.predict_(X), axis=1) == np.argmax(y, axis=1))
def logloss(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return np.mean(-y*np.log(probs) - (1-y)*np.log(1-probs))
def cross_entropy(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return -1 * np.mean(y * np.log(probs))
def mse(self, y, probs):
"""
y: 1D array of shape (no of patterns, )
probs: 1D array of shape (no of patterns, )
"""
return (((y - probs)**2).mean())/2
def fit(self, X, y, X_validation, y_validation, epoch, roh, lr):
"""
X: 2D array of shape (no of patterns in training set, no of features)
y: 1D array of shape (no of patterns in training set, )
X_validation: 2D array of shape (no of patterns in validation set, no of features)
y_validation: 1D array of shape (no of patterns in validation set, )
epoch: int, convergence criteria (hyperparameter)
roh: float, convergence criteria (hyperparameter)
lr: float, learning rate (hyperparameter)
"""
self.epoch = epoch
self.roh = roh
self.lr = lr
self.classes = np.unique(y)
self.class_labels = {c:i for i,c in enumerate(self.classes)}
X = self.add_bias(X)
X_validation = self.add_bias(X_validation)
y = self.one_hot(y)
y_valid = self.one_hot(y_validation)
self.loss_train = []
self.loss_valid = []
self.weights = np.zeros(shape=(len(self.classes), X.shape[1]))  # zero-initialised; the original "*0.1" factor was a no-op on zeros
self.fit_data(X, y, X_validation, y_valid)
return self
def fit_data(self, X, y, X_validation, y_valid):
"""
X: 2D array of shape (no of patterns in training set, no of features)
y: 1D array of shape (no of patterns in training set, )
X_validation: 2D array of shape (no of patterns in validation set, no of features)
y_validation: 1D array of shape (no of patterns in validation set, )
"""
itr = 0
while (not self.epoch or itr < self.epoch):
self.loss_train.append(self.mse(y, self.predict_(X)))
self.loss_valid.append(self.mse(y_valid, self.predict_(X_validation)))
# put the threshold function on the predicted value i.e. here self.predict_(X)
temp = self.predict_(X)
#print("Iteration: ", itr+1, " Mse: ", self.mse(y, self.predict_(X)))
error = y - temp
update = (self.lr * np.dot(error.T, X))
self.weights += update
if np.abs(update).max() < self.roh:
print("Converged through roh criteria.")
break
itr +=1
if(itr==self.epoch):
print("Converged through maximum of Iteration:")
osn = Overfit_SigmoidNeuron().fit(X_train, y_train, X_validation, y_validation, epoch = epoch, roh = roh, lr = alpha)  # pass hyperparameters by keyword; positionally, alpha would land in the roh slot
print("Accuracy on training data: " + str(osn.score(X_train, y_train)*100) + "%")
print("-"*100)
test_predicted = osn.predict_classes(all_x_test[0])
print("Testing Accuracy Score: " + str(accuracy_score(all_y_test[0], test_predicted)*100))
print("Testing Confusion Matrix : \n" + str(confusion_matrix(all_y_test[0], test_predicted)))
print("-"*100)
valid_predicted = osn.predict_classes(X_validation)
print("Validation Accuracy Score: " + str(accuracy_score(y_validation, valid_predicted)*100))
print("Validation Confusion Matrix : \n" + str(confusion_matrix(y_validation, valid_predicted)))
print("-"*100)
loss_train_mse = osn.loss_train
loss_valid_mse = osn.loss_valid
def plotting(x, y_1, y_2, label_1, label_2, t):
plt.plot(x, y_1, label = label_1)
plt.plot(x, y_2, label = label_2)
plt.xlabel("Epoch")
plt.ylabel("Training and Validation MSE")
plt.legend()
plt.title(t)
plt.savefig("sn_overfit.pdf")
plt.show()
x = list(range(len(loss_valid_mse)))
plotting(x, loss_train_mse, loss_valid_mse, "Training MSE", "Validation MSE", "Training and Validation MSE")
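# A hedged follow-up: in the plot above, overfitting shows up as validation MSE
# rising while training MSE keeps falling. A minimal patience-based check on the
# recorded losses (the patience value below is an assumption, not from the source):
def suggest_stop_epoch(valid_losses, patience = 5):
    """Return the epoch (0-indexed) after which validation loss stopped improving for `patience` steps."""
    best, best_i, wait = float('inf'), 0, 0
    for i, v in enumerate(valid_losses):
        if v < best:
            best, best_i, wait = v, i, 0
        else:
            wait += 1
            if wait >= patience:
                break
    return best_i
print("Suggested stop epoch (0-indexed):", suggest_stop_epoch(loss_valid_mse))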
"""# Multi-layer Perceptron (with only one hidden layer)<br/>
[ref-1](https://www.kaggle.com/vitorgamalemos/multilayer-perceptron-from-scratch)<br/>
[ref-2](https://github.com/eriklindernoren/ML-From-Scratch/blob/master/mlfromscratch/supervised_learning/multilayer_perceptron.py)<br/>
[ref-3](https://github.com/bamtak/machine-learning-implemetation-python/blob/master/Multi%20Class%20Logistic%20Regression.ipynb)
"""
from sklearn.neural_network import MLPClassifier
# hyperparameter tuning for logistic accuracy
def mlp_hyperparameter_tuning(no_of_hidden_neurons, epoch, alpha, roh, n_iter_no_change, X_train, X_validation, y_train, y_validation):
"""
INPUT
no_of_hidden_neurons: 1D int array containing different values of the no of neurons
present in the 1st hidden layer (hyperparameter)
epoch: 1D int array containing different values of epochs (hyperparameter)
alpha: 1D float array containing different values of alphas or learning rates (hyperparameter)
roh: 1D float array containing different values of the tolerance or roh (hyperparameter)
n_iter_no_change: 1D int array containing different values of the
number of iterations with no improvement to wait before stopping fitting (hyperparameter).
X_train: 2D array of shape = (no of patterns, no of features)
X_validation: 2D array of shape = (no of patterns, no of features)
y_train: 1D array of shape = (no of patterns, )
y_validation: 1D array of shape = (no of patterns, )
OUTPUT
best_hyperparameter: a tuple (epoch, alpha, roh, n_iter_no_change) which has best accuracy on the validation set.
"""
val_acc = []
for i in range(0, epoch.shape[0]):
mlp_classifier = MLPClassifier(hidden_layer_sizes = (no_of_hidden_neurons[i],), activation = 'logistic', solver = 'sgd', learning_rate = 'constant',\
learning_rate_init = alpha[i], max_iter = epoch[i], shuffle = True, random_state = 100, tol = roh[i],\
verbose = False, early_stopping = True, n_iter_no_change = n_iter_no_change[i]).fit(X_train, y_train)
# we are taking logloss function for error calculation
predicted = mlp_classifier.predict(X_validation)
val_acc.append(accuracy_score(y_validation, predicted)*100)
# Get the maximum accuracy on validation
max_value = max(val_acc)
max_index = val_acc.index(max_value)
best_hyperparameter = (no_of_hidden_neurons[max_index], epoch[max_index], alpha[max_index], roh[max_index], n_iter_no_change[max_index])
print("Best Hyperparameter:")
print("No of neurons in the 1st hidden layer = ", no_of_hidden_neurons[max_index])
print("Epoch = ", epoch[max_index])
print("Alpha = ", alpha[max_index])
print("Roh = ", roh[max_index])
print("n_iter_no_change (Number of iterations with no improvement) = ", n_iter_no_change[max_index])
return best_hyperparameter
no_of_hidden_neurons = np.array([561, 581, 591, 600, 620, 650, 670, 700, 730, 750])
"""
No of neurons present in 1st hidden layer will be greater than the no of features
i.e. no of neurons present in the input layer to increase the dimention in which
we can solve the non linear problem also.
"""
epoch = np.array([100, 150, 200, 250, 300, 350, 400, 450, 500, 550])
alpha = np.array([0.01, 0.001, 0.0001, 0.00001, 0.125, 0.15, 0.18, 0.2, 0.25, 0.3])
roh = np.array([0.00001, 0.00001, 0.000001, 0.0000001, 0.000001, 0.0001, 0.0001, 0.0001, 0.0001, 0.000001])
n_iter_no_change = np.array([8, 9, 10, 11, 12, 13, 14, 15, 16, 18])
no_of_hidden_neurons, epoch, alpha, roh, n_iter_no_change = mlp_hyperparameter_tuning(no_of_hidden_neurons, epoch, alpha, roh, n_iter_no_change, X_train, X_validation, y_train, y_validation)
for i in range(0, 5): # for 5 fold
print("For fold no:", i+1)
print("-"*100)
mlp_classifier = MLPClassifier(hidden_layer_sizes = (no_of_hidden_neurons,), activation = 'logistic',\
solver = 'sgd', learning_rate = 'constant', learning_rate_init = alpha,\
max_iter = epoch, shuffle = True, random_state = 100, tol = roh,\
verbose = False, early_stopping = True, n_iter_no_change = n_iter_no_change)
mlp_classifier.fit(all_x_train[i], all_y_train[i])
print("Accuracy on training data: " + str(mlp_classifier.score(all_x_train[i], all_y_train[i])*100) + "%")
predicted = mlp_classifier.predict(all_x_test[i])
print("Testing Accuracy Score: " + str(accuracy_score(all_y_test[i], predicted)*100))
print("Confusion Matrix : \n" + str(confusion_matrix(all_y_test[i], predicted)))
print("Classification Report for 6-classes: ")
out_labels = [1, 2, 3, 4, 5, 6]
print(classification_report(all_y_test[i], predicted, labels = out_labels, digits = 4))
print("-"*100)
# Is there any overfit issue?
mlp_classifier = MLPClassifier(hidden_layer_sizes = (no_of_hidden_neurons,), activation = 'logistic',\
solver = 'sgd', learning_rate = 'constant', learning_rate_init = alpha,\
max_iter = epoch, shuffle = True, random_state = 100, tol = roh,\
verbose = False, early_stopping = True, n_iter_no_change = n_iter_no_change)
mlp_classifier.fit(X_train, y_train)
print("Accuracy on training data: " + str(mlp_classifier.score(X_train, y_train)*100) + "%")
print("-"*100)
test_predicted = mlp_classifier.predict(all_x_test[0])
print("Testing Accuracy Score: " + str(accuracy_score(all_y_test[0], test_predicted)*100))
print("Testing Confusion Matrix : \n" + str(confusion_matrix(all_y_test[0], test_predicted)))
print("-"*100)
valid_predicted = mlp_classifier.predict(X_validation)
print("Validation Accuracy Score: " + str(accuracy_score(y_validation, valid_predicted)*100))
print("Validation Confusion Matrix : \n" + str(confusion_matrix(y_validation, valid_predicted)))
print("-"*100)
"""**Feature Selection using PCA to get the optimal features**
"""
from sklearn.decomposition import PCA
# Let's say, components = 20
pca = PCA(n_components = 20)
pca.fit(X)
x_pca = pca.transform(X)
x_pca.shape
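# The choice of n_components = 20 above is somewhat arbitrary. A common way to
# pick it is to inspect the cumulative explained variance ratio; a minimal
# sketch (assuming X is the same feature matrix fitted above):
pca_full = PCA().fit(X)
cum_var = np.cumsum(pca_full.explained_variance_ratio_)
# smallest number of components that retains, e.g., 95% of the variance
n_opt = int(np.argmax(cum_var >= 0.95)) + 1
print("Components needed to keep 95% of the variance:", n_opt)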
all_x_train, all_x_test, all_y_train, all_y_test = fold(x_pca, y)
# File: unit_tests/test_mania_score_data_press.py (repo: abraker-osu/osu_analyzer, license: MIT)
import unittest
import numpy as np
from analysis.mania.action_data import ManiaActionData
from analysis.mania.score_data import ManiaScoreData
class TestManiaScoreDataPress(unittest.TestCase):
@classmethod
def setUpClass(cls):
map_data = np.asarray([
[ 50, 51, 3 ],
[ 100, 101, 3 ],
[ 150, 250, 3 ],
[ 300, 301, 0 ],
[ 300, 350, 3 ],
[ 450, 451, 0 ],
[ 450, 500, 1 ],
[ 450, 451, 2 ],
[ 450, 451, 3 ]
])
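# map_data columns are [start time (ms), end time (ms), column index];
# rows with etime == stime + 1 model single notes, longer spans model hold notes.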
map_col_filter = map_data[:, ManiaActionData.IDX_COL] == 3
map_idx_max = map_data[map_col_filter].shape[0]
map_col = np.empty((map_idx_max*2, 2))
map_col[:map_idx_max, 0] = map_data[map_col_filter][:, ManiaActionData.IDX_STIME]
map_col[map_idx_max:, 0] = map_data[map_col_filter][:, ManiaActionData.IDX_ETIME]
map_col[:map_idx_max, 1] = ManiaActionData.PRESS
map_col[map_idx_max:, 1] = ManiaActionData.RELEASE
map_sort = map_col.argsort(axis=0)
map_col = map_col[map_sort[:, 0]]
cls.map_times = map_col[:, 0]
cls.map_types = map_col[:, 1]
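# For reference, the interleaved (time, type) score points built above for
# column 3 are: (50, PRESS), (51, RELEASE), (100, PRESS), (101, RELEASE),
# (150, PRESS), (250, RELEASE), (300, PRESS), (350, RELEASE), (450, PRESS), (451, RELEASE).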
# Set hitwindow ranges to what these tests have been written for
ManiaScoreData.pos_hit_range = 100 # ms point of late hit window
ManiaScoreData.neg_hit_range = 100 # ms point of early hit window
ManiaScoreData.pos_hit_miss_range = 150 # ms point of late miss window
ManiaScoreData.neg_hit_miss_range = 150 # ms point of early miss window
ManiaScoreData.pos_rel_range = 100 # ms point of late release window
ManiaScoreData.neg_rel_range = 100 # ms point of early release window
ManiaScoreData.pos_rel_miss_range = 150 # ms point of late release window
ManiaScoreData.neg_rel_miss_range = 150 # ms point of early release window
@classmethod
def tearDown(cls):
pass
def test_no_press__singlenote_press__noblank_nolazy(self):
# Time: -1000 ms -> 1000 ms
# Scoring: Awaiting press at first singlenote (50 ms @ (col 3))
ManiaScoreData.blank_miss = False
ManiaScoreData.lazy_sliders = False
map_idx = 0
scorepoint_type = self.map_types[map_idx]
self.assertEqual(scorepoint_type, ManiaActionData.PRESS)
for ms in range(-1000, 1000):
column_data = {}
offset = ms - self.map_times[map_idx]
adv = ManiaScoreData._ManiaScoreData__process_free(column_data, scorepoint_type, ms, self.map_times, map_idx)
if offset <= ManiaScoreData.pos_hit_miss_range:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
else:
self.assertEqual(adv, 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_MISSP, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
def test_press__singlenote_press__noblank_nolazy(self):
# Time: -1000 ms -> 1000 ms
# Scoring: Awaiting press at first singlenote (50 ms @ (col 3))
ManiaScoreData.blank_miss = False
ManiaScoreData.lazy_sliders = False
map_idx = 0
scorepoint_type = self.map_types[map_idx]
self.assertEqual(scorepoint_type, ManiaActionData.PRESS)
for ms in range(-1000, 1000):
column_data = {}
offset = ms - self.map_times[map_idx]
adv = ManiaScoreData._ManiaScoreData__process_press(column_data, ms, self.map_times, map_idx)
if offset <= -ManiaScoreData.neg_hit_miss_range:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif -ManiaScoreData.neg_hit_miss_range < offset <= -ManiaScoreData.neg_hit_range:
self.assertEqual(adv, 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_MISSP, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif -ManiaScoreData.neg_hit_range < offset <= ManiaScoreData.pos_hit_range:
self.assertEqual(adv, 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_HITP, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif ManiaScoreData.pos_hit_range < offset <= ManiaScoreData.pos_hit_miss_range:
self.assertEqual(adv, 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_MISSP, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif ManiaScoreData.pos_hit_miss_range < offset:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
else:
self.fail(f'Unexpected condition | Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
def test_free__singlenote_release__noblank_nolazy(self):
# Time: -1000 ms -> 1000 ms
# Scoring: Awaiting release of the first singlenote (51 ms @ (col 3))
ManiaScoreData.blank_miss = False
ManiaScoreData.lazy_sliders = False
map_idx = 1
scorepoint_type = self.map_types[map_idx]
self.assertEqual(scorepoint_type, ManiaActionData.RELEASE)
for ms in range(-1000, 1000):
column_data = {}
offset = ms - self.map_times[map_idx]
adv = ManiaScoreData._ManiaScoreData__process_free(column_data, scorepoint_type, ms, self.map_times, map_idx)
if offset <= ManiaScoreData.pos_hit_miss_range:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
else:
self.assertEqual(adv, 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
def test_release__singlenote_release__noblank_nolazy(self):
# Time: -1000 ms -> 1000 ms
# Scoring: Awaiting release of the first singlenote (51 ms @ (col 3))
ManiaScoreData.blank_miss = False
ManiaScoreData.lazy_sliders = False
map_idx = 1
scorepoint_type = self.map_types[map_idx]
self.assertEqual(scorepoint_type, ManiaActionData.RELEASE)
for ms in range(-1000, 1000):
column_data = {}
offset = ms - self.map_times[map_idx]
adv = ManiaScoreData._ManiaScoreData__process_release(column_data, ms, self.map_times, map_idx)
self.assertEqual(adv, 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
def test_free__holdnote_press__noblank_nolazy(self):
# Time: -1000 ms -> 1000 ms
# Scoring: Awaiting press at first holdnote (150 ms @ (col 3))
ManiaScoreData.blank_miss = False
ManiaScoreData.lazy_sliders = False
map_idx = 4
scorepoint_type = self.map_types[map_idx]
self.assertEqual(scorepoint_type, ManiaActionData.PRESS)
for ms in range(-1000, 1000):
column_data = {}
offset = ms - self.map_times[map_idx]
adv = ManiaScoreData._ManiaScoreData__process_free(column_data, scorepoint_type, ms, self.map_times, map_idx)
if offset <= ManiaScoreData.pos_hit_miss_range:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
else:
self.assertEqual(adv, 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_MISSP, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
def test_press__holdnote_press__noblank_nolazy(self):
# Time: -1000 ms -> 1000 ms
# Scoring: Awaiting press at first holdnote (150 ms @ (col 3))
ManiaScoreData.blank_miss = False
ManiaScoreData.lazy_sliders = False
map_idx = 4
scorepoint_type = self.map_types[map_idx]
self.assertEqual(scorepoint_type, ManiaActionData.PRESS)
for ms in range(-1000, 1000):
column_data = {}
offset = ms - self.map_times[map_idx]
adv = ManiaScoreData._ManiaScoreData__process_press(column_data, ms, self.map_times, map_idx)
if offset <= -ManiaScoreData.neg_hit_miss_range:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif -ManiaScoreData.neg_hit_miss_range < offset <= -ManiaScoreData.neg_hit_range:
self.assertEqual(adv, 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_MISSP, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif -ManiaScoreData.neg_hit_range < offset <= ManiaScoreData.pos_hit_range:
self.assertEqual(adv, 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_HITP, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif ManiaScoreData.pos_hit_range < offset <= ManiaScoreData.pos_hit_miss_range:
self.assertEqual(adv, 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 2, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_MISSP, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif ManiaScoreData.pos_hit_miss_range < offset:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
else:
self.fail(f'Unexpected condition | Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
def test_release__holdnote_release__noblank_nolazy(self):
# Time: -1000 ms -> 1000 ms
# Scoring: Awaiting release of the first holdnote (250 ms @ (col 3))
ManiaScoreData.blank_miss = False
ManiaScoreData.lazy_sliders = False
map_idx = 5
scorepoint_type = self.map_types[map_idx]
self.assertEqual(scorepoint_type, ManiaActionData.RELEASE)
for ms in range(-1000, 1000):
column_data = {}
offset = ms - self.map_times[map_idx]
adv = ManiaScoreData._ManiaScoreData__process_release(column_data, ms, self.map_times, map_idx)
if offset <= -ManiaScoreData.neg_rel_miss_range:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif -ManiaScoreData.neg_rel_miss_range < offset <= -ManiaScoreData.neg_rel_range:
self.assertEqual(adv, 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_MISSR, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif -ManiaScoreData.neg_rel_range < offset <= ManiaScoreData.pos_rel_range:
self.assertEqual(adv, 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_HITR, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif ManiaScoreData.pos_rel_range < offset <= ManiaScoreData.pos_rel_miss_range:
self.assertEqual(adv, 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertIn(0, column_data, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][0], ms, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][1], self.map_times[map_idx], f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][2], ManiaScoreData.TYPE_MISSR, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(column_data[0][3], map_idx, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
elif ManiaScoreData.pos_rel_miss_range < offset:
self.assertEqual(adv, 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
else:
self.fail(f'Unexpected condition | Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
def test_release__holdnote_release__noblank_lazy(self):
# Time: -1000 ms -> 1000 ms
# Scoring: Awaiting release of the first holdnote (250 ms @ (col 3))
ManiaScoreData.blank_miss = False
ManiaScoreData.lazy_sliders = True
map_idx = 5
scorepoint_type = self.map_types[map_idx]
self.assertEqual(scorepoint_type, ManiaActionData.RELEASE)
for ms in range(-1000, 1000):
column_data = {}
offset = ms - self.map_times[map_idx]
adv = ManiaScoreData._ManiaScoreData__process_release(column_data, ms, self.map_times, map_idx)
self.assertEqual(adv, 1, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
self.assertEqual(len(column_data), 0, f'Offset: {offset} ms; Replay: {ms} ms; Map: {self.map_times[map_idx]} ms')
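# These tests can be run from the repository root with, e.g.:
#   python -m unittest unit_tests.test_mania_score_data_press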
# File: src/test.py (repo: nrbabcock/HeartOfGold, license: MIT)
from rlutilities.linear_algebra import *
print(angle_between(vec3(0,1,0), vec3(1, 1, 0)))
print(angle_between(vec3(0,1,0), vec3(-1, 1, 0)))
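# Assuming angle_between returns radians, both calls should print roughly
# 0.7853981 (i.e. pi/4, a 45 degree angle in the x-y plane).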
# File: laserfiche_api/api/entries_api.py (repo: Layer8Err/laserfiche_api, license: BSD-2-Clause)
# coding: utf-8
"""
Laserfiche API
Welcome to the Laserfiche API Swagger Playground. You can try out any of our API calls against your live Laserfiche Cloud account. Visit the developer center for more details: <a href=\"https://developer.laserfiche.com\">https://developer.laserfiche.com</a><p><strong>Build# : </strong>650780</p> # noqa: E501
OpenAPI spec version: 1-alpha
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from laserfiche_api.api_client import ApiClient
class EntriesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
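    # A minimal usage sketch (a hedged example; the repository ID, entry ID and
    # token below are hypothetical, and the Configuration/ApiClient setup follows
    # the standard swagger-codegen client layout):
    #
    #   import laserfiche_api
    #   config = laserfiche_api.Configuration()
    #   config.access_token = "<OAuth access token>"
    #   api = EntriesApi(laserfiche_api.ApiClient(config))
    #   links = api.assign_entry_links("r-abc123", 42, body=[])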
def assign_entry_links(self, repo_id, entry_id, **kwargs): # noqa: E501
"""assign_entry_links # noqa: E501
- Assign links to an entry. - Provide an entry ID and a list of links to assign to that entry. - This is an overwrite action. The request must include all links to assign to the entry, including existing links that should remain assigned to the entry. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entry_links(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param list[PutLinksRequest] body:
:return: ODataValueOfIListOfWEntryLinkInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_entry_links_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
(data) = self.assign_entry_links_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def assign_entry_links_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""assign_entry_links # noqa: E501
- Assign links to an entry. - Provide an entry ID and a list of links to assign to that entry. - This is an overwrite action. The request must include all links to assign to the entry, including existing links that should remain assigned to the entry. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entry_links_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param list[PutLinksRequest] body:
:return: ODataValueOfIListOfWEntryLinkInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_entry_links" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `assign_entry_links`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `assign_entry_links`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/links', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ODataValueOfIListOfWEntryLinkInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def assign_field_values(self, repo_id, entry_id, **kwargs): # noqa: E501
"""assign_field_values # noqa: E501
- Update field values assigned to an entry. - Provide the new field values to assign to the entry, and remove/reset all previously assigned field values. - This is an overwrite action. The request body must include all desired field values, including any existing field values that should remain assigned to the entry. Field values that are not included in the request will be deleted from the entry. If the field value that is not included is part of a template, it will still be assigned (as required by the template), but its value will be reset. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_field_values(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The entry ID of the entry that will have its fields updated. (required)
:param dict(str, FieldToUpdate) body:
:return: ODataValueOfIListOfFieldValue
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_field_values_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
(data) = self.assign_field_values_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
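    # A hedged illustration of the overwrite semantics described above (the field
    # names and the FieldToUpdate contents are assumptions, not values from this API):
    #
    #   body = {"Author": FieldToUpdate(...), "Status": FieldToUpdate(...)}
    #   api.assign_field_values("r-abc123", 42, body=body)
    #   # any previously assigned field omitted from `body` is removed or reset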
def assign_field_values_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""assign_field_values # noqa: E501
- Update field values assigned to an entry. - Provide the new field values to assign to the entry, and remove/reset all previously assigned field values. - This is an overwrite action. The request body must include all desired field values, including any existing field values that should remain assigned to the entry. Field values that are not included in the request will be deleted from the entry. If the field value that is not included is part of a template, it will still be assigned (as required by the template), but its value will be reset. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_field_values_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The entry ID of the entry that will have its fields updated. (required)
:param dict(str, FieldToUpdate) body:
:return: ODataValueOfIListOfFieldValue
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_field_values" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `assign_field_values`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `assign_field_values`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/fields', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ODataValueOfIListOfFieldValue', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def assign_tags(self, repo_id, entry_id, **kwargs): # noqa: E501
"""assign_tags # noqa: E501
- Assign tags to an entry. - Provide an entry ID and a list of tags to assign to that entry. - This is an overwrite action. The request must include all tags to assign to the entry, including existing tags that should remain assigned to the entry. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_tags(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param PutTagRequest body: The tags to add.
:return: ODataValueOfIListOfWTagInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_tags_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
(data) = self.assign_tags_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def assign_tags_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""assign_tags # noqa: E501
- Assign tags to an entry. - Provide an entry ID and a list of tags to assign to that entry. - This is an overwrite action. The request must include all tags to assign to the entry, including existing tags that should remain assigned to the entry. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_tags_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param PutTagRequest body: The tags to add.
:return: ODataValueOfIListOfWTagInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_tags" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `assign_tags`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `assign_tags`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/tags', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ODataValueOfIListOfWTagInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def copy_entry_async(self, repo_id, entry_id, **kwargs): # noqa: E501
"""copy_entry_async # noqa: E501
- Copy a new child entry in the designated folder async, and potentially return an operationToken. - Provide the parent folder id, and copy an entry as a child of the designated folder. - Optional parameter: autoRename (default false). If an entry already exists with the given name, the entry will be automatically renamed. - The status of the operation can be checked via the Tasks/{operationToken} route. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.copy_entry_async(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The folder id that the entry will be created in. (required)
:param CopyAsyncRequest body: Copy entry request.
:param bool auto_rename: An optional query parameter used to indicate if the new entry should be automatically renamed if an entry already exists with the given name in the folder. The default value is false.
:return: AcceptedOperation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.copy_entry_async_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
(data) = self.copy_entry_async_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
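    # A hedged usage sketch for the async copy above: the returned
    # AcceptedOperation carries the operation token, which (per the docstring)
    # can be polled via the Tasks/{operationToken} route until the copy
    # completes. The IDs and request object below are hypothetical:
    #
    #   op = api.copy_entry_async("r-abc123", 7, body=copy_request, auto_rename=True)
    #   # then poll GET /v1-alpha/Repositories/r-abc123/Tasks/{operationToken} until done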
def copy_entry_async_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""copy_entry_async # noqa: E501
- Copy a new child entry in the designated folder async, and potentially return an operationToken. - Provide the parent folder id, and copy an entry as a child of the designated folder. - Optional parameter: autoRename (default false). If an entry already exists with the given name, the entry will be automatically renamed. - The status of the operation can be checked via the Tasks/{operationToken} route. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.copy_entry_async_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The folder id that the entry will be created in. (required)
:param CopyAsyncRequest body: Copy entry request.
:param bool auto_rename: An optional query parameter used to indicate if the new entry should be automatically renamed if an entry already exists with the given name in the folder. The default value is false.
:return: AcceptedOperation
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body', 'auto_rename'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method copy_entry_async" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `copy_entry_async`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `copy_entry_async`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
if 'auto_rename' in params:
query_params.append(('autoRename', params['auto_rename'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/Laserfiche.Repository.Folder/CopyAsync', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AcceptedOperation', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_or_copy_entry(self, repo_id, entry_id, **kwargs): # noqa: E501
"""create_or_copy_entry # noqa: E501
- Create/copy a new child entry in the designated folder. - Provide the parent folder id, and based on the request body, copy or create a folder/shortcut as a child entry of the designated folder. - Optional parameter: autoRename (default false). If an entry already exists with the given name, the entry will be automatically renamed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_or_copy_entry(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The folder id that the entry will be created in. (required)
:param PostEntryChildrenRequest body: The entry to create.
:param bool auto_rename: An optional query parameter used to indicate if the new entry should be automatically renamed if an entry already exists with the given name in the folder. The default value is false.
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_or_copy_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
(data) = self.create_or_copy_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def create_or_copy_entry_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""create_or_copy_entry # noqa: E501
- Create/copy a new child entry in the designated folder. - Provide the parent folder id, and based on the request body, copy or create a folder/shortcut as a child entry of the designated folder. - Optional parameter: autoRename (default false). If an entry already exists with the given name, the entry will be automatically renamed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_or_copy_entry_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The folder id that the entry will be created in. (required)
:param PostEntryChildrenRequest body: The entry to create.
:param bool auto_rename: An optional query parameter used to indicate if the new entry should be automatically renamed if an entry already exists with the given name in the folder. The default value is false.
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body', 'auto_rename'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_or_copy_entry" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `create_or_copy_entry`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `create_or_copy_entry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
if 'auto_rename' in params:
query_params.append(('autoRename', params['auto_rename'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/Laserfiche.Repository.Folder/children', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Entry', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
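# Illustrative usage sketch (not part of the generated client): create a new
# child entry under a folder, renaming automatically on a name collision. The
# repository id 'r-abc123', folder id 105, and the PostEntryChildrenRequest
# constructor arguments shown are placeholders; consult the generated model
# for the exact field names.
#
#   body = PostEntryChildrenRequest(name='Quarterly Reports')
#   entry = api.create_or_copy_entry('r-abc123', 105, body=body,
#                                    auto_rename=True)
#   print(entry.id)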
def delete_assigned_template(self, repo_id, entry_id, **kwargs): # noqa: E501
"""delete_assigned_template # noqa: E501
- Remove the currently assigned template from the specified entry. - Provide an entry id to clear the template value on. - If the entry does not have a template assigned, no change will be made. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_assigned_template(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The id of the entry that will have its template removed. (required)
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_assigned_template_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.delete_assigned_template_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def delete_assigned_template_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""delete_assigned_template # noqa: E501
- Remove the currently assigned template from the specified entry. - Provide an entry id to clear the template value on. - If the entry does not have a template assigned, no change will be made. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_assigned_template_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The id of the entry that will have its template removed. (required)
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_assigned_template" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `delete_assigned_template`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `delete_assigned_template`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/template', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Entry', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
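# Illustrative usage sketch (not part of the generated client): clear the
# template on entry 42 asynchronously; async_req=True returns a thread whose
# get() yields the same Entry result as the synchronous call. The repository
# and entry ids are placeholders.
#
#   thread = api.delete_assigned_template('r-abc123', 42, async_req=True)
#   entry = thread.get()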
def delete_entry_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""delete_entry_info # noqa: E501
- Begins a task to delete an entry, and returns an operationToken. - Provide an entry ID, and queue a delete task to remove it from the repository (includes nested objects if the entry is a Folder type). The entry will not be deleted immediately. - Optionally include an audit reason ID and comment in the JSON body. This route returns an operationToken, and will run as an asynchronous operation. Check the progress via the Tasks/{operationToken} route. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_entry_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param DeleteEntryWithAuditReason body:
:return: AcceptedOperation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_entry_info_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.delete_entry_info_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def delete_entry_info_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""delete_entry_info # noqa: E501
- Begins a task to delete an entry, and returns an operationToken. - Provide an entry ID, and queue a delete task to remove it from the repository (includes nested objects if the entry is a Folder type). The entry will not be deleted immediately. - Optionally include an audit reason ID and comment in the JSON body. This route returns an operationToken, and will run as an asynchronous operation. Check the progress via the Tasks/{operationToken} route. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_entry_info_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param DeleteEntryWithAuditReason body:
:return: AcceptedOperation
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_entry_info" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `delete_entry_info`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `delete_entry_info`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AcceptedOperation', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
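# Illustrative usage sketch (not part of the generated client): queue a delete
# task with an audit reason, then poll the returned operation token via the
# Tasks/{operationToken} route. The DeleteEntryWithAuditReason field names and
# the token attribute on AcceptedOperation are assumptions; check the
# generated models.
#
#   reason = DeleteEntryWithAuditReason(audit_reason_id=3, comment='cleanup')
#   accepted = api.delete_entry_info('r-abc123', 42, body=reason)
#   print(accepted.token)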
def export_document(self, repo_id, entry_id, **kwargs): # noqa: E501
"""export_document # noqa: E501
- Get an entry's edoc resource in a stream format. - Provide an entry id, and get the edoc resource as part of the response content. - Optional header: Range. Use the Range header (single range with byte unit) to retrieve partial content of the edoc, rather than the entire edoc. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_document(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested document id. (required)
:param str range: An optional header used to retrieve partial content of the edoc. Only supports single range with byte unit.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.export_document_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.export_document_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def export_document_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""export_document # noqa: E501
- Get an entry's edoc resource in a stream format. - Provide an entry id, and get the edoc resource as part of the response content. - Optional header: Range. Use the Range header (single range with byte unit) to retrieve partial content of the edoc, rather than the entire edoc. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_document_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested document id. (required)
:param str range: An optional header used to retrieve partial content of the edoc. Only supports single range with byte unit.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'range'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method export_document" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `export_document`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `export_document`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
if 'range' in params:
header_params['Range'] = params['range'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream', 'application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/Laserfiche.Repository.Document/edoc', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
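# Illustrative usage sketch (not part of the generated client): download only
# the first kilobyte of an edoc with the optional Range header (a single byte
# range, per the docstring above). Passing _preload_content=False is the usual
# swagger-client way to get the raw response for streaming rather than a
# decoded string.
#
#   partial = api.export_document('r-abc123', 42, range='bytes=0-1023')
#   raw = api.export_document('r-abc123', 42, _preload_content=False)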
def export_document_with_audit_reason(self, repo_id, entry_id, **kwargs): # noqa: E501
"""export_document_with_audit_reason # noqa: E501
- Get an entry's edoc resource in a stream format while including an audit reason. - Provide an entry id and audit reason/comment in the request body, and get the edoc resource as part of the response content. - Optional header: Range. Use the Range header (single range with byte unit) to retrieve partial content of the edoc, rather than the entire edoc. This route is identical to the GET edoc route, but allows clients to include an audit reason when downloading the edoc. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_document_with_audit_reason(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested document id. (required)
:param GetEdocWithAuditReasonRequest body:
:param str range: An optional header used to retrieve partial content of the edoc. Only supports single range with byte unit.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.export_document_with_audit_reason_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.export_document_with_audit_reason_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def export_document_with_audit_reason_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""export_document_with_audit_reason # noqa: E501
- Get an entry's edoc resource in a stream format while including an audit reason. - Provide an entry id and audit reason/comment in the request body, and get the edoc resource as part of the response content. - Optional header: Range. Use the Range header (single range with byte unit) to retrieve partial content of the edoc, rather than the entire edoc. This route is identical to the GET edoc route, but allows clients to include an audit reason when downloading the edoc. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_document_with_audit_reason_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested document id. (required)
:param GetEdocWithAuditReasonRequest body:
:param str range: An optional header used to retrieve partial content of the edoc. Only supports single range with byte unit.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body', 'range'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method export_document_with_audit_reason" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `export_document_with_audit_reason`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `export_document_with_audit_reason`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
if 'range' in params:
header_params['Range'] = params['range'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/Laserfiche.Repository.Document/GetEdocWithAuditReason', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
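# Illustrative usage sketch (not part of the generated client): identical to
# the GET edoc download, but records an audit reason. The
# GetEdocWithAuditReasonRequest field names are assumptions; check the
# generated model.
#
#   reason = GetEdocWithAuditReasonRequest(audit_reason_id=7,
#                                          comment='records request')
#   edoc = api.export_document_with_audit_reason('r-abc123', 42, body=reason)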
def get_document_content_type(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_document_content_type # noqa: E501
- Get information about the edoc content of an entry, without downloading the edoc in its entirety. - Provide an entry id, and get back the Content-Type and Content-Length in the response headers. - This route does not provide a way to download the actual edoc. Instead, it only returns metadata about the edoc associated with the entry. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_document_content_type(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested document id. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_document_content_type_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.get_document_content_type_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def get_document_content_type_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_document_content_type # noqa: E501
- Get information about the edoc content of an entry, without downloading the edoc in its entirety. - Provide an entry id, and get back the Content-Type and Content-Length in the response headers. - This route does not provide a way to download the actual edoc. Instead, it only returns metadata about the edoc associated with the entry. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_document_content_type_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested document id. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_document_content_type" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `get_document_content_type`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `get_document_content_type`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/Laserfiche.Repository.Document/edoc', 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
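# Illustrative usage sketch (not part of the generated client): this HEAD
# route returns no body, so the useful output is the Content-Type and
# Content-Length response headers. In this generated client style, calling the
# *_with_http_info variant without _return_http_data_only typically yields a
# (data, status, headers) tuple, which exposes them; verify against the
# api_client implementation.
#
#   data, status, headers = api.get_document_content_type_with_http_info(
#       'r-abc123', 42)
#   print(headers.get('Content-Type'), headers.get('Content-Length'))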
def get_dynamic_field_values(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_dynamic_field_values # noqa: E501
- Get dynamic field logic values with the current values of the fields in the template. - Provide an entry id and field values in the JSON body to get dynamic field logic values. Independent and non-dynamic fields in the request body will be ignored, and only related dynamic field logic values for the assigned template will be returned. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dynamic_field_values(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested entry id. (required)
:param GetDynamicFieldLogicValueRequest body:
:return: dict(str, list[str])
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dynamic_field_values_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.get_dynamic_field_values_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def get_dynamic_field_values_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_dynamic_field_values # noqa: E501
- Get dynamic field logic values with the current values of the fields in the template. - Provide an entry id and field values in the JSON body to get dynamic field logic values. Independent and non-dynamic fields in the request body will be ignored, and only related dynamic field logic values for the assigned template will be returned. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dynamic_field_values_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested entry id. (required)
:param GetDynamicFieldLogicValueRequest body:
:return: dict(str, list[str])
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dynamic_field_values" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `get_dynamic_field_values`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `get_dynamic_field_values`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/fields/GetDynamicFieldLogicValue', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, list[str])', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
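# Illustrative usage sketch (not part of the generated client): resolve
# dynamic field logic values for entry 42 given current field values. The
# GetDynamicFieldLogicValueRequest constructor arguments are assumptions; the
# result is a dict(str, list[str]) mapping field names to allowed values.
#
#   req = GetDynamicFieldLogicValueRequest(field_values={'Country': 'US'})
#   options = api.get_dynamic_field_values('r-abc123', 42, body=req)
#   for field_name, values in options.items():
#       print(field_name, values)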
def get_entry(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_entry # noqa: E501
- Returns a single entry object. - Provide an entry ID, and get the entry associated with that ID. Useful when detailed information about the entry is required, such as metadata, path information, etc. - Allowed OData query options: Select. If the entry is a subtype (Folder, Document, or Shortcut), the entry will automatically be converted to include those model-specific properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entry(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param str select: Limits the properties returned in the result.
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.get_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def get_entry_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_entry # noqa: E501
- Returns a single entry object. - Provide an entry ID, and get the entry associated with that ID. Useful when detailed information about the entry is required, such as metadata, path information, etc. - Allowed OData query options: Select. If the entry is a subtype (Folder, Document, or Shortcut), the entry will automatically be converted to include those model-specific properties. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entry_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param str select: Limits the properties returned in the result.
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'select'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entry" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `get_entry`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `get_entry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
if 'select' in params:
query_params.append(('$select', params['select'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Entry', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
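# Illustrative usage sketch (not part of the generated client): fetch a single
# entry and trim the payload with the OData $select option; the property names
# passed to select are placeholders.
#
#   entry = api.get_entry('r-abc123', 42, select='name,entryType')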
def get_entry_listing(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_entry_listing # noqa: E501
- Returns the child entries of a folder in the repository. - Provide an entry ID (must be a folder), and get a paged listing of entries in that folder. Used as a way of navigating through the repository. - Default page size: 100. Allowed OData query options: Select | Count | OrderBy | Skip | Top | SkipToken | Prefer. OData $OrderBy syntax should follow: \"PropertyName direction,PropertyName2 direction\". Sort order can be either \"asc\" or \"desc\". Optional query parameters: groupByEntryType (bool). This query parameter decides if results are returned in groups based on their entry type. Entries returned in the listing are not automatically converted to their subtype (Folder, Shortcut, Document), so clients who want model-specific information should request it via the GET entry by ID route. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entry_listing(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The folder ID. (required)
:param bool group_by_entry_type: An optional query parameter used to indicate if the result should be grouped by entry type or not.
:param str prefer: An optional OData header. Can be used to set the maximum page size using odata.maxpagesize.
:param str select: Limits the properties returned in the result.
:param str orderby: Specifies the order in which items are returned. The maximum number of expressions is 5.
:param int top: Limits the number of items returned from a collection.
:param int skip: Excludes the specified number of items of the queried collection from the result.
:param bool count: Indicates whether the total count of items within a collection is returned in the result.
:return: ODataValueOfIListOfEntry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entry_listing_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.get_entry_listing_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def get_entry_listing_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_entry_listing # noqa: E501
- Returns the child entries of a folder in the repository. - Provide an entry ID (must be a folder), and get a paged listing of entries in that folder. Used as a way of navigating through the repository. - Default page size: 100. Allowed OData query options: Select | Count | OrderBy | Skip | Top | SkipToken | Prefer. OData $OrderBy syntax should follow: \"PropertyName direction,PropertyName2 direction\". Sort order can be either \"asc\" or \"desc\". Optional query parameters: groupByEntryType (bool). This query parameter decides if results are returned in groups based on their entry type. Entries returned in the listing are not automatically converted to their subtype (Folder, Shortcut, Document), so clients who want model-specific information should request it via the GET entry by ID route. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entry_listing_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The folder ID. (required)
:param bool group_by_entry_type: An optional query parameter used to indicate if the result should be grouped by entry type or not.
:param str prefer: An optional OData header. Can be used to set the maximum page size using odata.maxpagesize.
:param str select: Limits the properties returned in the result.
:param str orderby: Specifies the order in which items are returned. The maximum number of expressions is 5.
:param int top: Limits the number of items returned from a collection.
:param int skip: Excludes the specified number of items of the queried collection from the result.
:param bool count: Indicates whether the total count of items within a collection is returned in the result.
:return: ODataValueOfIListOfEntry
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'group_by_entry_type', 'prefer', 'select', 'orderby', 'top', 'skip', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entry_listing" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `get_entry_listing`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `get_entry_listing`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
if 'group_by_entry_type' in params:
query_params.append(('groupByEntryType', params['group_by_entry_type'])) # noqa: E501
if 'select' in params:
query_params.append(('$select', params['select'])) # noqa: E501
if 'orderby' in params:
query_params.append(('$orderby', params['orderby'])) # noqa: E501
if 'top' in params:
query_params.append(('$top', params['top'])) # noqa: E501
if 'skip' in params:
query_params.append(('$skip', params['skip'])) # noqa: E501
if 'count' in params:
query_params.append(('$count', params['count'])) # noqa: E501
header_params = {}
if 'prefer' in params:
header_params['Prefer'] = params['prefer'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/Laserfiche.Repository.Folder/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ODataValueOfIListOfEntry', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
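# Illustrative usage sketch (not part of the generated client): page through a
# folder listing 50 entries at a time via the Prefer header, sorted by name.
# The 'value' attribute on ODataValueOfIListOfEntry is an assumption based on
# the model name.
#
#   page = api.get_entry_listing('r-abc123', 1, orderby='name asc',
#                                prefer='odata.maxpagesize=50')
#   for child in page.value:
#       print(child.id, child.name)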
def get_field_values(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_field_values # noqa: E501
- Returns the fields assigned to an entry. - Provide an entry ID, and get a paged listing of all fields assigned to that entry. - Default page size: 100. Allowed OData query options: Select | Count | OrderBy | Skip | Top | SkipToken | Prefer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_field_values(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param str prefer: An optional OData header. Can be used to set the maximum page size using odata.maxpagesize.
:param bool format_value: An optional query parameter used to indicate if the field values should be formatted. The default value is false.
:param str culture: An optional query parameter used to indicate the locale that should be used for formatting. The value should be a standard language tag. The formatValue query parameter must be set to true, otherwise culture will not be used for formatting.
:param str select: Limits the properties returned in the result.
:param str orderby: Specifies the order in which items are returned. The maximum number of expressions is 5.
:param int top: Limits the number of items returned from a collection.
:param int skip: Excludes the specified number of items of the queried collection from the result.
:param bool count: Indicates whether the total count of items within a collection is returned in the result.
:return: ODataValueOfIListOfFieldValue
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_field_values_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.get_field_values_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def get_field_values_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_field_values # noqa: E501
- Returns the fields assigned to an entry. - Provide an entry ID, and get a paged listing of all fields assigned to that entry. - Default page size: 100. Allowed OData query options: Select | Count | OrderBy | Skip | Top | SkipToken | Prefer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_field_values_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param str prefer: An optional OData header. Can be used to set the maximum page size using odata.maxpagesize.
:param bool format_value: An optional query parameter used to indicate if the field values should be formatted. The default value is false.
:param str culture: An optional query parameter used to indicate the locale that should be used for formatting. The value should be a standard language tag. The formatValue query parameter must be set to true, otherwise culture will not be used for formatting.
:param str select: Limits the properties returned in the result.
:param str orderby: Specifies the order in which items are returned. The maximum number of expressions is 5.
:param int top: Limits the number of items returned from a collection.
:param int skip: Excludes the specified number of items of the queried collection from the result.
:param bool count: Indicates whether the total count of items within a collection is returned in the result.
:return: ODataValueOfIListOfFieldValue
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'prefer', 'format_value', 'culture', 'select', 'orderby', 'top', 'skip', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_field_values" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `get_field_values`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `get_field_values`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
if 'format_value' in params:
query_params.append(('formatValue', params['format_value'])) # noqa: E501
if 'culture' in params:
query_params.append(('culture', params['culture'])) # noqa: E501
if 'select' in params:
query_params.append(('$select', params['select'])) # noqa: E501
if 'orderby' in params:
query_params.append(('$orderby', params['orderby'])) # noqa: E501
if 'top' in params:
query_params.append(('$top', params['top'])) # noqa: E501
if 'skip' in params:
query_params.append(('$skip', params['skip'])) # noqa: E501
if 'count' in params:
query_params.append(('$count', params['count'])) # noqa: E501
header_params = {}
if 'prefer' in params:
header_params['Prefer'] = params['prefer'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/fields', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ODataValueOfIListOfFieldValue', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
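# Illustrative usage sketch (not part of the generated client): list the
# fields on entry 42 with values formatted for a specific locale. Per the
# docstring above, format_value must be True for culture to take effect.
#
#   fields = api.get_field_values('r-abc123', 42, format_value=True,
#                                 culture='de-DE')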
def get_link_values_from_entry(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_link_values_from_entry # noqa: E501
- Get the links assigned to an entry. - Provide an entry id, and get a paged listing of links assigned to that entry. - Default page size: 100. Allowed OData query options: Select | Count | OrderBy | Skip | Top | SkipToken | Prefer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_link_values_from_entry(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested entry id. (required)
:param str prefer: An optional OData header. Can be used to set the maximum page size using odata.maxpagesize.
:param str select: Limits the properties returned in the result.
:param str orderby: Specifies the order in which items are returned. The maximum number of expressions is 5.
:param int top: Limits the number of items returned from a collection.
:param int skip: Excludes the specified number of items of the queried collection from the result.
:param bool count: Indicates whether the total count of items within a collection is returned in the result.
:return: ODataValueOfIListOfWEntryLinkInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_link_values_from_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.get_link_values_from_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def get_link_values_from_entry_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_link_values_from_entry # noqa: E501
- Get the links assigned to an entry. - Provide an entry id, and get a paged listing of links assigned to that entry. - Default page size: 100. Allowed OData query options: Select | Count | OrderBy | Skip | Top | SkipToken | Prefer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_link_values_from_entry_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The requested entry id. (required)
:param str prefer: An optional OData header. Can be used to set the maximum page size using odata.maxpagesize.
:param str select: Limits the properties returned in the result.
:param str orderby: Specifies the order in which items are returned. The maximum number of expressions is 5.
:param int top: Limits the number of items returned from a collection.
:param int skip: Excludes the specified number of items of the queried collection from the result.
:param bool count: Indicates whether the total count of items within a collection is returned in the result.
:return: ODataValueOfIListOfWEntryLinkInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'prefer', 'select', 'orderby', 'top', 'skip', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_link_values_from_entry" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `get_link_values_from_entry`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `get_link_values_from_entry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
if 'select' in params:
query_params.append(('$select', params['select'])) # noqa: E501
if 'orderby' in params:
query_params.append(('$orderby', params['orderby'])) # noqa: E501
if 'top' in params:
query_params.append(('$top', params['top'])) # noqa: E501
if 'skip' in params:
query_params.append(('$skip', params['skip'])) # noqa: E501
if 'count' in params:
query_params.append(('$count', params['count'])) # noqa: E501
header_params = {}
if 'prefer' in params:
header_params['Prefer'] = params['prefer'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/links', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ODataValueOfIListOfWEntryLinkInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
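# Illustrative usage sketch (not part of the generated client): fetch the
# first five links on entry 42 together with the total count, using the OData
# $top and $count options.
#
#   links = api.get_link_values_from_entry('r-abc123', 42, top=5, count=True)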
def get_tags_assigned_to_entry(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_tags_assigned_to_entry # noqa: E501
- Get the tags assigned to an entry. - Provide an entry ID, and get a paged listing of tags assigned to that entry. - Default page size: 100. Allowed OData query options: Select | Count | OrderBy | Skip | Top | SkipToken | Prefer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tags_assigned_to_entry(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param str prefer: An optional OData header. Can be used to set the maximum page size using odata.maxpagesize.
:param str select: Limits the properties returned in the result.
:param str orderby: Specifies the order in which items are returned. The maximum number of expressions is 5.
:param int top: Limits the number of items returned from a collection.
:param int skip: Excludes the specified number of items of the queried collection from the result.
:param bool count: Indicates whether the total count of items within a collection is returned in the result.
:return: ODataValueOfIListOfWTagInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tags_assigned_to_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
data = self.get_tags_assigned_to_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def get_tags_assigned_to_entry_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""get_tags_assigned_to_entry # noqa: E501
- Get the tags assigned to an entry. - Provide an entry ID, and get a paged listing of tags assigned to that entry. - Default page size: 100. Allowed OData query options: Select | Count | OrderBy | Skip | Top | SkipToken | Prefer. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tags_assigned_to_entry_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param str prefer: An optional OData header. Can be used to set the maximum page size using odata.maxpagesize.
:param str select: Limits the properties returned in the result.
:param str orderby: Specifies the order in which items are returned. The maximum number of expressions is 5.
:param int top: Limits the number of items returned from a collection.
:param int skip: Excludes the specified number of items of the queried collection from the result.
:param bool count: Indicates whether the total count of items within a collection is returned in the result.
:return: ODataValueOfIListOfWTagInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'prefer', 'select', 'orderby', 'top', 'skip', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tags_assigned_to_entry" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `get_tags_assigned_to_entry`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `get_tags_assigned_to_entry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
if 'select' in params:
query_params.append(('$select', params['select'])) # noqa: E501
if 'orderby' in params:
query_params.append(('$orderby', params['orderby'])) # noqa: E501
if 'top' in params:
query_params.append(('$top', params['top'])) # noqa: E501
if 'skip' in params:
query_params.append(('$skip', params['skip'])) # noqa: E501
if 'count' in params:
query_params.append(('$count', params['count'])) # noqa: E501
header_params = {}
if 'prefer' in params:
header_params['Prefer'] = params['prefer'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/tags', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ODataValueOfIListOfWTagInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
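# Illustrative usage sketch (not part of the generated client): list the tags
# on entry 42 asynchronously; thread.get() blocks until the request completes
# and returns the ODataValueOfIListOfWTagInfo result.
#
#   thread = api.get_tags_assigned_to_entry('r-abc123', 42, async_req=True)
#   tags = thread.get()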
def import_document(self, repo_id, parent_entry_id, file_name, **kwargs): # noqa: E501
"""import_document # noqa: E501
- Creates a new document in the specified folder. - Optionally sets metadata and the electronic document component. - Optional parameter: autoRename (default false). If autoRename is true and an entry already exists with the given name, the new entry will be automatically renamed. With this route, partial success is possible. The response returns multiple operation objects (entryCreate, setEdoc, setLinks, etc.), which contain information about any errors that may have occurred during the creation. As long as the entryCreate operation succeeds, the entry will be created, even if all other operations fail. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_document(repo_id, parent_entry_id, file_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int parent_entry_id: The entry ID of the folder that the document will be created in. (required)
:param str file_name: The created document's file name. (required)
:param str electronic_document:
:param PostEntryWithEdocMetadataRequest request:
:param bool auto_rename: An optional query parameter used to indicate if the new document should be automatically renamed if an entry already exists with the given name in the folder. The default value is false.
:return: CreateEntryResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.import_document_with_http_info(repo_id, parent_entry_id, file_name, **kwargs) # noqa: E501
else:
(data) = self.import_document_with_http_info(repo_id, parent_entry_id, file_name, **kwargs) # noqa: E501
return data
def import_document_with_http_info(self, repo_id, parent_entry_id, file_name, **kwargs): # noqa: E501
"""import_document # noqa: E501
- Creates a new document in the specified folder. - Optionally sets metadata and the electronic document component. - Optional parameter: autoRename (default false). If an entry already exists with the given name, the entry will be automatically renamed. With this route, partial success is possible. The response returns multiple operation objects (entryCreate, setEdoc, setLinks, etc.), which contain information about any errors that may have occurred during creation. As long as the entryCreate operation succeeds, the entry will be created, even if all other operations fail. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_document_with_http_info(repo_id, parent_entry_id, file_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int parent_entry_id: The entry ID of the folder that the document will be created in. (required)
:param str file_name: The created document's file name. (required)
:param str electronic_document:
:param PostEntryWithEdocMetadataRequest request:
:param bool auto_rename: An optional query parameter used to indicate if the new document should be automatically renamed if an entry already exists with the given name in the folder. The default value is false.
:return: CreateEntryResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'parent_entry_id', 'file_name', 'electronic_document', 'request', 'auto_rename'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method import_document" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `import_document`") # noqa: E501
# verify the required parameter 'parent_entry_id' is set
if ('parent_entry_id' not in params or
params['parent_entry_id'] is None):
raise ValueError("Missing the required parameter `parent_entry_id` when calling `import_document`") # noqa: E501
# verify the required parameter 'file_name' is set
if ('file_name' not in params or
params['file_name'] is None):
raise ValueError("Missing the required parameter `file_name` when calling `import_document`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'parent_entry_id' in params:
path_params['parentEntryId'] = params['parent_entry_id'] # noqa: E501
if 'file_name' in params:
path_params['fileName'] = params['file_name'] # noqa: E501
query_params = []
if 'auto_rename' in params:
query_params.append(('autoRename', params['auto_rename'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
if 'electronic_document' in params:
local_var_files['electronicDocument'] = params['electronic_document'] # noqa: E501
if 'request' in params:
form_params.append(('request', params['request'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{parentEntryId}/{fileName}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateEntryResult', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
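# Hedged usage sketch: importing a document with autoRename, following the
# partial-success semantics described in the docstring above. The file path,
# IDs and the shape of the returned CreateEntryResult are assumptions.
# >>> result = api.import_document('repo-1', 456, 'report.pdf',
# ...                              electronic_document='/tmp/report.pdf',
# ...                              auto_rename=True)
# >>> # entryCreate may succeed even when setEdoc/setLinks report errors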
def move_or_rename_document(self, repo_id, entry_id, **kwargs): # noqa: E501
"""move_or_rename_document # noqa: E501
- Moves and/or renames an entry. - Move and/or rename an entry by passing in the new parent folder ID or name in the JSON body. - Optional parameter: autoRename (default false). If an entry already exists with the given name, the entry will be automatically renamed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.move_or_rename_document(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param PatchEntryRequest body: The request containing the folder ID that the entry will be moved to and/or the new name the entry will be renamed to.
:param bool auto_rename: An optional query parameter used to indicate if the entry should be automatically renamed if another entry already exists with the same name in the folder. The default value is false.
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.move_or_rename_document_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
(data) = self.move_or_rename_document_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def move_or_rename_document_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""move_or_rename_document # noqa: E501
- Moves and/or renames an entry. - Move and/or rename an entry by passing in the new parent folder ID or name in the JSON body. - Optional parameter: autoRename (default false). If an entry already exists with the given name, the entry will be automatically renamed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.move_or_rename_document_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository ID. (required)
:param int entry_id: The requested entry ID. (required)
:param PatchEntryRequest body: The request containing the folder ID that the entry will be moved to and/or the new name the entry will be renamed to.
:param bool auto_rename: An optional query parameter used to indicate if the entry should be automatically renamed if another entry already exists with the same name in the folder. The default value is false.
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body', 'auto_rename'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method move_or_rename_document" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `move_or_rename_document`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `move_or_rename_document`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
if 'auto_rename' in params:
query_params.append(('autoRename', params['auto_rename'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Entry', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
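# Hedged usage sketch: a PATCH that both moves and renames an entry. The
# PatchEntryRequest constructor arguments shown are assumptions based on the
# docstring, not confirmed by this file.
# >>> body = PatchEntryRequest(parent_id=789, name='renamed.pdf')
# >>> entry = api.move_or_rename_document('repo-1', 123, body=body,
# ...                                     auto_rename=True)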
def write_template_value_to_entry(self, repo_id, entry_id, **kwargs): # noqa: E501
"""write_template_value_to_entry # noqa: E501
- Assign a template to an entry. - Provide an entry id, template name, and a list of template fields to assign to that entry. - Only template values will be modified. Any existing independent fields on the entry will not be modified, nor will they be added if included in the request. Modifications will only occur on templated fields. If the previously assigned template shares common template fields with the newly assigned template, the common field values will not be modified. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.write_template_value_to_entry(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The id of entry that will have its template updated. (required)
:param PutTemplateRequest body: The template and template fields that will be assigned to the entry.
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.write_template_value_to_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
else:
(data) = self.write_template_value_to_entry_with_http_info(repo_id, entry_id, **kwargs) # noqa: E501
return data
def write_template_value_to_entry_with_http_info(self, repo_id, entry_id, **kwargs): # noqa: E501
"""write_template_value_to_entry # noqa: E501
- Assign a template to an entry. - Provide an entry id, template name, and a list of template fields to assign to that entry. - Only template values will be modified. Any existing independent fields on the entry will not be modified, nor will they be added if included in the request. Modifications will only occur on templated fields. If the previously assigned template shares common template fields with the newly assigned template, the common field values will not be modified. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.write_template_value_to_entry_with_http_info(repo_id, entry_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repo_id: The requested repository id. (required)
:param int entry_id: The id of entry that will have its template updated. (required)
:param PutTemplateRequest body: The template and template fields that will be assigned to the entry.
:return: Entry
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repo_id', 'entry_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method write_template_value_to_entry" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repo_id' is set
if ('repo_id' not in params or
params['repo_id'] is None):
raise ValueError("Missing the required parameter `repo_id` when calling `write_template_value_to_entry`") # noqa: E501
# verify the required parameter 'entry_id' is set
if ('entry_id' not in params or
params['entry_id'] is None):
raise ValueError("Missing the required parameter `entry_id` when calling `write_template_value_to_entry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repo_id' in params:
path_params['repoId'] = params['repo_id'] # noqa: E501
if 'entry_id' in params:
path_params['entryId'] = params['entry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1-alpha/Repositories/{repoId}/Entries/{entryId}/template', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Entry', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
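# Hedged usage sketch: assigning a template and its fields to an entry. The
# PutTemplateRequest constructor arguments shown are assumptions.
# >>> body = PutTemplateRequest(template_name='Invoice', template_fields=[...])
# >>> entry = api.write_template_value_to_entry('repo-1', 123, body=body)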
| 51.570659 | 833 | 0.644649 | 14,571 | 114,951 | 4.886281 | 0.031501 | 0.041012 | 0.019467 | 0.023006 | 0.979592 | 0.976277 | 0.97344 | 0.970617 | 0.967597 | 0.96601 | 0 | 0.013872 | 0.271933 | 114,951 | 2,228 | 834 | 51.593806 | 0.836842 | 0.446564 | 0 | 0.82659 | 0 | 0 | 0.223828 | 0.060271 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032205 | false | 0 | 0.009909 | 0 | 0.090008 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
973f907d3a41b9104a9a0b1b21a0dcf3507fc102 | 20,744 | py | Python | pyEX/economic/economic.py | sourcery-ai-bot/pyEX | 21be6b4f72e6f8593df40f6d3632b97ea60c9532 | [
"Apache-2.0"
] | 107 | 2021-03-05T05:18:34.000Z | 2022-03-30T22:25:13.000Z | pyEX/economic/economic.py | sourcery-ai-bot/pyEX | 21be6b4f72e6f8593df40f6d3632b97ea60c9532 | [
"Apache-2.0"
] | 112 | 2021-03-05T03:41:21.000Z | 2021-12-01T03:24:22.000Z | pyEX/economic/economic.py | sourcery-ai-bot/pyEX | 21be6b4f72e6f8593df40f6d3632b97ea60c9532 | [
"Apache-2.0"
] | 37 | 2021-03-04T18:32:09.000Z | 2022-03-24T02:20:12.000Z | # *****************************************************************************
#
# Copyright (c) 2020, the pyEX authors.
#
# This file is part of the pyEX library, distributed under the terms of
# the Apache License 2.0. The full license can be found in the LICENSE file.
#
from enum import Enum
from functools import lru_cache, wraps
from ..common import _expire, _UTC, _timeseriesWrapper
from ..timeseries import timeSeries, timeSeriesDF, timeSeriesAsync
class EconomicPoints(Enum):
"""Economic data points
https://iexcloud.io/docs/api/#economic-data
Attributes:
FEDFUNDS: Effective federal funds rate
GDP: Real Gross Domestic Product
INDPRO: Industrial Production Index
CPI: Consumer Price Index All Urban Consumers
PAYROLL: Total nonfarm employees in thousands of persons, seasonally adjusted
HOUSING: Total Housing Starts in thousands of units, seasonally adjusted annual rate
UNEMPLOYMENT: Unemployment rate returned as a percent, seasonally adjusted
VEHICLES: Total Vehicle Sales in millions of units
RECESSION_PROB: US Recession Probabilities. Smoothed recession probabilities for the United States are obtained from a dynamic-factor Markov-switching model applied to four monthly coincident variables: non-farm payroll employment, the index of industrial production, real personal income excluding transfer payments, and real manufacturing and trade sales.
INITIALCLAIMS: Initial claims returned as a number, seasonally adjusted
RETAILMONEY: Retail money funds returned as billions of dollars, seasonally adjusted
INSTITUTIONALMONEY: Institutional money funds returned as billions of dollars, seasonally adjusted
"""
FEDFUNDS = "FEDFUNDS"
GDP = "A191RL1Q225SBEA"
INDPRO = "INDPRO"
CPI = "CPIAUCSL"
PAYROLL = "PAYEMS"
HOUSING = "HOUST"
UNEMPLOYMENT = "UNRATE"
VEHICLES = "TOTALSA"
RECESSION_PROB = "RECPROUSM156N"
INITIALCLAIMS = "IC4WSA"
RETAILMONEY = "WRMFSL"
INSTITUTIONALMONEY = "WIMFSL"
@staticmethod
@lru_cache(1)
def options():
"""Return a list of the available economic points options"""
return list(map(lambda c: c.value, EconomicPoints))
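# Hedged usage sketch: resolving FRED series keys through the enum before
# calling the wrappers below.
# >>> EconomicPoints.UNEMPLOYMENT.value
# 'UNRATE'
# >>> 'FEDFUNDS' in EconomicPoints.options()
# True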
@_expire(hour=8, tz=_UTC)
def fedfunds(token="", version="stable", filter="", format="json", **timeseries_kwargs):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="FEDFUNDS",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(fedfunds)
def fedfundsDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="FEDFUNDS",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(fedfunds)
async def fedfundsAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="FEDFUNDS",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
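# Hedged usage sketch for the sync/DataFrame/async triple this module repeats
# for every series (the token value is a placeholder, and forwarding `last`
# through to pyEX.timeseries.timeSeries is an assumption):
# >>> fedfunds(token='pk_test', last=5)             # list of records
# >>> fedfundsDF(token='pk_test', last=5)           # pandas DataFrame
# >>> await fedfundsAsync(token='pk_test', last=5)  # inside a coroutine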
@_expire(hour=8, tz=_UTC)
def gdp(token="", version="stable", filter="", format="json", **timeseries_kwargs):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="A191RL1Q225SBEA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(gdp)
def gdpDF(token="", version="stable", filter="", format="json", **timeseries_kwargs):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="A191RL1Q225SBEA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(gdp)
async def gdpAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="A191RL1Q225SBEA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def indpro(token="", version="stable", filter="", format="json", **timeseries_kwargs):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="INDPRO",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(indpro)
def indproDF(token="", version="stable", filter="", format="json", **timeseries_kwargs):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="INDPRO",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(indpro)
async def indproAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="INDPRO",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def cpi(token="", version="stable", filter="", format="json", **timeseries_kwargs):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="CPIAUCSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(cpi)
def cpiDF(token="", version="stable", filter="", format="json", **timeseries_kwargs):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="CPIAUCSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(cpi)
async def cpiAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="CPIAUCSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def payroll(token="", version="stable", filter="", format="json", **timeseries_kwargs):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="PAYEMS",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(payroll)
def payrollDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="PAYEMS",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(payroll)
async def payrollAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="PAYEMS",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def housing(token="", version="stable", filter="", format="json", **timeseries_kwargs):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="HOUST",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(housing)
def housingDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="HOUST",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(housing)
async def housingAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="HOUST",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def unemployment(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="UNRATE",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(unemployment)
def unemploymentDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="UNRATE",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(unemployment)
async def unemploymentAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="UNRATE",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def vehicles(token="", version="stable", filter="", format="json", **timeseries_kwargs):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="TOTALSA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(vehicles)
def vehiclesDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="TOTALSA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(vehicles)
async def vehiclesAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="TOTALSA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def recessionProb(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="RECPROUSM156N",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(recessionProb)
def recessionProbDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="RECPROUSM156N",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(recessionProb)
async def recessionProbAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="RECPROUSM156N",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def initialClaims(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="IC4WSA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(initialClaims)
def initialClaimsDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="IC4WSA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(initialClaims)
async def initialClaimsAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="IC4WSA",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def institutionalMoney(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="WRMFSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(institutionalMoney)
def institutionalMoneyDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="WRMFSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(institutionalMoney)
async def institutionalMoneyAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="WRMFSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
def retailMoney(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
"""Economic data
https://iexcloud.io/docs/api/#economic-data
Args:
token (str): Access token
version (str): API version
filter (str): filters: https://iexcloud.io/docs/api/#filter-results
format (str): return format, defaults to json
Supports all kwargs from `pyEX.timeseries.timeSeries`
Returns:
dict or DataFrame: result
"""
_timeseriesWrapper(timeseries_kwargs)
return timeSeries(
id="ECONOMIC",
key="WIMFSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(retailMoney)
def retailMoneyDF(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return timeSeriesDF(
id="ECONOMIC",
key="WIMFSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
@_expire(hour=8, tz=_UTC)
@wraps(retailMoney)
async def retailMoneyAsync(
token="", version="stable", filter="", format="json", **timeseries_kwargs
):
_timeseriesWrapper(timeseries_kwargs)
return await timeSeriesAsync(
id="ECONOMIC",
key="WIMFSL",
token=token,
version=version,
filter=filter,
format=format,
**timeseries_kwargs
)
| 25.515375 | 360 | 0.628182 | 2,116 | 20,744 | 6.053403 | 0.096408 | 0.134905 | 0.030916 | 0.036537 | 0.845187 | 0.845187 | 0.843704 | 0.841049 | 0.841049 | 0.832462 | 0 | 0.005574 | 0.247541 | 20,744 | 812 | 361 | 25.546798 | 0.815043 | 0.27338 | 0 | 0.824197 | 0 | 0 | 0.070346 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047259 | false | 0 | 0.007561 | 0 | 0.149338 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9758df8ee55b98dc3463e951b4932b0ef23ef675 | 201 | py | Python | parking_permits_app/pricing/secondary_vehicle.py | mingfeng/parking-permits | d0f5534bbf5a00dda07066d7b3a5dd68befedd59 | [
"MIT"
] | null | null | null | parking_permits_app/pricing/secondary_vehicle.py | mingfeng/parking-permits | d0f5534bbf5a00dda07066d7b3a5dd68befedd59 | [
"MIT"
] | null | null | null | parking_permits_app/pricing/secondary_vehicle.py | mingfeng/parking-permits | d0f5534bbf5a00dda07066d7b3a5dd68befedd59 | [
"MIT"
] | null | null | null | from parking_permits_app.constants import SECONDARY_VEHICLE_PRICE_INCREASE
def apply_secondary_vehicle_price_increase(price=None):
return price + (price / 100) * SECONDARY_VEHICLE_PRICE_INCREASE
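# Hedged worked example (the constant's real value lives in constants.py and is
# not shown here; 50 is an assumption for illustration): with
# SECONDARY_VEHICLE_PRICE_INCREASE == 50, a price of 30 becomes
# 30 + (30 / 100) * 50 == 45.0.
# >>> apply_secondary_vehicle_price_increase(price=30)
# 45.0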
| 33.5 | 74 | 0.850746 | 26 | 201 | 6.115385 | 0.576923 | 0.301887 | 0.396226 | 0.54717 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016575 | 0.099502 | 201 | 5 | 75 | 40.2 | 0.861878 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 9 |
c1133cd72f06c4d8c20a2218cf2e58e91b64511c | 39 | py | Python | Compiler Design Lab/ChocoPy_LLVM_Compiler/tests/parse/bad_var_decl.py | Abhishek-Aditya-bs/Lab-Projects-and-Assignments | fd2681a1c7453367a4df1790e58afb312f13998c | [
"MIT"
] | 7 | 2021-08-28T18:20:45.000Z | 2022-02-01T07:35:59.000Z | Compiler Design Lab/ChocoPy_LLVM_Compiler/tests/parse/bad_var_decl.py | Abhishek-Aditya-bs/Lab-Projects-and-Assignments | fd2681a1c7453367a4df1790e58afb312f13998c | [
"MIT"
] | 1 | 2020-05-30T17:57:11.000Z | 2020-05-30T20:44:53.000Z | tests/parse/bad_var_decl.py | yangdanny97/chocopy-python-frontend | d0fb63fc744771640fa4d06076743f42089899c1 | [
"MIT"
] | 2 | 2022-02-05T06:16:16.000Z | 2022-02-24T11:07:09.000Z | def f()->int:
return 3
x:int = f() | 9.75 | 13 | 0.487179 | 8 | 39 | 2.375 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035714 | 0.282051 | 39 | 4 | 14 | 9.75 | 0.642857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0 | 0.333333 | 0.666667 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
c1179dd5b80d8b3c2a5c7193d5672de9a28eee5e | 51,251 | py | Python | xrdsst/api/system_api.py | nordic-institute/X-Road-Security-Server-toolkit | 1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb | [
"MIT"
] | 7 | 2020-11-01T19:50:11.000Z | 2022-01-18T17:45:19.000Z | xrdsst/api/system_api.py | nordic-institute/X-Road-Security-Server-toolkit | 1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb | [
"MIT"
] | 24 | 2020-11-09T08:09:10.000Z | 2021-06-16T07:22:14.000Z | xrdsst/api/system_api.py | nordic-institute/X-Road-Security-Server-toolkit | 1538dbf3d76647f4fb3a72bbe93bf54f414ee9fb | [
"MIT"
] | 1 | 2021-04-27T14:39:48.000Z | 2021-04-27T14:39:48.000Z | # coding: utf-8
"""
X-Road Security Server Admin API
X-Road Security Server Admin API. Note that the error metadata responses described in some endpoints are subject to change and may be updated in upcoming versions. # noqa: E501
OpenAPI spec version: 1.0.31
Contact: info@niis.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from xrdsst.api_client.api_client import ApiClient
class SystemApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_configured_timestamping_service(self, **kwargs): # noqa: E501
"""add a configured timestamping service # noqa: E501
<h3>Administrator selects a new timestamping service.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_configured_timestamping_service(async_req=True)
>>> result = thread.get()
:param async_req bool
:param TimestampingService body: Timestamping service to add
:return: TimestampingService
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_configured_timestamping_service_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.add_configured_timestamping_service_with_http_info(**kwargs) # noqa: E501
return data
def add_configured_timestamping_service_with_http_info(self, **kwargs): # noqa: E501
"""add a configured timestamping service # noqa: E501
<h3>Administrator selects a new timestamping service.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_configured_timestamping_service_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param TimestampingService body: Timestamping service to add
:return: TimestampingService
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_configured_timestamping_service" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/timestamping-services', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TimestampingService', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
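# Hedged usage sketch (the configuration object and the TimestampingService
# field names are assumptions; ApiKeyAuth setup follows the generated client's
# usual pattern):
# >>> api = SystemApi(ApiClient(configuration))
# >>> tsa = TimestampingService(name='TSA', url='http://tsa.example.org')
# >>> api.add_configured_timestamping_service(body=tsa)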
def delete_configured_timestamping_service(self, **kwargs): # noqa: E501
"""delete configured timestamping service # noqa: E501
<h3>Administrator removes a configured timestamping service.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_configured_timestamping_service(async_req=True)
>>> result = thread.get()
:param async_req bool
:param TimestampingService body: Timestamping service to delete
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_configured_timestamping_service_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.delete_configured_timestamping_service_with_http_info(**kwargs) # noqa: E501
return data
def delete_configured_timestamping_service_with_http_info(self, **kwargs): # noqa: E501
"""delete configured timestamping service # noqa: E501
<h3>Administrator removes a configured timestamping service.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_configured_timestamping_service_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param TimestampingService body: Timestamping service to delete
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_configured_timestamping_service" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/timestamping-services/delete', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def download_anchor(self, **kwargs): # noqa: E501
"""download configuration anchor information # noqa: E501
<h3>Administrator downloads the configuration anchor information.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_anchor(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.download_anchor_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.download_anchor_with_http_info(**kwargs) # noqa: E501
return data
def download_anchor_with_http_info(self, **kwargs): # noqa: E501
"""download configuration anchor information # noqa: E501
<h3>Administrator downloads the configuration anchor information.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_anchor_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method download_anchor" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/anchor/download', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
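# Hedged usage sketch: persisting the downloaded anchor XML. The response is
# returned as a string per the signature above; the file path is illustrative.
# >>> anchor_xml = api.download_anchor()
# >>> with open('/tmp/configuration_anchor.xml', 'w') as f:
# ...     f.write(anchor_xml)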
def download_system_certificate(self, **kwargs): # noqa: E501
"""download the security server certificate as gzip compressed tar archive # noqa: E501
<h3>Administrator downloads the security server TLS certificate.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_system_certificate(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.download_system_certificate_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.download_system_certificate_with_http_info(**kwargs) # noqa: E501
return data
def download_system_certificate_with_http_info(self, **kwargs): # noqa: E501
"""download the security server certificate as gzip compressed tar archive # noqa: E501
<h3>Administrator downloads the security server TLS certificate.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_system_certificate_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method download_system_certificate" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/gzip']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/certificate/export', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def generate_system_certificate_request(self, **kwargs): # noqa: E501
"""generate new certificate request # noqa: E501
<h3>Administrator generates a new certificate request.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_system_certificate_request(async_req=True)
>>> result = thread.get()
:param async_req bool
:param DistinguishedName body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.generate_system_certificate_request_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.generate_system_certificate_request_with_http_info(**kwargs) # noqa: E501
return data
def generate_system_certificate_request_with_http_info(self, **kwargs): # noqa: E501
"""generate new certificate request # noqa: E501
<h3>Administrator generates a new certificate request.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_system_certificate_request_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param DistinguishedName body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method generate_system_certificate_request" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/certificate/csr', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
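# Hedged usage sketch: requesting a new TLS certificate CSR. The
# DistinguishedName constructor argument shown is an assumption based on the
# model name, not confirmed by this file.
# >>> csr = api.generate_system_certificate_request(
# ...     body=DistinguishedName(name='CN=ss1.example.org'))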
def generate_system_tls_key_and_certificate(self, **kwargs): # noqa: E501
"""generate a new internal TLS key and cert # noqa: E501
<h3>Administrator generates new internal TLS key and certificate.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_system_tls_key_and_certificate(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.generate_system_tls_key_and_certificate_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.generate_system_tls_key_and_certificate_with_http_info(**kwargs) # noqa: E501
return data
def generate_system_tls_key_and_certificate_with_http_info(self, **kwargs): # noqa: E501
"""generate a new internal TLS key and cert # noqa: E501
<h3>Administrator generates new internal TLS key and certificate.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_system_tls_key_and_certificate_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method generate_system_tls_key_and_certificate" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/certificate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_anchor(self, **kwargs): # noqa: E501
"""view the configuration anchor information # noqa: E501
<h3>Administrator views the configuration anchor information.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_anchor(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: Anchor
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_anchor_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_anchor_with_http_info(**kwargs) # noqa: E501
return data
def get_anchor_with_http_info(self, **kwargs): # noqa: E501
"""view the configuration anchor information # noqa: E501
<h3>Administrator views the configuration anchor information.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_anchor_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: Anchor
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_anchor" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/anchor', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Anchor', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_configured_timestamping_services(self, **kwargs): # noqa: E501
"""view the configured timestamping services # noqa: E501
<h3>Administrator views the configured timestamping services.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_configured_timestamping_services(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TimestampingService]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_configured_timestamping_services_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_configured_timestamping_services_with_http_info(**kwargs) # noqa: E501
return data
def get_configured_timestamping_services_with_http_info(self, **kwargs): # noqa: E501
"""view the configured timestamping services # noqa: E501
<h3>Administrator views the configured timestamping services.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_configured_timestamping_services_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TimestampingService]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_configured_timestamping_services" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/timestamping-services', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[TimestampingService]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_system_certificate(self, **kwargs): # noqa: E501
"""view the security server certificate information # noqa: E501
<h3>Administrator views the security server TLS certificate information.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_system_certificate(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: CertificateDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_system_certificate_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_system_certificate_with_http_info(**kwargs) # noqa: E501
return data
def get_system_certificate_with_http_info(self, **kwargs): # noqa: E501
"""view the security server certificate information # noqa: E501
<h3>Administrator views the security server TLS certificate information.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_system_certificate_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: CertificateDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_system_certificate" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/certificate', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CertificateDetails', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def import_system_certificate(self, **kwargs): # noqa: E501
"""import new internal TLS certificate. # noqa: E501
<h3>Administrator imports a new internal TLS certificate</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_system_certificate(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Object body: certificate to add
:return: CertificateDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.import_system_certificate_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.import_system_certificate_with_http_info(**kwargs) # noqa: E501
return data
def import_system_certificate_with_http_info(self, **kwargs): # noqa: E501
"""import new internal TLS certificate. # noqa: E501
<h3>Administrator imports a new internal TLS certificate</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_system_certificate_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Object body: certificate to add
:return: CertificateDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method import_system_certificate" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/octet-stream']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/certificate/import', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CertificateDetails', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def preview_anchor(self, **kwargs): # noqa: E501
"""Read and the configuration anchor file and return the hash for a preview. # noqa: E501
<h3>Administrator wants to preview a configuration anchor file hash.</h3> <p>The instance of the anchor is also validated unless the <code>validate_instance</code> query parameter is explicitly set to false. The anchor will not be saved.</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.preview_anchor(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Object body: configuration anchor
:param bool validate_instance: Whether or not to validate the owner instance of the anchor. Set this to false explicitly when previewing an anchor in the security server initialization phase. Default value is true if the parameter is omitted.
:return: Anchor
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.preview_anchor_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.preview_anchor_with_http_info(**kwargs) # noqa: E501
return data
def preview_anchor_with_http_info(self, **kwargs): # noqa: E501
"""Read and the configuration anchor file and return the hash for a preview. # noqa: E501
<h3>Administrator wants to preview a configuration anchor file hash.</h3> <p>The instance of the anchor is also validated unless the <code>validate_instance</code> query parameter is explicitly set to false. The anchor will not be saved.</p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.preview_anchor_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Object body: configuration anchor
:param bool validate_instance: Whether or not to validate the owner instance of the anchor. Set this to false explicitly when previewing an anchor in the security server initialization phase. Default value is true if the parameter is omitted.
:return: Anchor
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'validate_instance'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method preview_anchor" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'validate_instance' in params:
query_params.append(('validate_instance', params['validate_instance'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/octet-stream']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/anchor/previews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Anchor', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
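# Illustrative note (added, not generated code): ``validate_instance`` is the
# one knob specific to preview_anchor. A hedged doctest-style sketch, assuming
# ``api`` is a configured SystemApi instance and the anchor file path is
# hypothetical:
#
# >>> with open("configuration_anchor.xml", "rb") as f:
# ...     anchor_bytes = f.read()
# >>> api.preview_anchor(body=anchor_bytes)  # validates the owner instance (default)
# >>> api.preview_anchor(body=anchor_bytes, validate_instance=False)  # initialization phase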
def replace_anchor(self, **kwargs): # noqa: E501
"""Upload a configuration anchor file to replace an existing one. # noqa: E501
<h3>Administrator uploads a configuration anchor file anytime after the Security Server has been initialized.</h3> <p> <b>Note that this only works if there already exists an anchor that can be replaced.</b> When initializing a new Security Server, use the endpoint <code>POST /system/anchor</code> instead. </p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_anchor(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Object body: configuration anchor
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_anchor_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.replace_anchor_with_http_info(**kwargs) # noqa: E501
return data
def replace_anchor_with_http_info(self, **kwargs): # noqa: E501
"""Upload a configuration anchor file to replace an existing one. # noqa: E501
<h3>Administrator uploads a configuration anchor file anytime after the Security Server has been initialized.</h3> <p> <b>Note that this only works if there already exists an anchor that can be replaced.</b> When initializing a new Security Server, use the endpoint <code>POST /system/anchor</code> instead. </p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_anchor_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Object body: configuration anchor
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_anchor" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/octet-stream']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/anchor', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def system_version(self, **kwargs): # noqa: E501
"""get information for the system version # noqa: E501
<h3>Administrator views the system version information.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.system_version(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: Version
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.system_version_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.system_version_with_http_info(**kwargs) # noqa: E501
return data
def system_version_with_http_info(self, **kwargs): # noqa: E501
"""get information for the system version # noqa: E501
<h3>Administrator views the system version information.</h3> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.system_version_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: Version
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method system_version" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/version', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Version', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def upload_initial_anchor(self, **kwargs): # noqa: E501
"""Upload a new configuration anchor file when initializing a new security server. # noqa: E501
<h3>Administrator uploads a new configuration anchor file in the security server's initialization phase.</h3> <p> Calls to this endpoint only succeed if a configuration anchor is not already found – meaning that <b>this endpoint can only be used when initializing a new security server</b>. For updating the anchor for an already initialized security server use the <code>PUT /system/anchor</code> endpoint instead. </p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_initial_anchor(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Object body: configuration anchor
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.upload_initial_anchor_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.upload_initial_anchor_with_http_info(**kwargs) # noqa: E501
return data
def upload_initial_anchor_with_http_info(self, **kwargs): # noqa: E501
"""Upload a new configuration anchor file when initializing a new security server. # noqa: E501
<h3>Administrator uploads a new configuration anchor file in the security server's initialization phase.</h3> <p> Calls to this endpoint only succeed if a configuration anchor is not already found – meaning that <b>this endpoint can only be used when initializing a new security server</b>. For updating the anchor for an already initialized security server use the <code>PUT /system/anchor</code> endpoint instead. </p> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_initial_anchor_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Object body: configuration anchor
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method upload_initial_anchor" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/octet-stream']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKeyAuth'] # noqa: E501
return self.api_client.call_api(
'/system/anchor', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
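# ---------------------------------------------------------------------------
# Usage sketch (added for exposition, not generated code): how a configured
# client would call the endpoints above. The names ``Configuration``,
# ``ApiClient`` and ``SystemApi`` follow the usual swagger-codegen layout and
# the package name is an assumption; adapt them to the actual generated
# package.
#
# from xroad_client import ApiClient, Configuration, SystemApi  # hypothetical package
#
# config = Configuration()
# config.host = "https://securityserver:4000/api/v1"            # hypothetical host
# config.api_key["Authorization"] = "X-Road-ApiKey token=..."   # ApiKeyAuth
# api = SystemApi(ApiClient(config))
#
# anchor = api.get_anchor()                  # synchronous call
# thread = api.get_anchor(async_req=True)    # asynchronous call
# anchor = thread.get()
#
# with open("anchor.xml", "rb") as f:        # hypothetical path
#     data = f.read()
# api.upload_initial_anchor(body=data)       # before initialization (POST /system/anchor)
# api.replace_anchor(body=data)              # after initialization (PUT /system/anchor)
# ---------------------------------------------------------------------------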
| 39.423846 | 442 | 0.618817 | 5,796 | 51,251 | 5.223602 | 0.043996 | 0.047827 | 0.025895 | 0.033294 | 0.972784 | 0.968952 | 0.964559 | 0.956797 | 0.955014 | 0.945964 | 0 | 0.016869 | 0.295604 | 51,251 | 1,299 | 443 | 39.454196 | 0.821722 | 0.369417 | 0 | 0.833575 | 0 | 0 | 0.15711 | 0.05281 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041968 | false | 0 | 0.014472 | 0 | 0.118669 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c122e3b603ed130b1f47a28c1a21fc2bfd3cb0b8 | 5,986 | py | Python | Demos/classifiers.py | BeatHubmann/18F-IML | 4103da591b760edd6f3d98849a867d7cbe08a84f | [
"MIT"
] | null | null | null | Demos/classifiers.py | BeatHubmann/18F-IML | 4103da591b760edd6f3d98849a867d7cbe08a84f | [
"MIT"
] | null | null | null | Demos/classifiers.py | BeatHubmann/18F-IML | 4103da591b760edd6f3d98849a867d7cbe08a84f | [
"MIT"
] | null | null | null | import numpy as np
from util import dist
class Classifier(object):
"""docstring for Classifier"""
def __init__(self, X, Y):
super().__init__()
self._Xtr = X
self._Ytr = Y
self._Xtest = None
self._Ytest = None
self._w = None
self._class_cost = np.array([1, 1])
def load_data(self, X, Y):
self._Xtr = X
self._Ytr = Y
def load_test_data(self, X, Y):
self._Xtest = X
self._Ytest = Y
def set_weights(self, w):
self._w = w
def set_class_cost(self, cost_array):
self._class_cost = cost_array
def get_number_samples(self):
return self._Xtr.shape[0]
def predict(self, X, w=None):
pass
def loss(self, w, indexes):
pass
def gradient(self, w, indexes):
pass
def test_loss(self, w):
pass
class Perceptron(Classifier):
"""docstring for Perceptron"""
def __init__(self, X, Y):
super().__init__(X, Y)
self._w = np.random.randn(X.shape[1])
def predict(self, X, w=None):
if w is None:
w = self._w
z = np.dot(X, w)
return np.sign(z)
def loss(self, w, indexes=None):
if indexes is None:
indexes = np.arange(0, self.get_number_samples(), 1)
error = -np.dot(self._Xtr[indexes, :], w) * self._Ytr[indexes]
error[error < 0] = 0.
error_idx = ((self._Ytr[indexes][error > 0] + 1) / 2).astype(
int)  # (y+1)/2 maps {-1,1} to {0, 1} for indexing
weighted_error = self._class_cost[error_idx] * error[error > 0]
return np.sum(weighted_error) / indexes.size
def gradient(self, w, indexes=None):
if indexes is None:
indexes = np.arange(0, self.get_number_samples(), 1)
error = -np.dot(self._Xtr[indexes, :], w) * self._Ytr[indexes]
gradient = -self._Xtr[indexes, :] * self._Ytr[indexes, np.newaxis]
gradient[error < 0] = 0
error_idx = ((self._Ytr[indexes][error > 0] + 1) / 2).astype(
int)  # (y+1)/2 maps {-1,1} to {0, 1} for indexing
weighted_grad = self._class_cost[error_idx, np.newaxis] * gradient[error > 0]
return np.sum(weighted_grad, axis=0)
def test_loss(self, w):
error = -np.dot(self._Xtest, w) * self._Ytest
error[error < 0] = 0.
error_idx = ((self._Ytest[error > 0] + 1) / 2).astype(int) # (y+1)/2 maps {-1,1} to {0, 1} for indexing
weighted_error = self._class_cost[error_idx] * error[error > 0]
return np.sum(weighted_error) / self._Ytest.size
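# A minimal stochastic-gradient-descent step, illustrating how the
# ``loss``/``gradient`` interface above is meant to be driven. This helper is
# a sketch added for exposition, not part of the original module; it assumes
# ``clf`` is any Classifier subclass and samples a mini-batch of row indexes.
def sgd_step(clf, w, lr=0.01, batch_size=32):
    """Return updated weights after one mini-batch gradient step (illustrative)."""
    n = clf.get_number_samples()
    indexes = np.random.choice(n, size=min(batch_size, n), replace=False)
    return w - lr * clf.gradient(w, indexes)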
class SVM(Classifier):
"""docstring for Perceptron"""
def __init__(self, X, Y):
super().__init__(X, Y)
self._w = np.random.randn(X.shape[1])
def predict(self, X, w=None):
if w is None:
w = self._w
z = np.dot(X, w)
return np.sign(z)
def loss(self, w, indexes=None):
if indexes is None:
indexes = np.arange(0, self.get_number_samples(), 1)
error = 1 - np.dot(self._Xtr[indexes, :], w) * self._Ytr[indexes]
error[error < 0] = 0
error_idx = ((self._Ytr[indexes][error > 0] + 1) / 2).astype(
int)  # (y+1)/2 maps {-1,1} to {0, 1} for indexing
weighted_error = self._class_cost[error_idx] * error[error > 0]
return np.sum(weighted_error) / indexes.size
def gradient(self, w, indexes=None):
if indexes is None:
indexes = np.arange(0, self.get_number_samples(), 1)
error = 1 - np.dot(self._Xtr[indexes, :], w) * self._Ytr[indexes]
gradient = -self._Xtr[indexes, :] * self._Ytr[indexes, np.newaxis]
gradient[error < 0] = 0
error_idx = ((self._Ytr[indexes][error > 0] + 1) / 2).astype(
int)  # (y+1)/2 maps {-1,1} to {0, 1} for indexing
weighted_grad = self._class_cost[error_idx, np.newaxis] * gradient[error > 0]
return np.sum(weighted_grad, axis=0)
def test_loss(self, w):
error = 1 - np.dot(self._Xtest, w) * self._Ytest
error[error < 0] = 0
error_idx = ((self._Ytest[error > 0] + 1) / 2).astype(
int)  # (y+1)/2 maps {-1,1} to {0, 1} for indexing
weighted_error = self._class_cost[error_idx] * error[error > 0]
return np.sum(weighted_error) / self._Ytest.size
class Logistic(Classifier):
"""docstring for Logistic"""
def __init__(self, X, Y):
super().__init__(X, Y)
self._w = np.random.randn(X.shape[1])
def predict(self, X, w=None):
if w is None:
w = self._w
z = np.dot(X, w)
return 1 / (1 + np.exp(-z))
def loss(self, w, indexes=None):
if indexes is None:
indexes = np.arange(0, self.get_number_samples(), 1)
z = np.dot(self._Xtr[indexes, :], w) * self._Ytr[indexes]
error = np.log(1 + np.exp(-z))
return np.sum(error) / indexes.size
def gradient(self, w, indexes=None):
if indexes is None:
indexes = np.arange(0, self.get_number_samples(), 1)
z = np.dot(self._Xtr[indexes, :], w) * self._Ytr[indexes]
alpha = (np.exp(-z) / (1 + np.exp(-z)) * self._Ytr[indexes])
gradient = -(alpha[:, np.newaxis] * self._Xtr[indexes, :])
return np.sum(gradient, axis=0)
def test_loss(self, w):
z = np.dot(self._Xtest, w) * self._Ytest
error = np.log(1 + np.exp(-z))
return np.sum(error) / self._Ytest.size
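# Unlike Perceptron and SVM above, ``Logistic.predict`` returns probabilities
# in (0, 1) rather than labels in {-1, 1}. A small illustrative helper (added
# sketch, not part of the original module) mapping probabilities to labels:
def logistic_labels(clf, X, threshold=0.5):
    """Return {-1, 1} labels from Logistic probabilities (illustrative)."""
    return np.where(clf.predict(X) >= threshold, 1, -1)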
class kNN(Classifier):
def __init__(self, X, Y, k=1):
super().__init__(X, Y)
self._w = k
def set_k(self, k):
self._w = k
def get_k(self):
return self._w  # k is stored in self._w
def predict(self, X):
Y = np.zeros((X.shape[0]))
i = 0
for x in X:
D = dist(self._Xtr, x)
indexes = np.argsort(D, axis=0)[0:self._w]
Y[i] = np.sum(self._Ytr[indexes])
i += 1
return np.sign(Y)
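# Illustrative usage of the kNN classifier (added sketch, not original code);
# it assumes ``util.dist(A, x)`` returns the distance from each row of A to x.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    Xtr = rng.randn(20, 2)
    Ytr = np.sign(Xtr[:, 0])  # labels in {-1, 1} from the sign of the first feature
    knn = kNN(Xtr, Ytr, k=3)
    print(knn.predict(rng.randn(5, 2)))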
| 29.93 | 115 | 0.552957 | 887 | 5,986 | 3.547914 | 0.087937 | 0.038132 | 0.062282 | 0.042262 | 0.812838 | 0.777884 | 0.74579 | 0.732126 | 0.7245 | 0.7245 | 0 | 0.025392 | 0.296024 | 5,986 | 199 | 116 | 30.080402 | 0.721405 | 0.059472 | 0 | 0.664336 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.202797 | false | 0.027972 | 0.013986 | 0.013986 | 0.356643 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c12b26afeef83e0ac66aa968ed1ee7c3d98a39d3 | 128 | py | Python | support/tickets/admin.py | UladzislauBaranau/support-api | c453fd6ecc09027ee49d8f582c54521627ddf1a6 | [
"MIT"
] | null | null | null | support/tickets/admin.py | UladzislauBaranau/support-api | c453fd6ecc09027ee49d8f582c54521627ddf1a6 | [
"MIT"
] | null | null | null | support/tickets/admin.py | UladzislauBaranau/support-api | c453fd6ecc09027ee49d8f582c54521627ddf1a6 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Message, Ticket
admin.site.register(Ticket)
admin.site.register(Message)
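# An equivalent, slightly richer registration using the decorator form — a
# sketch for illustration only (registering a model twice raises
# AlreadyRegistered, so this stays commented out); the field names in
# ``list_display`` are assumptions about the Ticket model.
#
# @admin.register(Ticket)
# class TicketAdmin(admin.ModelAdmin):
#     list_display = ("id", "status")  # hypothetical fields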
| 18.285714 | 35 | 0.8125 | 18 | 128 | 5.777778 | 0.555556 | 0.211538 | 0.288462 | 0.442308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101563 | 128 | 6 | 36 | 21.333333 | 0.904348 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
c14565047b47912620085c943d16fcf4b3ecd3e0 | 26,238 | py | Python | tests/test_property.py | Informasjonsforvaltning/modelldcatnotordf | 995129ff9f6fb95f9a9d875b27f3aa14bac9b7f1 | [
"Apache-2.0"
] | 1 | 2020-11-29T18:36:21.000Z | 2020-11-29T18:36:21.000Z | tests/test_property.py | Informasjonsforvaltning/modelldcatnotordf | 995129ff9f6fb95f9a9d875b27f3aa14bac9b7f1 | [
"Apache-2.0"
] | 142 | 2020-10-07T08:52:55.000Z | 2021-11-18T15:09:31.000Z | tests/test_property.py | Informasjonsforvaltning/modelldcatnotordf | 995129ff9f6fb95f9a9d875b27f3aa14bac9b7f1 | [
"Apache-2.0"
] | null | null | null | """Test cases for the property module."""
from typing import List, Union
from concepttordf import Concept
from datacatalogtordf import URI
import pytest
from pytest_mock import MockFixture
from rdflib import Graph
from skolemizer.testutils import skolemization, SkolemUtils
from modelldcatnotordf.modelldcatno import (
ModelElement,
ModelProperty,
Module,
ObjectType,
Role,
)
from tests.testutils import assert_isomorphic
"""
A test module for the ModelProperty class.
"""
def test_instantiate_resource_should_fail_with_typeerror() -> None:
"""It returns a TypeErro exception."""
with pytest.raises(TypeError):
_ = ModelProperty() # type: ignore
def test_to_graph_should_return_skolemization(mocker: MockFixture) -> None:
"""It returns a property graph as blank node isomorphic to spec."""
property = Role()
mocker.patch(
"skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
)
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:Role .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_identifier() -> None:
"""It returns an identifier graph isomorphic to spec."""
property = Role()
property.identifier = "http://example.com/properties/1"
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/properties/1> a modelldcatno:Role .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_has_type_both_identifiers() -> None:
"""It returns a has_type graph isomorphic to spec."""
property = Role()
property.identifier = "http://example.com/properties/1"
modelelement = ObjectType()
modelelement.identifier = "http://example.com/modelelements/1"
has_types: List[Union[ModelElement, URI]] = [modelelement]
property.has_type = has_types
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/properties/1> a modelldcatno:Role ;
modelldcatno:hasType <http://example.com/modelelements/1> .
<http://example.com/modelelements/1> a modelldcatno:ObjectType ;
.
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_has_type_skolemization_property_id(
mocker: MockFixture,
) -> None:
"""It returns a has_type graph isomorphic to spec."""
property = Role()
property.identifier = "http://example.com/properties/1"
modelelement = ObjectType()
property.has_type.append(modelelement)
mocker.patch(
"skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
)
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/properties/1> a modelldcatno:Role ;
modelldcatno:hasType
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
.
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:ObjectType .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_has_type_skolemization_modelelement_id(
mocker: MockFixture,
) -> None:
"""It returns a has_type graph isomorphic to spec."""
property = Role()
modelelement = ObjectType()
modelelement.identifier = "http://example.com/modelelements/1"
property.has_type.append(modelelement)
mocker.patch(
"skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
)
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:Role ;
modelldcatno:hasType <http://example.com/modelelements/1>
.
<http://example.com/modelelements/1> a modelldcatno:ObjectType .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_has_type_both_skolemizations(
mocker: MockFixture,
) -> None:
"""It returns a has_type graph isomorphic to spec."""
property = Role()
modelelement = ObjectType()
property.has_type.append(modelelement)
skolemutils = SkolemUtils()
mocker.patch(
"skolemizer.Skolemizer.add_skolemization",
side_effect=skolemutils.get_skolemization,
)
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:Role ; modelldcatno:hasType
<http://example.com/.well-known/skolem/21043186-80ce-11eb-9829-cf7c8fc855ce> .
<http://example.com/.well-known/skolem/21043186-80ce-11eb-9829-cf7c8fc855ce>
a modelldcatno:ObjectType .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_min_occurs() -> None:
"""It returns a min_occurs graph isomorphic to spec."""
property = Role()
property.identifier = "http://example.com/properties/1"
property.min_occurs = 1
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/properties/1> a modelldcatno:Role ;
xsd:minOccurs 1 .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_max_occurs() -> None:
"""It returns a max_occurs graph isomorphic to spec."""
property = Role()
property.identifier = "http://example.com/properties/1"
property.max_occurs = "1"
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/properties/1> a modelldcatno:Role ;
xsd:maxOccurs "1"^^xsd:nonNegativeInteger .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_title_and_identifier() -> None:
"""It returns a title graph isomorphic to spec."""
"""It returns an identifier graph isomorphic to spec."""
modelproperty = Role()
modelproperty.identifier = "http://example.com/properties/1"
modelproperty.title = {"nb": "Tittel 1", "en": "Title 1"}
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/properties/1> a modelldcatno:Role;
dct:title "Title 1"@en, "Tittel 1"@nb ;
.
"""
g1 = Graph().parse(data=modelproperty.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_subject() -> None:
"""It returns a subject graph isomorphic to spec."""
modelproperty = Role()
modelproperty.identifier = "http://example.com/properties/1"
subject = Concept()
subject.identifier = "https://example.com/subjects/1"
modelproperty.subject = subject
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
<http://example.com/properties/1> a modelldcatno:Role ;
dct:subject <https://example.com/subjects/1> ;
.
<https://example.com/subjects/1> a skos:Concept .
"""
g1 = Graph().parse(data=modelproperty.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_description() -> None:
"""It returns a description graph isomorphic to spec."""
"""It returns an identifier graph isomorphic to spec."""
modelproperty = Role()
modelproperty.identifier = "http://example.com/modelpropertys/1"
modelproperty.description = {"nb": "Beskrivelse", "en": "Description"}
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/modelpropertys/1> a modelldcatno:Role ;
dct:description "Description"@en, "Beskrivelse"@nb ;
.
"""
g1 = Graph().parse(data=modelproperty.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_belongs_to_module_str(mocker: MockFixture,) -> None:
"""It returns a belongs_to_module graph isomorphic to spec."""
modelproperty = Role()
modelproperty.identifier = "http://example.com/properties/1"
module = "http://www.example.org/core"
belongs_to_module: List[Union[Module, str]] = [module]
modelproperty.belongs_to_module = belongs_to_module
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/properties/1> a modelldcatno:Role ;
modelldcatno:belongsToModule
<http://www.example.org/core>
.
"""
mocker.patch(
"skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
)
g1 = Graph().parse(data=modelproperty.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_belongs_to_module_as_graph(mocker: MockFixture) -> None:
"""It returns a belongs_to_module graph isomorphic to spec."""
modelproperty = Role()
modelproperty.identifier = "http://example.com/properties/1"
module = Module()
module.title = {None: "core"}
modelproperty.belongs_to_module = [module]
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/properties/1> a modelldcatno:Role ;
modelldcatno:belongsToModule
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
.
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:Module ;
dct:title "core"
.
"""
mocker.patch(
"skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
)
g1 = Graph().parse(data=modelproperty.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_forms_symmetry_with() -> None:
"""It returns an identifier graph isomorphic to spec."""
modelproperty1 = Role()
modelproperty1.identifier = "http://example.com/properties/1"
modelproperty2 = Role()
modelproperty2.identifier = "http://example.com/properties/2"
modelproperty1.forms_symmetry_with = modelproperty2
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
<http://example.com/properties/1>
a modelldcatno:Role;
modelldcatno:formsSymmetryWith <http://example.com/properties/2> .
<http://example.com/properties/2>
a modelldcatno:Role .
"""
g1 = Graph().parse(data=modelproperty1.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_forms_symmetry_with_skolemization(
mocker: MockFixture,
) -> None:
"""It returns an identifier graph isomorphic to spec."""
modelproperty1 = Role()
modelproperty1.identifier = "http://example.com/properties/1"
modelproperty2 = Role()
modelproperty2.title = {"ru": "заглавие", "nb": "Tittel", "en": "Title"}
modelproperty1.forms_symmetry_with = modelproperty2
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
<http://example.com/properties/1> a modelldcatno:Role;
modelldcatno:formsSymmetryWith
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
.
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:Role ;
dct:title
"заглавие"@ru,
"Title"@en,
"Tittel"@nb
.
"""
mocker.patch(
"skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
)
g1 = Graph().parse(data=modelproperty1.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_relation_property_label() -> None:
"""It returns a relation_property_label graph isomorphic to spec."""
"""It returns an identifier graph isomorphic to spec."""
modelproperty = Role()
modelproperty.identifier = "http://example.com/modelpropertys/1"
modelproperty.relation_property_label = {
"nb": "Navn på relasjon mellom to egenskaper.",
"en": "A relation property label",
}
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/modelpropertys/1> a modelldcatno:Role ;
modelldcatno:relationPropertyLabel
"A relation property label"@en,
"Navn på relasjon mellom to egenskaper."@nb ;
.
"""
g1 = Graph().parse(data=modelproperty.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_sequence_number() -> None:
"""It returns a sequence_number graph isomorphic to spec."""
property = Role()
property.identifier = "http://example.com/properties/1"
property.sequence_number = 1
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/properties/1> a modelldcatno:Role ;
modelldcatno:sequenceNumber "1"^^xsd:positiveInteger .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_has_type_as_uri() -> None:
"""It returns a has_type graph isomorphic to spec."""
property = Role()
property.identifier = "http://example.com/properties/1"
modelelement = "http://example.com/modelelements/1"
has_types: List[Union[ModelElement, URI]] = [modelelement]
property.has_type = has_types
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/properties/1> a modelldcatno:Role ;
modelldcatno:hasType <http://example.com/modelelements/1> .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_subject_as_uri() -> None:
"""It returns a subject graph isomorphic to spec."""
modelproperty = Role()
modelproperty.identifier = "http://example.com/properties/1"
subject = "https://example.com/subjects/1"
modelproperty.subject = subject
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
<http://example.com/properties/1> a modelldcatno:Role ;
dct:subject <https://example.com/subjects/1> ;
.
"""
g1 = Graph().parse(data=modelproperty.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_forms_symmetry_with_as_uri() -> None:
"""It returns an identifier graph isomorphic to spec."""
modelproperty1 = Role()
modelproperty1.identifier = "http://example.com/properties/1"
modelproperty2 = "http://example.com/properties/2"
modelproperty1.forms_symmetry_with = modelproperty2
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
<http://example.com/properties/1>
a modelldcatno:Role;
modelldcatno:formsSymmetryWith <http://example.com/properties/2> .
"""
g1 = Graph().parse(data=modelproperty1.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_navigable() -> None:
"""It returns an navigable graph isomorphic to spec."""
modelproperty = Role()
modelproperty.identifier = "http://example.com/properties/1"
modelproperty.navigable = True
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/properties/1>
a modelldcatno:Role;
modelldcatno:navigable "true"^^xsd:boolean .
"""
g1 = Graph().parse(data=modelproperty.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_to_graph_should_return_max_occurs_asterisk() -> None:
"""It returns a max_occurs graph isomorphic to spec."""
property = Role()
property.identifier = "http://example.com/properties/1"
property.max_occurs = "*"
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/properties/1> a modelldcatno:Role ;
xsd:maxOccurs "*" .
"""
g1 = Graph().parse(data=property.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_min_occurs_0(mocker: MockFixture) -> None:
"""It returns a role graph isomorphic to spec."""
role = Role()
role.min_occurs = 0
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:Role ;
xsd:minOccurs 0
.
"""
skolemutils = SkolemUtils()
mocker.patch(
"skolemizer.Skolemizer.add_skolemization",
side_effect=skolemutils.get_skolemization,
)
g1 = Graph().parse(data=role.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
def test_sequence_number_0(mocker: MockFixture) -> None:
"""It returns a role graph isomorphic to spec."""
role = Role()
role.sequence_number = 0
src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:Role ;
modelldcatno:sequenceNumber "0"^^xsd:positiveInteger
.
"""
skolemutils = SkolemUtils()
mocker.patch(
"skolemizer.Skolemizer.add_skolemization",
side_effect=skolemutils.get_skolemization,
)
g1 = Graph().parse(data=role.to_rdf(), format="turtle")
g2 = Graph().parse(data=src, format="turtle")
assert_isomorphic(g1, g2)
| 35.313594 | 88 | 0.641017 | 3,279 | 26,238 | 5.042696 | 0.053065 | 0.037254 | 0.04681 | 0.062413 | 0.914243 | 0.900756 | 0.890233 | 0.887209 | 0.887209 | 0.881161 | 0 | 0.044358 | 0.192355 | 26,238 | 742 | 89 | 35.361186 | 0.735926 | 0.04947 | 0 | 0.76673 | 0 | 0.128107 | 0.607329 | 0.031479 | 0 | 0 | 0 | 0 | 0.047801 | 1 | 0.047801 | false | 0 | 0.017208 | 0 | 0.06501 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c1943d0f1a46c388807faf079b44ba35983c1fd4 | 28,327 | py | Python | pySDC/implementations/datatype_classes/particles.py | janEbert/pySDC | 167d78c4118bc3a5a446ec973fe65fb35db94471 | [
"BSD-2-Clause"
] | null | null | null | pySDC/implementations/datatype_classes/particles.py | janEbert/pySDC | 167d78c4118bc3a5a446ec973fe65fb35db94471 | [
"BSD-2-Clause"
] | null | null | null | pySDC/implementations/datatype_classes/particles.py | janEbert/pySDC | 167d78c4118bc3a5a446ec973fe65fb35db94471 | [
"BSD-2-Clause"
] | 1 | 2021-07-27T11:44:54.000Z | 2021-07-27T11:44:54.000Z |
import copy as cp
import numpy as np
from pySDC.core.Errors import DataError
class particles(object):
"""
Particle data type for particles in 3 dimensions
This data type can be used for particles in 3 dimensions with 3 position and 3 velocity values per particle
Attributes:
pos: contains the positions of all particles
vel: contains the velocities of all particles
"""
class position(object):
"""
Position data type for particles in 3 dimensions
Attributes:
values (np.ndarray): array with 3 position values per particle (dim. 3*nparts)
"""
def __init__(self, init=None, val=None):
"""
Initialization routine
Args:
init: can either be a number or another position object
val: initial value (default: None)
Raises:
DataError: if init is none of the types above
"""
# if init is another position, do a copy (init by copy)
if isinstance(init, type(self)):
self.values = init.values.copy()
# if init is a number, create position object with val as initial value
elif isinstance(init, int) or isinstance(init, tuple):
self.values = np.empty(init)
self.values[:] = val
# something is wrong, if none of the ones above hit
else:
raise DataError('something went wrong during %s initialization' % type(self))
def __add__(self, other):
"""
Overloading the addition operator for position types
Args:
other (position): position object to be added
Raises:
DataError: if other is not a position object
Returns:
position: sum of caller and other values (self+other)
"""
if isinstance(other, type(self)):
# always create new position, since otherwise c = a + b changes a as well!
pos = particles.position(self.values.shape)
pos.values = self.values + other.values
return pos
else:
raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))
def __sub__(self, other):
"""
Overloading the subtraction operator for position types
Args:
other (position): position object to be subtracted
Raises:
DataError: if other is not a position object
Returns:
position: differences between caller and other values (self-other)
"""
if isinstance(other, type(self)):
# always create new position, since otherwise c = a - b changes a as well!
pos = particles.position(self.values.shape)
pos.values = self.values - other.values
return pos
else:
raise DataError("Type error: cannot subtract %s from %s" % (type(other), type(self)))
def __rmul__(self, other):
"""
Overloading the right multiply by factor operator for position types
Args:
other (float): factor
Raises:
DataError: is other is not a float
Returns:
position: original values scaled by factor
"""
if isinstance(other, float):
# create new position
pos = particles.position(self.values.shape)
pos.values = self.values * other
return pos
else:
raise DataError("Type error: cannot multiply %s to %s" % (type(other), type(self)))
def __abs__(self):
"""
Overloading the abs operator for position types
Returns:
float: absolute maximum of all position values
"""
return np.amax(np.absolute(self.values))
class velocity(object):
"""
Velocity data type for particles in 3 dimensions
Attributes:
values (np.ndarray): array with 3 velocity values per particle (dim. 3*nparts)
"""
def __init__(self, init=None, val=None):
"""
Initialization routine
Args:
init: can either be a number or another velocity object
val: initial value (default: None)
Raises:
DataError: if init is none of the types above
"""
# if init is another velocity, do a copy (init by copy)
if isinstance(init, type(self)):
self.values = init.values.copy()
# if init is a number, create velocity object with val as initial value
elif isinstance(init, int) or isinstance(init, tuple):
self.values = np.empty(init)
self.values[:] = val
# something is wrong, if none of the ones above hit
else:
raise DataError('something went wrong during %s initialization' % type(self))
def __add__(self, other):
"""
Overloading the addition operator for velocity types
Args:
other: velocity object to be added
Raises:
DataError: if other is not a velocity object
Returns:
velocity: sum of caller and other values (self+other)
"""
if isinstance(other, type(self)):
# always create new position, since otherwise c = a + b changes a as well!
vel = particles.velocity(self.values.shape)
vel.values = self.values + other.values
return vel
else:
raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))
def __sub__(self, other):
"""
Overloading the subtraction operator for velocity types
Args:
other: velocity object to be subtracted
Raises:
DataError: if other is not a velocity object
Returns:
velocity: differences between caller and other values (self-other)
"""
if isinstance(other, type(self)):
# always create new position, since otherwise c = a - b changes a as well!
vel = particles.velocity(self.values.shape)
vel.values = self.values - other.values
return vel
else:
raise DataError("Type error: cannot subtract %s from %s" % (type(other), type(self)))
def __rmul__(self, other):
"""
Overloading the right multiply by factor operator for velocity types
Args:
other (float): factor
Raises:
DataError: if other is not a float
Returns:
position: original values scaled by factor, transformed to position
"""
if isinstance(other, float):
# create new position, interpret float factor as time (time x velocity = position)
pos = particles.position(self.values.shape)
pos.values = self.values * other
return pos
else:
raise DataError("Type error: cannot multiply %s to %s" % (type(other), type(self)))
def __abs__(self):
"""
Overloading the abs operator for velocity types
Returns:
float: absolute maximum of all velocity values
"""
# FIXME: is this a good idea for multiple particles?
return np.amax(np.absolute(self.values))
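# Illustrative sketch (dt must be a plain Python float, as enforced by
# __rmul__): multiplying a velocity by a float is interpreted as a time step,
# so the product is a position increment rather than a velocity.
#
#   v = particles.velocity(6, val=2.0)
#   dx = 0.1 * v     # a particles.position with values 0.2
#   # updates like x_new = x_old + dt * v therefore type-check naturally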
def __init__(self, init=None, val=None):
"""
Initialization routine
Args:
init: can either be a number or another particle object
val: initial tuple of values for position and velocity (default: (None,None))
Raises:
DataError: if init is none of the types above
"""
# if init is another particles object, do a copy (init by copy)
if isinstance(init, type(self)):
self.pos = particles.position(init.pos)
self.vel = particles.velocity(init.vel)
self.q = init.q.copy()
self.m = init.m.copy()
# if init is a number, create particles object and pick the corresponding initial values
elif isinstance(init, int):
if isinstance(val, int) or isinstance(val, float) or val is None:
self.pos = particles.position(init, val=val)
self.vel = particles.velocity(init, val=val)
self.q = np.zeros(init)
self.q[:] = val
self.m = np.zeros(init)
self.m[:] = val
elif isinstance(val, tuple) and len(val) == 4:
self.pos = particles.position(init, val=val[0])
self.vel = particles.velocity(init, val=val[1])
self.q = np.zeros(init)
self.q[:] = val[2]
self.m = np.zeros(init)
self.m[:] = val[3]
else:
raise DataError('type of val is wrong, got %s' % val)
elif isinstance(init, tuple):
if isinstance(val, int) or isinstance(val, float) or val is None:
self.pos = particles.position(init, val=val)
self.vel = particles.velocity(init, val=val)
self.q = np.zeros(init[-1])
self.q[:] = val
self.m = np.zeros(init[-1])
self.m[:] = val
elif isinstance(val, tuple) and len(val) == 4:
self.pos = particles.position(init, val=val[0])
self.vel = particles.velocity(init, val=val[1])
self.q = np.zeros(init[-1])
self.q[:] = val[2]
self.m = np.zeros(init[-1])
self.m[:] = val[3]
else:
raise DataError('type of val is wrong, got %s' % val)
# something is wrong, if none of the ones above hit
else:
raise DataError('something went wrong during %s initialization' % type(self))
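# Illustrative sketch of the two val forms (the layout of 3 coordinates per
# particle for 2 particles is an assumption about the intended use):
#
#   p = particles(6, val=0.0)                        # pos, vel, q, m all 0.0
#   p = particles(6, val=(0.0, 1.0, -1.0, 1836.0))   # pos, vel, charge, mass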
def __add__(self, other):
"""
Overloading the addition operator for particles types
Args:
other (particles): particles object to be added
Raises:
DataError: if other is not a particles object
Returns:
particles: sum of caller and other values (self+other)
"""
if isinstance(other, type(self)):
# always create new particles, since otherwise c = a + b changes a as well!
p = particles(self.pos.values.shape)
p.pos = self.pos + other.pos
p.vel = self.vel + other.vel
p.m = self.m
p.q = self.q
return p
else:
raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))
def __sub__(self, other):
"""
Overloading the subtraction operator for particles types
Args:
other (particles): particles object to be subtracted
Raises:
DataError: if other is not a particles object
Returns:
particles: differences between caller and other values (self-other)
"""
if isinstance(other, type(self)):
# always create new particles, since otherwise c = a - b changes a as well!
p = particles(self.pos.values.shape)
p.pos = self.pos - other.pos
p.vel = self.vel - other.vel
p.m = self.m
p.q = self.q
return p
else:
raise DataError("Type error: cannot subtract %s from %s" % (type(other), type(self)))
def __rmul__(self, other):
"""
Overloading the right multiply by factor operator for particles types
Args:
other (float): factor
Raises:
DataError: if other is not a float
Returns:
particles: new particles object with scaled position and velocity
"""
if isinstance(other, float):
# always create new particles
p = particles(self.pos.values.shape)
p.pos = other * self.pos
p.vel.values = other * self.vel.values
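# note: vel.values is scaled directly here, since float * velocity would
# return a position (see velocity.__rmul__)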
p.m = self.m
p.q = self.q
return p
else:
raise DataError("Type error: cannot multiply %s to %s" % (type(other), type(self)))
def __abs__(self):
"""
Overloading the abs operator for particles types
Returns:
float: absolute maximum of abs(pos) and abs(vel) for all particles
"""
abspos = abs(self.pos)
absvel = abs(self.vel)
return np.amax((abspos, absvel))
def send(self, dest=None, tag=None, comm=None):
"""
Routine for sending data forward in time (blocking)
Args:
dest (int): target rank
tag (int): communication tag
comm: communicator
Returns:
None
"""
comm.send(self, dest=dest, tag=tag)
return None
def isend(self, dest=None, tag=None, comm=None):
"""
Routine for sending data forward in time (non-blocking)
Args:
dest (int): target rank
tag (int): communication tag
comm: communicator
Returns:
request handle
"""
return comm.isend(self, dest=dest, tag=tag)
def recv(self, source=None, tag=None, comm=None):
"""
Routine for receiving in time
Args:
source (int): source rank
tag (int): communication tag
comm: communicator
Returns:
None
"""
part = comm.recv(source=source, tag=tag)
self.pos = particles.position(part.pos)
self.vel = particles.velocity(part.vel)
self.m = part.m.copy()
self.q = part.q.copy()
return None
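# Illustrative sketch (assumption: comm is an mpi4py communicator, which
# provides send/isend/recv with these keyword arguments):
#
#   from mpi4py import MPI
#   comm = MPI.COMM_WORLD
#   if comm.Get_rank() == 0:
#       p.send(dest=1, tag=99, comm=comm)
#   elif comm.Get_rank() == 1:
#       p.recv(source=0, tag=99, comm=comm)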
class acceleration(object):
"""
Acceleration data type for particles in 3 dimensions
Attributes:
values (np.ndarray): array with 3 acceleration values per particle (dim. 3*nparts)
"""
def __init__(self, init=None, val=None):
"""
Initialization routine
Args:
init: can either be a number or another acceleration object
val: initial value (default: None)
Raises:
DataError: if init is none of the types above
"""
# if init is another acceleration object, do a copy (init by copy)
if isinstance(init, acceleration):
self.values = init.values.copy()
# if init is a number, create acceleration object with val as initial value
elif isinstance(init, int) or isinstance(init, tuple):
self.values = np.empty(init)
self.values[:] = val
# something is wrong, if none of the ones above hit
else:
raise DataError('something went wrong during %s initialization' % type(self))
def __add__(self, other):
"""
Overloading the addition operator for acceleration types
Args:
other (acceleration): acceleration object to be added
Raises:
DataError: if other is not an acceleration object
Returns:
acceleration: sum of caller and other values (self+other)
"""
# cannot do type-checking here, because otherwise f-interpolation would not work
# (multiplication with a constant yields velocity, and velocity + acceleration = boom!)
# if isinstance(other, type(self)):
# always create new acceleration, since otherwise c = a + b changes a as well!
acc = acceleration(self.values.shape)
acc.values = self.values + other.values
return acc
# else:
# raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))
def __sub__(self, other):
"""
Overloading the subtraction operator for acceleration types
Args:
other (acceleration): acceleration object to be subtracted
Raises:
DataError: if other is not an acceleration object
Returns:
acceleration: difference between caller and other values (self-other)
"""
if isinstance(other, type(self)):
# always create new acceleration, since otherwise c = a - b changes a as well!
acc = acceleration(self.values.shape)
acc.values = self.values - other.values
return acc
else:
raise DataError("Type error: cannot subtract %s to %s" % (type(other), type(self)))
def __rmul__(self, other):
"""
Overloading the right multiply by factor operator for acceleration types
Args:
other (float): factor
Raises:
DataError: if other is not a float
Returns:
velocity: original values scaled by factor, transformed to velocity
"""
if isinstance(other, float):
# create new velocity, interpret float factor as time (time x acceleration = velocity)
vel = particles.velocity(self.values.shape)
vel.values = self.values * other
return vel
else:
raise DataError("Type error: cannot multiply %s to %s" % (type(other), type(self)))
class fields(object):
"""
Field data type for 3 dimensions
This data type can be used for electric and magnetic fields in 3 dimensions
Attributes:
elec: contains the electric field
magn: contains the magnetic field
"""
class electric(object):
"""
Electric field data type in 3 dimensions
Attributes:
values (np.ndarray): array with 3 field values per particle (dim. 3*nparts)
"""
def __init__(self, init=None, val=None):
"""
Initialization routine
Args:
init: can either be a number or another electric object
val: initial value (default: None)
Raises:
DataError: if init is none of the types above
"""
# if init is another electric object, do a copy (init by copy)
if isinstance(init, type(self)):
self.values = init.values.copy()
# if init is a number, create electric object with val as initial value
elif isinstance(init, int) or isinstance(init, tuple):
self.values = np.empty(init)
self.values[:] = val
# something is wrong, if none of the ones above hit
else:
raise DataError('something went wrong during %s initialization' % type(self))
def __add__(self, other):
"""
Overloading the addition operator for electric types
Args:
other (electric): electric object to be added
Raises:
DataError: if other is not an electric object
Returns:
electric: sum of caller and other values (self+other)
"""
if isinstance(other, type(self)):
# always create new electric, since otherwise c = a + b changes a as well!
E = fields.electric(self.values.shape)
E.values = self.values + other.values
return E
else:
raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))
def __sub__(self, other):
"""
Overloading the subtraction operator for electric types
Args:
other (electric): electric object to be subtracted
Raises:
DataError: if other is not an electric object
Returns:
electric: difference of caller and other values (self-other)
"""
if isinstance(other, type(self)):
# always create new electric, since otherwise c = a - b changes a as well!
E = fields.electric(self.values.shape)
E.values = self.values - other.values
return E
else:
raise DataError("Type error: cannot subtract %s from %s" % (type(other), type(self)))
def __rmul__(self, other):
"""
Overloading the right multiply by factor operator for electric types
Args:
other (float): factor
Raises:
DataError: if other is not a float
Returns:
electric: original values scaled by factor
"""
if isinstance(other, float):
# create new electric, no specific interpretation of float factor
E = fields.electric(self.values.shape)
E.values = self.values * other
return E
else:
raise DataError("Type error: cannot multiply %s to %s" % (type(other), type(self)))
class magnetic(object):
"""
Magnetic field data type in 3 dimensions
Attributes:
values (np.ndarray): array with 3 field values per particle (dim. 3*nparts)
"""
def __init__(self, init=None, val=None):
"""
Initialization routine
Args:
init: can either be a number or another magnetic object
val: initial value (default: None)
Raises:
DataError: if init is none of the types above
"""
# if init is another magnetic object, do a copy (init by copy)
if isinstance(init, type(self)):
self.values = init.values.copy()
# if init is a number, create magnetic object with val as initial value
elif isinstance(init, int) or isinstance(init, tuple):
self.values = np.empty(init)
self.values[:] = val
# something is wrong, if none of the ones above hit
else:
raise DataError('something went wrong during %s initialization' % type(self))
def __add__(self, other):
"""
Overloading the addition operator for magnetic types
Args:
other (magnetic): magnetic object to be added
Raises:
DataError: if other is not a magnetic object
Returns:
magnetic: sum of caller and other values (self+other)
"""
if isinstance(other, type(self)):
# always create new magnetic, since otherwise c = a + b changes a as well!
M = fields.magnetic(self.values.shape)
M.values = self.values + other.values
return M
else:
raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))
def __sub__(self, other):
"""
Overloading the subtraction operator for magnetic types
Args:
other (magnetic): magnetic object to be subtracted
Raises:
DataError: if other is not a magnetic object
Returns:
magnetic: difference of caller and other values (self-other)
"""
if isinstance(other, type(self)):
# always create new magnetic, since otherwise c = a - b changes a as well!
M = fields.magnetic(self.values.shape)
M.values = self.values - other.values
return M
else:
raise DataError("Type error: cannot subtract %s from %s" % (type(other), type(self)))
def __rmul__(self, other):
"""
Overloading the right multiply by factor operator for magnetic types
Args:
other (float): factor
Raises:
DataError: if other is not a float
Returns:
magnetic: original values scaled by factor
"""
if isinstance(other, float):
# create new magnetic, no specific interpretation of float factor
M = fields.magnetic(self.values.shape)
M.values = self.values * other
return M
else:
raise DataError("Type error: cannot multiply %s to %s" % (type(other), type(self)))
def __init__(self, init=None, val=None):
"""
Initialization routine
Args:
init: can either be a number or another fields object
val: initial tuple of values for electric and magnetic (default: (None,None))
Raises:
DataError: if init is none of the types above
"""
# if init is another fields object, do a copy (init by copy)
if isinstance(init, type(self)):
self.elec = fields.electric(init.elec)
self.magn = fields.magnetic(init.magn)
# if init is a number, create fields object and pick the corresponding initial values
elif isinstance(init, int) or isinstance(init, tuple):
if isinstance(val, int) or isinstance(val, float) or val is None:
self.elec = fields.electric(init, val=val)
self.magn = fields.magnetic(init, val=val)
elif isinstance(val, tuple) and len(val) == 2:
self.elec = fields.electric(init, val=val[0])
self.magn = fields.magnetic(init, val=val[1])
else:
raise DataError('wrong type of val, got %s' % val)
# something is wrong, if none of the ones above hit
else:
raise DataError('something went wrong during %s initialization' % type(self))
def __add__(self, other):
"""
Overloading the addition operator for fields types
Args:
other (fields): fields object to be added
Raises:
DataError: if other is not a fields object
Returns:
fields: sum of caller and other values (self+other)
"""
if isinstance(other, type(self)):
# always create new fields, since otherwise c = a + b changes a as well!
p = fields(self.elec.values.shape)
p.elec = self.elec + other.elec
p.magn = self.magn + other.magn
return p
else:
raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))
def __sub__(self, other):
"""
Overloading the subtraction operator for fields types
Args:
other (fields): fields object to be subtracted
Raises:
DataError: if other is not a fields object
Returns:
fields: differences between caller and other values (self-other)
"""
if isinstance(other, type(self)):
# always create new fields, since otherwise c = a - b changes a as well!
p = fields(self.elec.values.shape)
p.elec = self.elec - other.elec
p.magn = self.magn - other.magn
return p
else:
raise DataError("Type error: cannot subtract %s from %s" % (type(other), type(self)))
def __rmul__(self, other):
"""
Overloading the multiply with factor from right operator for fields types
Args:
other (float): factor
Raises:
DataError: if other is not a float
Returns:
fields: scaled fields
"""
if isinstance(other, float):
# always create new fields, since otherwise scaling would change the original as well!
p = fields(self.elec.values.shape)
p.elec = other * self.elec
p.magn = other * self.magn
return p
else:
raise DataError("Type error: cannot multiply %s with %s" % (type(other), type(self)))
| 36.316667 | 111 | 0.547428 | 3,247 | 28,327 | 4.737604 | 0.058208 | 0.024963 | 0.029578 | 0.032893 | 0.897484 | 0.881948 | 0.864981 | 0.826952 | 0.816551 | 0.802964 | 0 | 0.002135 | 0.371765 | 28,327 | 779 | 112 | 36.363286 | 0.862272 | 0.40322 | 0 | 0.770318 | 0 | 0 | 0.079566 | 0 | 0 | 0 | 0 | 0.001284 | 0 | 1 | 0.120141 | false | 0 | 0.010601 | 0 | 0.250883 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c1c94b61fa0cd855b4f02990a428a0336604d1e6 | 8,735 | py | Python | Code/Simulations/physics_models.py | jaspertaylor-projects/QuantumLatticeGasAlgorithm | a7fb8da08e0bd41c5b7fda96f2d5cb50a95cb0ca | [
"MIT"
] | 1 | 2020-05-21T19:34:20.000Z | 2020-05-21T19:34:20.000Z | Code/Simulations/physics_models.py | jaspertaylor-projects/QuantumLatticeGasAlgorithm | a7fb8da08e0bd41c5b7fda96f2d5cb50a95cb0ca | [
"MIT"
] | null | null | null | Code/Simulations/physics_models.py | jaspertaylor-projects/QuantumLatticeGasAlgorithm | a7fb8da08e0bd41c5b7fda96f2d5cb50a95cb0ca | [
"MIT"
] | null | null | null | import numpy as np
evolution_func = None
gpu_field_copy_pointers = []
def set_field_pointers(gpu, num_GPUs):
global gpu_field_copy_pointers
for i in xrange(num_GPUs):
gpu_field_copy_pointers.append(gpu[i].QFieldCopy)
def set_model(gpu, num_GPUs, model, dimensions, num_particles):
global evolution_func, gpu_field_copy_pointers
if num_particles == 1:
evolution_func = eval(get_dim_string(dimensions) + "_D_EVOLUTION")
else:
evolution_func = eval(get_dim_string(dimensions) + "_D_EVOLUTION_MULTI")
set_field_pointers(gpu, num_GPUs)
def get_dim_string(dimensions):
if dimensions == 1:
return "ONE"
if dimensions == 2:
return "TWO"
if dimensions == 3:
return "THREE"
def evolve(gpu, num_GPUs, steps):
for i in xrange(steps):
evolution_func(gpu, num_GPUs)
INCREMENT_TIME_STEP(gpu, num_GPUs)
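# Illustrative driver sketch (assumption: `gpu` is a list of per-device solver
# objects exposing the kernels invoked below, constructed elsewhere):
#
#   set_model(gpu, num_GPUs=2, model=None, dimensions=2, num_particles=1)
#   evolve(gpu, num_GPUs=2, steps=100)   # 100 applications of TWO_D_EVOLUTION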
def INCREMENT_TIME_STEP(gpu, num_GPUs):
for i in xrange(num_GPUs):
gpu[i].incrementTime()
def SYNC(gpu, num_GPUs):
for i in xrange(num_GPUs):
gpu[i].synchronizeDevice()
def COLLIDE(gpu, num_GPUs):
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].collide()
def SET_COPY(gpu, num_GPUs):
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].setCopy()
def STREAM_COLLIDE(gpu, num_GPUs, dimension, component):
COLLIDE(gpu, num_GPUs)
if dimension == "X":
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
# gpu[i].stream("Pos", dimension, component, NeighborField = gpu[(i-1+num_GPUs)%num_GPUs].QFieldCopy)
gpu[i].stream("Pos", dimension, component, num_GPUs, gpu_field_copy_pointers)
else:
for i in xrange(num_GPUs):
gpu[i].stream("Pos", dimension, component, num_GPUs, gpu_field_copy_pointers)
COLLIDE(gpu, num_GPUs)
if dimension == "X":
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
# gpu[i].stream("Neg", dimension, component, NeighborField = gpu[(i+1)%num_GPUs].QFieldCopy)
gpu[i].stream("Neg", dimension, component, num_GPUs, gpu_field_copy_pointers)
else:
for i in xrange(num_GPUs):
gpu[i].stream("Neg", dimension, component, num_GPUs, gpu_field_copy_pointers)
def STREAM_COLLIDE_MULTI(gpu, num_GPUs, dimension, component):
SET_COPY(gpu, num_GPUs)
# Collide
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].collide_multi(dimension, num_GPUs, gpu_field_copy_pointers)
SET_COPY(gpu, num_GPUs)
# Stream
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].stream("Pos", dimension, component, num_GPUs, gpu_field_copy_pointers)
SET_COPY(gpu, num_GPUs)
# Collide
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].collide_multi(dimension, num_GPUs, gpu_field_copy_pointers)
SET_COPY(gpu, num_GPUs)
# Stream
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].stream("Neg", dimension, component, num_GPUs, gpu_field_copy_pointers)
def INTERNAL(gpu, num_GPUs):
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].internal_interaction()
def EXTERNAL(gpu, num_GPUs):
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].external_interaction()
def MEASUREMENT(gpu, num_GPUs):
for i in xrange(num_GPUs):
SYNC(gpu, num_GPUs)
gpu[i].measurement_interaction()
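# The evolution routines below alternate stream-collide sweeps with the
# internal and external interaction kernels and end each full step with a
# measurement kernel. In the 2-D and 3-D variants the sweep order is mirrored
# after the "Halfway" marker; reversing the dimension order in the second half
# symmetrizes the operator splitting, presumably to reduce splitting error.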
def ONE_D_EVOLUTION(gpu, num_GPUs):
STREAM_COLLIDE(gpu, num_GPUs, "X", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "X", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "X", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "X", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
MEASUREMENT(gpu, num_GPUs)
def TWO_D_EVOLUTION(gpu, num_GPUs):
STREAM_COLLIDE(gpu, num_GPUs, "X", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Y", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "X", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Y", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
######### Halfway ##########
STREAM_COLLIDE(gpu, num_GPUs, "Y", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "X", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Y", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "X", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
MEASUREMENT(gpu, num_GPUs)
def THREE_D_EVOLUTION(gpu, num_GPUs):
STREAM_COLLIDE(gpu, num_GPUs, "X", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "X", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Y", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Y", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Z", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Z", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
######### Halfway ##########
STREAM_COLLIDE(gpu, num_GPUs, "X", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "X", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Y", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Y", 0)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Z", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE(gpu, num_GPUs, "Z", 1)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
MEASUREMENT(gpu, num_GPUs)
def ONE_D_EVOLUTION_MULTI(gpu, num_GPUs):
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
MEASUREMENT(gpu, num_GPUs)
def TWO_D_EVOLUTION_MULTI(gpu, num_GPUs):
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Y", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Y", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
######### Halfway ##########
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Y", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Y", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
MEASUREMENT(gpu, num_GPUs)
def THREE_D_EVOLUTION_MULTI(gpu, num_GPUs):
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Y", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Y", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Z", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Z", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
######### Halfway ##########
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "X", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Y", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Y", 0)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Z", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
STREAM_COLLIDE_MULTI(gpu, num_GPUs, "Z", 1)
SET_COPY(gpu, num_GPUs)
INTERNAL(gpu, num_GPUs)
EXTERNAL(gpu, num_GPUs)
MEASUREMENT(gpu, num_GPUs) | 29.214047 | 104 | 0.728563 | 1,438 | 8,735 | 4.129346 | 0.045202 | 0.281745 | 0.357023 | 0.148535 | 0.913944 | 0.898956 | 0.870664 | 0.866959 | 0.858538 | 0.842034 | 0 | 0.00714 | 0.134173 | 8,735 | 299 | 105 | 29.214047 | 0.777998 | 0.028964 | 0 | 0.830189 | 0 | 0 | 0.012976 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.003774 | null | null | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
c1dfcc38e0b8e684d3b819cce548466a9a34f464 | 45,746 | py | Python | sdk/python/pulumi_aws/fsx/data_repository_association.py | dmelo/pulumi-aws | dd1a08d1fb93bab0d046aa410ca660f05ca0a58c | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/fsx/data_repository_association.py | dmelo/pulumi-aws | dd1a08d1fb93bab0d046aa410ca660f05ca0a58c | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/fsx/data_repository_association.py | dmelo/pulumi-aws | dd1a08d1fb93bab0d046aa410ca660f05ca0a58c | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DataRepositoryAssociationArgs', 'DataRepositoryAssociation']
@pulumi.input_type
class DataRepositoryAssociationArgs:
def __init__(__self__, *,
data_repository_path: pulumi.Input[str],
file_system_id: pulumi.Input[str],
file_system_path: pulumi.Input[str],
batch_import_meta_data_on_create: Optional[pulumi.Input[bool]] = None,
delete_data_in_filesystem: Optional[pulumi.Input[bool]] = None,
imported_file_chunk_size: Optional[pulumi.Input[int]] = None,
s3: Optional[pulumi.Input['DataRepositoryAssociationS3Args']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a DataRepositoryAssociation resource.
:param pulumi.Input[str] data_repository_path: The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket path, such as s3://myBucket/myPrefix/. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
:param pulumi.Input[str] file_system_id: The ID of the Amazon FSx file system on which to create a data repository association.
:param pulumi.Input[str] file_system_path: A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `data_repository_path`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
:param pulumi.Input[bool] batch_import_meta_data_on_create: Set to true to run an import data repository task to import metadata from the data repository to the file system after the data repository association is created. Defaults to `false`.
:param pulumi.Input[bool] delete_data_in_filesystem: Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
:param pulumi.Input[int] imported_file_chunk_size: For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
:param pulumi.Input['DataRepositoryAssociationS3Args'] s3: See the `s3` configuration block. Max of 1.
The configuration for an Amazon S3 data repository linked to an Amazon FSx Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the data repository association. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
pulumi.set(__self__, "data_repository_path", data_repository_path)
pulumi.set(__self__, "file_system_id", file_system_id)
pulumi.set(__self__, "file_system_path", file_system_path)
if batch_import_meta_data_on_create is not None:
pulumi.set(__self__, "batch_import_meta_data_on_create", batch_import_meta_data_on_create)
if delete_data_in_filesystem is not None:
pulumi.set(__self__, "delete_data_in_filesystem", delete_data_in_filesystem)
if imported_file_chunk_size is not None:
pulumi.set(__self__, "imported_file_chunk_size", imported_file_chunk_size)
if s3 is not None:
pulumi.set(__self__, "s3", s3)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tags_all is not None:
pulumi.set(__self__, "tags_all", tags_all)
@property
@pulumi.getter(name="dataRepositoryPath")
def data_repository_path(self) -> pulumi.Input[str]:
"""
The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket path, such as s3://myBucket/myPrefix/. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
"""
return pulumi.get(self, "data_repository_path")
@data_repository_path.setter
def data_repository_path(self, value: pulumi.Input[str]):
pulumi.set(self, "data_repository_path", value)
@property
@pulumi.getter(name="fileSystemId")
def file_system_id(self) -> pulumi.Input[str]:
"""
The ID of the Amazon FSx file system on which to create a data repository association.
"""
return pulumi.get(self, "file_system_id")
@file_system_id.setter
def file_system_id(self, value: pulumi.Input[str]):
pulumi.set(self, "file_system_id", value)
@property
@pulumi.getter(name="fileSystemPath")
def file_system_path(self) -> pulumi.Input[str]:
"""
A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `data_repository_path`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
"""
return pulumi.get(self, "file_system_path")
@file_system_path.setter
def file_system_path(self, value: pulumi.Input[str]):
pulumi.set(self, "file_system_path", value)
@property
@pulumi.getter(name="batchImportMetaDataOnCreate")
def batch_import_meta_data_on_create(self) -> Optional[pulumi.Input[bool]]:
"""
Set to true to run an import data repository task to import metadata from the data repository to the file system after the data repository association is created. Defaults to `false`.
"""
return pulumi.get(self, "batch_import_meta_data_on_create")
@batch_import_meta_data_on_create.setter
def batch_import_meta_data_on_create(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "batch_import_meta_data_on_create", value)
@property
@pulumi.getter(name="deleteDataInFilesystem")
def delete_data_in_filesystem(self) -> Optional[pulumi.Input[bool]]:
"""
Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
"""
return pulumi.get(self, "delete_data_in_filesystem")
@delete_data_in_filesystem.setter
def delete_data_in_filesystem(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "delete_data_in_filesystem", value)
@property
@pulumi.getter(name="importedFileChunkSize")
def imported_file_chunk_size(self) -> Optional[pulumi.Input[int]]:
"""
For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
"""
return pulumi.get(self, "imported_file_chunk_size")
@imported_file_chunk_size.setter
def imported_file_chunk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "imported_file_chunk_size", value)
@property
@pulumi.getter
def s3(self) -> Optional[pulumi.Input['DataRepositoryAssociationS3Args']]:
"""
See the `s3` configuration block. Max of 1.
The configuration for an Amazon S3 data repository linked to an Amazon FSx Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
"""
return pulumi.get(self, "s3")
@s3.setter
def s3(self, value: Optional[pulumi.Input['DataRepositoryAssociationS3Args']]):
pulumi.set(self, "s3", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags to assign to the data repository association. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
return pulumi.get(self, "tags_all")
@tags_all.setter
def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags_all", value)
@pulumi.input_type
class _DataRepositoryAssociationState:
def __init__(__self__, *,
arn: Optional[pulumi.Input[str]] = None,
association_id: Optional[pulumi.Input[str]] = None,
batch_import_meta_data_on_create: Optional[pulumi.Input[bool]] = None,
data_repository_path: Optional[pulumi.Input[str]] = None,
delete_data_in_filesystem: Optional[pulumi.Input[bool]] = None,
file_system_id: Optional[pulumi.Input[str]] = None,
file_system_path: Optional[pulumi.Input[str]] = None,
imported_file_chunk_size: Optional[pulumi.Input[int]] = None,
s3: Optional[pulumi.Input['DataRepositoryAssociationS3Args']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering DataRepositoryAssociation resources.
:param pulumi.Input[str] arn: Amazon Resource Name of the file system.
:param pulumi.Input[bool] batch_import_meta_data_on_create: Set to true to run an import data repository task to import metadata from the data repository to the file system after the data repository association is created. Defaults to `false`.
:param pulumi.Input[str] data_repository_path: The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket path, such as s3://myBucket/myPrefix/. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
:param pulumi.Input[bool] delete_data_in_filesystem: Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
:param pulumi.Input[str] file_system_id: The ID of the Amazon FSx file system on which to create a data repository association.
:param pulumi.Input[str] file_system_path: A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `data_repository_path`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
:param pulumi.Input[int] imported_file_chunk_size: For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
:param pulumi.Input['DataRepositoryAssociationS3Args'] s3: See the `s3` configuration block. Max of 1.
The configuration for an Amazon S3 data repository linked to an Amazon FSx Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the data repository association. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
if arn is not None:
pulumi.set(__self__, "arn", arn)
if association_id is not None:
pulumi.set(__self__, "association_id", association_id)
if batch_import_meta_data_on_create is not None:
pulumi.set(__self__, "batch_import_meta_data_on_create", batch_import_meta_data_on_create)
if data_repository_path is not None:
pulumi.set(__self__, "data_repository_path", data_repository_path)
if delete_data_in_filesystem is not None:
pulumi.set(__self__, "delete_data_in_filesystem", delete_data_in_filesystem)
if file_system_id is not None:
pulumi.set(__self__, "file_system_id", file_system_id)
if file_system_path is not None:
pulumi.set(__self__, "file_system_path", file_system_path)
if imported_file_chunk_size is not None:
pulumi.set(__self__, "imported_file_chunk_size", imported_file_chunk_size)
if s3 is not None:
pulumi.set(__self__, "s3", s3)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tags_all is not None:
pulumi.set(__self__, "tags_all", tags_all)
@property
@pulumi.getter
def arn(self) -> Optional[pulumi.Input[str]]:
"""
Amazon Resource Name of the file system.
"""
return pulumi.get(self, "arn")
@arn.setter
def arn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "arn", value)
@property
@pulumi.getter(name="associationId")
def association_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "association_id")
@association_id.setter
def association_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "association_id", value)
@property
@pulumi.getter(name="batchImportMetaDataOnCreate")
def batch_import_meta_data_on_create(self) -> Optional[pulumi.Input[bool]]:
"""
Set to true to run an import data repository task to import metadata from the data repository to the file system after the data repository association is created. Defaults to `false`.
"""
return pulumi.get(self, "batch_import_meta_data_on_create")
@batch_import_meta_data_on_create.setter
def batch_import_meta_data_on_create(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "batch_import_meta_data_on_create", value)
@property
@pulumi.getter(name="dataRepositoryPath")
def data_repository_path(self) -> Optional[pulumi.Input[str]]:
"""
The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket path, such as s3://myBucket/myPrefix/. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
"""
return pulumi.get(self, "data_repository_path")
@data_repository_path.setter
def data_repository_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "data_repository_path", value)
@property
@pulumi.getter(name="deleteDataInFilesystem")
def delete_data_in_filesystem(self) -> Optional[pulumi.Input[bool]]:
"""
Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
"""
return pulumi.get(self, "delete_data_in_filesystem")
@delete_data_in_filesystem.setter
def delete_data_in_filesystem(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "delete_data_in_filesystem", value)
@property
@pulumi.getter(name="fileSystemId")
def file_system_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the Amazon FSx file system on which to create a data repository association.
"""
return pulumi.get(self, "file_system_id")
@file_system_id.setter
def file_system_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "file_system_id", value)
@property
@pulumi.getter(name="fileSystemPath")
def file_system_path(self) -> Optional[pulumi.Input[str]]:
"""
A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `data_repository_path`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
"""
return pulumi.get(self, "file_system_path")
@file_system_path.setter
def file_system_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "file_system_path", value)
@property
@pulumi.getter(name="importedFileChunkSize")
def imported_file_chunk_size(self) -> Optional[pulumi.Input[int]]:
"""
For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
"""
return pulumi.get(self, "imported_file_chunk_size")
@imported_file_chunk_size.setter
def imported_file_chunk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "imported_file_chunk_size", value)
@property
@pulumi.getter
def s3(self) -> Optional[pulumi.Input['DataRepositoryAssociationS3Args']]:
"""
See the `s3` configuration block. Max of 1.
The configuration for an Amazon S3 data repository linked to an Amazon FSx Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
"""
return pulumi.get(self, "s3")
@s3.setter
def s3(self, value: Optional[pulumi.Input['DataRepositoryAssociationS3Args']]):
pulumi.set(self, "s3", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags to assign to the data repository association. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
return pulumi.get(self, "tags_all")
@tags_all.setter
def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags_all", value)
class DataRepositoryAssociation(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
batch_import_meta_data_on_create: Optional[pulumi.Input[bool]] = None,
data_repository_path: Optional[pulumi.Input[str]] = None,
delete_data_in_filesystem: Optional[pulumi.Input[bool]] = None,
file_system_id: Optional[pulumi.Input[str]] = None,
file_system_path: Optional[pulumi.Input[str]] = None,
imported_file_chunk_size: Optional[pulumi.Input[int]] = None,
s3: Optional[pulumi.Input[pulumi.InputType['DataRepositoryAssociationS3Args']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages an FSx for Lustre Data Repository Association. See [Linking your file system to an S3 bucket](https://docs.aws.amazon.com/fsx/latest/LustreGuide/create-dra-linked-data-repo.html) for more information.
> **NOTE:** Data Repository Associations are only compatible with AWS FSx for Lustre File Systems and the `PERSISTENT_2` deployment type.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example_bucket = aws.s3.Bucket("exampleBucket", acl="private")
example_lustre_file_system = aws.fsx.LustreFileSystem("exampleLustreFileSystem",
storage_capacity=1200,
subnet_ids=[aws_subnet["example"]["id"]],
deployment_type="PERSISTENT_2",
per_unit_storage_throughput=125)
example_data_repository_association = aws.fsx.DataRepositoryAssociation("exampleDataRepositoryAssociation",
file_system_id=example_lustre_file_system.id,
data_repository_path=example_bucket.id.apply(lambda id: f"s3://{id}"),
file_system_path="/my-bucket",
s3=aws.fsx.DataRepositoryAssociationS3Args(
auto_export_policy=aws.fsx.DataRepositoryAssociationS3AutoExportPolicyArgs(
events=[
"NEW",
"CHANGED",
"DELETED",
],
),
auto_import_policy=aws.fsx.DataRepositoryAssociationS3AutoImportPolicyArgs(
events=[
"NEW",
"CHANGED",
"DELETED",
],
),
))
```
## Import
FSx Data Repository Associations can be imported using the `id`, e.g.,
```sh
$ pulumi import aws:fsx/dataRepositoryAssociation:DataRepositoryAssociation example dra-0b1cfaeca11088b10
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] batch_import_meta_data_on_create: Set to true to run an import data repository task to import metadata from the data repository to the file system after the data repository association is created. Defaults to `false`.
:param pulumi.Input[str] data_repository_path: The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket path, such as s3://myBucket/myPrefix/. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
:param pulumi.Input[bool] delete_data_in_filesystem: Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
:param pulumi.Input[str] file_system_id: The ID of the Amazon FSx file system on which to create a data repository association.
:param pulumi.Input[str] file_system_path: A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `data_repository_path`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
:param pulumi.Input[int] imported_file_chunk_size: For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
:param pulumi.Input[pulumi.InputType['DataRepositoryAssociationS3Args']] s3: See the `s3` configuration block. Max of 1.
The configuration for an Amazon S3 data repository linked to an Amazon FSx Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the data repository association. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DataRepositoryAssociationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an FSx for Lustre Data Repository Association. See [Linking your file system to an S3 bucket](https://docs.aws.amazon.com/fsx/latest/LustreGuide/create-dra-linked-data-repo.html) for more information.
> **NOTE:** Data Repository Associations are only compatible with AWS FSx for Lustre File Systems and the `PERSISTENT_2` deployment type.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example_bucket = aws.s3.Bucket("exampleBucket", acl="private")
example_lustre_file_system = aws.fsx.LustreFileSystem("exampleLustreFileSystem",
storage_capacity=1200,
subnet_ids=[aws_subnet["example"]["id"]],
deployment_type="PERSISTENT_2",
per_unit_storage_throughput=125)
example_data_repository_association = aws.fsx.DataRepositoryAssociation("exampleDataRepositoryAssociation",
file_system_id=example_lustre_file_system.id,
data_repository_path=example_bucket.id.apply(lambda id: f"s3://{id}"),
file_system_path="/my-bucket",
s3=aws.fsx.DataRepositoryAssociationS3Args(
auto_export_policy=aws.fsx.DataRepositoryAssociationS3AutoExportPolicyArgs(
events=[
"NEW",
"CHANGED",
"DELETED",
],
),
auto_import_policy=aws.fsx.DataRepositoryAssociationS3AutoImportPolicyArgs(
events=[
"NEW",
"CHANGED",
"DELETED",
],
),
))
```
## Import
FSx Data Repository Associations can be imported using the `id`, e.g.,
```sh
$ pulumi import aws:fsx/dataRepositoryAssociation:DataRepositoryAssociation example dra-0b1cfaeca11088b10
```
:param str resource_name: The name of the resource.
:param DataRepositoryAssociationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
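# Dispatch: a typed DataRepositoryAssociationArgs instance is unpacked via
# get_resource_args_opts and forwarded as keyword arguments; otherwise the
# raw positional/keyword arguments are passed straight to _internal_init.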
resource_args, opts = _utilities.get_resource_args_opts(DataRepositoryAssociationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
batch_import_meta_data_on_create: Optional[pulumi.Input[bool]] = None,
data_repository_path: Optional[pulumi.Input[str]] = None,
delete_data_in_filesystem: Optional[pulumi.Input[bool]] = None,
file_system_id: Optional[pulumi.Input[str]] = None,
file_system_path: Optional[pulumi.Input[str]] = None,
imported_file_chunk_size: Optional[pulumi.Input[int]] = None,
s3: Optional[pulumi.Input[pulumi.InputType['DataRepositoryAssociationS3Args']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DataRepositoryAssociationArgs.__new__(DataRepositoryAssociationArgs)
__props__.__dict__["batch_import_meta_data_on_create"] = batch_import_meta_data_on_create
if data_repository_path is None and not opts.urn:
raise TypeError("Missing required property 'data_repository_path'")
__props__.__dict__["data_repository_path"] = data_repository_path
__props__.__dict__["delete_data_in_filesystem"] = delete_data_in_filesystem
if file_system_id is None and not opts.urn:
raise TypeError("Missing required property 'file_system_id'")
__props__.__dict__["file_system_id"] = file_system_id
if file_system_path is None and not opts.urn:
raise TypeError("Missing required property 'file_system_path'")
__props__.__dict__["file_system_path"] = file_system_path
__props__.__dict__["imported_file_chunk_size"] = imported_file_chunk_size
__props__.__dict__["s3"] = s3
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
__props__.__dict__["arn"] = None
__props__.__dict__["association_id"] = None
super(DataRepositoryAssociation, __self__).__init__(
'aws:fsx/dataRepositoryAssociation:DataRepositoryAssociation',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
arn: Optional[pulumi.Input[str]] = None,
association_id: Optional[pulumi.Input[str]] = None,
batch_import_meta_data_on_create: Optional[pulumi.Input[bool]] = None,
data_repository_path: Optional[pulumi.Input[str]] = None,
delete_data_in_filesystem: Optional[pulumi.Input[bool]] = None,
file_system_id: Optional[pulumi.Input[str]] = None,
file_system_path: Optional[pulumi.Input[str]] = None,
imported_file_chunk_size: Optional[pulumi.Input[int]] = None,
s3: Optional[pulumi.Input[pulumi.InputType['DataRepositoryAssociationS3Args']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'DataRepositoryAssociation':
"""
Get an existing DataRepositoryAssociation resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] arn: Amazon Resource Name of the file system.
:param pulumi.Input[bool] batch_import_meta_data_on_create: Set to `true` to run a data repository import task that imports metadata from the data repository to the file system after the association is created. Defaults to `false`.
:param pulumi.Input[str] data_repository_path: The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket or prefix in the format `s3://myBucket/myPrefix/`. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
:param pulumi.Input[bool] delete_data_in_filesystem: Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
:param pulumi.Input[str] file_system_id: The ID of the Amazon FSx file system on which to create a data repository association.
:param pulumi.Input[str] file_system_path: A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `data_repository_path`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
:param pulumi.Input[int] imported_file_chunk_size: For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
:param pulumi.Input[pulumi.InputType['DataRepositoryAssociationS3Args']] s3: See the `s3` configuration block. Max of 1.
The configuration for an Amazon S3 data repository linked to an Amazon FSx Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the data repository association. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DataRepositoryAssociationState.__new__(_DataRepositoryAssociationState)
__props__.__dict__["arn"] = arn
__props__.__dict__["association_id"] = association_id
__props__.__dict__["batch_import_meta_data_on_create"] = batch_import_meta_data_on_create
__props__.__dict__["data_repository_path"] = data_repository_path
__props__.__dict__["delete_data_in_filesystem"] = delete_data_in_filesystem
__props__.__dict__["file_system_id"] = file_system_id
__props__.__dict__["file_system_path"] = file_system_path
__props__.__dict__["imported_file_chunk_size"] = imported_file_chunk_size
__props__.__dict__["s3"] = s3
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
return DataRepositoryAssociation(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
"""
Amazon Resource Name of the file system.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="associationId")
def association_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "association_id")
@property
@pulumi.getter(name="batchImportMetaDataOnCreate")
def batch_import_meta_data_on_create(self) -> pulumi.Output[Optional[bool]]:
"""
Set to `true` to run a data repository import task that imports metadata from the data repository to the file system after the association is created. Defaults to `false`.
"""
return pulumi.get(self, "batch_import_meta_data_on_create")
@property
@pulumi.getter(name="dataRepositoryPath")
def data_repository_path(self) -> pulumi.Output[str]:
"""
The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket or prefix in the format `s3://myBucket/myPrefix/`. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
"""
return pulumi.get(self, "data_repository_path")
@property
@pulumi.getter(name="deleteDataInFilesystem")
def delete_data_in_filesystem(self) -> pulumi.Output[Optional[bool]]:
"""
Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
"""
return pulumi.get(self, "delete_data_in_filesystem")
@property
@pulumi.getter(name="fileSystemId")
def file_system_id(self) -> pulumi.Output[str]:
"""
The ID of the Amazon FSx file system on which to create a data repository association.
"""
return pulumi.get(self, "file_system_id")
@property
@pulumi.getter(name="fileSystemPath")
def file_system_path(self) -> pulumi.Output[str]:
"""
A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `data_repository_path`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
"""
return pulumi.get(self, "file_system_path")
@property
@pulumi.getter(name="importedFileChunkSize")
def imported_file_chunk_size(self) -> pulumi.Output[int]:
"""
For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
"""
return pulumi.get(self, "imported_file_chunk_size")
@property
@pulumi.getter
def s3(self) -> pulumi.Output['outputs.DataRepositoryAssociationS3']:
"""
See the `s3` configuration block. Max of 1.
The configuration for an Amazon S3 data repository linked to an Amazon FSx Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
"""
return pulumi.get(self, "s3")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A map of tags to assign to the data repository association. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
"""
A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
return pulumi.get(self, "tags_all")
| 68.074405 | 727 | 0.706073 | 6,143 | 45,746 | 5.081393 | 0.050301 | 0.058626 | 0.048695 | 0.018869 | 0.930899 | 0.922153 | 0.913791 | 0.90732 | 0.903892 | 0.891366 | 0 | 0.005965 | 0.212084 | 45,746 | 671 | 728 | 68.175857 | 0.86006 | 0.540987 | 0 | 0.739496 | 1 | 0 | 0.132322 | 0.07364 | 0 | 0 | 0 | 0 | 0 | 1 | 0.162465 | false | 0.002801 | 0.165266 | 0.005602 | 0.42577 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e74aaf4b999ec5eac7b640bdc02e76c25b7e9cfc | 10,679 | py | Python | tests/test_curves.py | MothVine/DESC | 8f18ca63b34dad07ec67a4d43945d39287b303b8 | [
"MIT"
] | null | null | null | tests/test_curves.py | MothVine/DESC | 8f18ca63b34dad07ec67a4d43945d39287b303b8 | [
"MIT"
] | null | null | null | tests/test_curves.py | MothVine/DESC | 8f18ca63b34dad07ec67a4d43945d39287b303b8 | [
"MIT"
] | null | null | null | import numpy as np
import unittest
import pytest
from desc.geometry import FourierRZCurve, FourierXYZCurve, FourierPlanarCurve
from desc.grid import LinearGrid
class TestRZCurve(unittest.TestCase):
def test_length(self):
c = FourierRZCurve()
np.testing.assert_allclose(c.compute_length(grid=20), 10 * 2 * np.pi)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_length(grid=20), 10 * 2 * np.pi)
def test_curvature(self):
c = FourierRZCurve()
np.testing.assert_allclose(c.compute_curvature(grid=20), 1 / 10)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_curvature(grid=20), 1 / 10)
def test_torsion(self):
c = FourierRZCurve()
np.testing.assert_allclose(c.compute_torsion(grid=20), 0)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_torsion(grid=20), 0)
def test_frenet(self):
c = FourierRZCurve()
c.grid = 1
T, N, B = c.compute_frenet_frame(basis="rpz")
np.testing.assert_allclose(T, np.array([[0, 1, 0]]), atol=1e-12)
np.testing.assert_allclose(N, np.array([[-1, 0, 0]]), atol=1e-12)
np.testing.assert_allclose(B, np.array([[0, 0, 1]]), atol=1e-12)
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
c.translate([1, 1, 1])
c.grid = np.array([[0, 0, 0]])
T, N, B = c.compute_frenet_frame(basis="xyz")
np.testing.assert_allclose(T, np.array([[0, 1, 0]]), atol=1e-12)
np.testing.assert_allclose(N, np.array([[1, 0, 0]]), atol=1e-12)
np.testing.assert_allclose(B, np.array([[0, 0, 1]]), atol=1e-12)
def test_coords(self):
c = FourierRZCurve()
x, y, z = c.compute_coordinates(grid=np.array([[0.0, 0.0, 0.0]]), basis="xyz").T
np.testing.assert_allclose(x, 10)
np.testing.assert_allclose(y, 0)
np.testing.assert_allclose(z, 0)
c.rotate(angle=np.pi / 2)
c.flip([0, 1, 0])
c.translate([1, 1, 1])
r, p, z = c.compute_coordinates(grid=np.array([[0.0, 0.0, 0.0]]), basis="rpz").T
np.testing.assert_allclose(r, np.sqrt(1 ** 2 + 9 ** 2))
np.testing.assert_allclose(p, np.arctan2(-9, 1))
np.testing.assert_allclose(z, 1)
def test_misc(self):
c = FourierRZCurve()
grid = LinearGrid(L=1, M=4, N=4)
c.grid = grid
assert grid.eq(c.grid)
R, Z = c.get_coeffs(0)
np.testing.assert_allclose(R, 10)
np.testing.assert_allclose(Z, 0)
c.set_coeffs(0, 5, None)
np.testing.assert_allclose(
c.R_n,
[
5,
],
)
np.testing.assert_allclose(c.Z_n, [])
s = c.copy()
assert s.eq(c)
c.change_resolution(5)
assert c.N == 5
c.set_coeffs(-1, None, 2)
np.testing.assert_allclose(
c.R_n,
[5, 0, 0, 0, 0, 0],
)
np.testing.assert_allclose(c.Z_n, [0, 0, 0, 0, 2])
with pytest.raises(ValueError):
c.R_n = s.R_n
with pytest.raises(ValueError):
c.Z_n = s.Z_n
c.name = "my curve"
assert "my" in c.name
assert c.name in str(c)
assert "FourierRZCurve" in str(c)
assert c.sym
def test_asserts(self):
with pytest.raises(ValueError):
c = FourierRZCurve(R_n=[])
c = FourierRZCurve()
with pytest.raises(NotImplementedError):
c.compute_coordinates(dt=4)
with pytest.raises(TypeError):
c.grid = [1, 2, 3]
class TestXYZCurve(unittest.TestCase):
def test_length(self):
c = FourierXYZCurve()
np.testing.assert_allclose(c.compute_length(grid=20), 2 * 2 * np.pi)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_length(grid=20), 2 * 2 * np.pi)
def test_curvature(self):
c = FourierXYZCurve()
np.testing.assert_allclose(c.compute_curvature(grid=20), 1 / 2)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_curvature(grid=20), 1 / 2)
def test_torsion(self):
c = FourierXYZCurve(modes=[-1, 0, 1])
np.testing.assert_allclose(c.compute_torsion(grid=20), 0)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_curvature(grid=20), 1 / 2)
def test_frenet(self):
c = FourierXYZCurve()
c.grid = 1
T, N, B = c.compute_frenet_frame(basis="rpz")
np.testing.assert_allclose(T, np.array([[0, 0, -1]]), atol=1e-12)
np.testing.assert_allclose(N, np.array([[-1, 0, 0]]), atol=1e-12)
np.testing.assert_allclose(B, np.array([[0, 1, 0]]), atol=1e-12)
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
c.translate([1, 1, 1])
c.grid = np.array([0, 0, 0])
T, N, B = c.compute_frenet_frame(basis="xyz")
np.testing.assert_allclose(T, np.array([[0, 0, -1]]), atol=1e-12)
np.testing.assert_allclose(N, np.array([[1, 0, 0]]), atol=1e-12)
np.testing.assert_allclose(B, np.array([[0, 1, 0]]), atol=1e-12)
def test_coords(self):
c = FourierXYZCurve()
x, y, z = c.compute_coordinates(grid=np.array([[0.0, 0.0, 0.0]]), basis="xyz").T
np.testing.assert_allclose(x, 12)
np.testing.assert_allclose(y, 0)
np.testing.assert_allclose(z, 0)
c.rotate(angle=np.pi / 2)
c.flip([0, 1, 0])
c.translate([1, 1, 1])
r, p, z = c.compute_coordinates(grid=np.array([[0.0, 0.0, 0.0]]), basis="rpz").T
np.testing.assert_allclose(r, np.sqrt(1 ** 2 + 11 ** 2))
np.testing.assert_allclose(p, np.arctan2(-11, 1))
np.testing.assert_allclose(z, 1)
def test_misc(self):
c = FourierXYZCurve()
grid = LinearGrid(L=1, M=4, N=4)
c.grid = grid
assert grid.eq(c.grid)
X, Y, Z = c.get_coeffs(0)
np.testing.assert_allclose(X, 10)
np.testing.assert_allclose(Y, 0)
np.testing.assert_allclose(Z, 0)
c.set_coeffs(0, 5, 2, 3)
np.testing.assert_allclose(c.X_n, [0, 5, 2])
np.testing.assert_allclose(c.Y_n, [0, 2, 0])
np.testing.assert_allclose(c.Z_n, [-2, 3, 0])
s = c.copy()
assert s.eq(c)
c.change_resolution(5)
assert c.N == 5
with pytest.raises(ValueError):
c.X_n = s.X_n
with pytest.raises(ValueError):
c.Y_n = s.Y_n
with pytest.raises(ValueError):
c.Z_n = s.Z_n
def test_asserts(self):
c = FourierXYZCurve()
with pytest.raises(KeyError):
c.compute_coordinates(dt=4)
with pytest.raises(TypeError):
c.grid = [1, 2, 3]
class TestPlanarCurve(unittest.TestCase):
def test_length(self):
c = FourierPlanarCurve(modes=[0])
np.testing.assert_allclose(c.compute_length(grid=20), 2 * 2 * np.pi)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_length(grid=20), 2 * 2 * np.pi)
def test_curvature(self):
c = FourierPlanarCurve()
np.testing.assert_allclose(c.compute_curvature(grid=20), 1 / 2)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_curvature(grid=20), 1 / 2)
def test_torsion(self):
c = FourierPlanarCurve()
np.testing.assert_allclose(c.compute_torsion(grid=20), 0)
c.translate([1, 1, 1])
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
np.testing.assert_allclose(c.compute_torsion(grid=20), 0)
def test_frenet(self):
c = FourierPlanarCurve()
c.grid = 1
T, N, B = c.compute_frenet_frame(basis="xyz")
np.testing.assert_allclose(T, np.array([[0, 0, -1]]), atol=1e-12)
np.testing.assert_allclose(N, np.array([[-1, 0, 0]]), atol=1e-12)
np.testing.assert_allclose(B, np.array([[0, 1, 0]]), atol=1e-12)
c.rotate(angle=np.pi)
c.flip([0, 1, 0])
c.translate([1, 1, 1])
c.grid = np.array([0, 0, 0])
T, N, B = c.compute_frenet_frame(grid=np.array([[0.0, 0.0, 0.0]]), basis="xyz")
np.testing.assert_allclose(T, np.array([[0, 0, -1]]), atol=1e-12)
np.testing.assert_allclose(N, np.array([[1, 0, 0]]), atol=1e-12)
np.testing.assert_allclose(B, np.array([[0, 1, 0]]), atol=1e-12)
def test_coords(self):
c = FourierPlanarCurve()
r, p, z = c.compute_coordinates(grid=np.array([[0.0, 0.0, 0.0]]), basis="rpz").T
np.testing.assert_allclose(r, 12)
np.testing.assert_allclose(p, 0)
np.testing.assert_allclose(z, 0)
dr, dp, dz = c.compute_coordinates(
grid=np.array([[0.0, 0.0, 0.0]]), dt=3, basis="rpz"
).T
np.testing.assert_allclose(dr, 0)
np.testing.assert_allclose(dp, 0)
np.testing.assert_allclose(dz, 2)
c.rotate(angle=np.pi / 2)
c.flip([0, 1, 0])
c.translate([1, 1, 1])
x, y, z = c.compute_coordinates(grid=np.array([[0.0, 0.0, 0.0]]), basis="xyz").T
np.testing.assert_allclose(x, 1)
np.testing.assert_allclose(y, -11)
np.testing.assert_allclose(z, 1)
def test_misc(self):
c = FourierPlanarCurve()
grid = LinearGrid(L=1, M=4, N=4)
c.grid = grid
assert grid.eq(c.grid)
r = c.get_coeffs(0)
np.testing.assert_allclose(r, 2)
c.set_coeffs(0, 3)
np.testing.assert_allclose(
c.r_n,
[
3,
],
)
c.normal = [1, 2, 3]
c.center = [3, 2, 1]
np.testing.assert_allclose(np.linalg.norm(c.normal), 1)
np.testing.assert_allclose(c.normal * np.linalg.norm(c.center), c.center[::-1])
s = c.copy()
assert s.eq(c)
c.change_resolution(5)
with pytest.raises(ValueError):
c.r_n = s.r_n
def test_asserts(self):
c = FourierPlanarCurve()
with pytest.raises(NotImplementedError):
c.compute_coordinates(dt=4)
with pytest.raises(TypeError):
c.grid = [1, 2, 3]
with pytest.raises(ValueError):
c.center = [4]
with pytest.raises(ValueError):
c.normal = [4]
| 35.128289 | 88 | 0.559978 | 1,632 | 10,679 | 3.563113 | 0.061275 | 0.112984 | 0.188306 | 0.288736 | 0.87601 | 0.835254 | 0.805847 | 0.771109 | 0.734996 | 0.69871 | 0 | 0.058603 | 0.276149 | 10,679 | 303 | 89 | 35.244224 | 0.693661 | 0 | 0 | 0.736842 | 0 | 0 | 0.005899 | 0 | 0 | 0 | 0 | 0 | 0.330827 | 1 | 0.078947 | false | 0 | 0.018797 | 0 | 0.109023 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
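A minimal sketch distilled from the assertions above, assuming `desc` is installed: the default `FourierRZCurve` is a circle of major radius 10, so its length is `10 * 2 * pi` and stays invariant under the rigid transformations the tests apply.
```python
import numpy as np
from desc.geometry import FourierRZCurve

c = FourierRZCurve()
assert np.isclose(c.compute_length(grid=20), 10 * 2 * np.pi)
```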
99dec72a68cce9f64d6e7d62dc9722b0dc903b44 | 45 | py | Python | 01-Lesson-Plans/06-Python-APIs/2/Extra_Content/Stu_CityPressure/Solved/config.py | anirudhmungre/sneaky-lessons | 8e48015c50865059db96f8cd369bcc15365d66c7 | [
"ADSL"
] | 1 | 2018-10-13T18:56:30.000Z | 2018-10-13T18:56:30.000Z | 01-Lesson-Plans/06-Python-APIs/2/Extra_Content/Stu_CityPressure/Solved/config.py | anirudhmungre/sneaky-lessons | 8e48015c50865059db96f8cd369bcc15365d66c7 | [
"ADSL"
] | null | null | null | 01-Lesson-Plans/06-Python-APIs/2/Extra_Content/Stu_CityPressure/Solved/config.py | anirudhmungre/sneaky-lessons | 8e48015c50865059db96f8cd369bcc15365d66c7 | [
"ADSL"
] | null | null | null | api_key = "25bc90a1196e6f153eece0bc0b0fc9eb"
| 22.5 | 44 | 0.866667 | 3 | 45 | 12.666667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.380952 | 0.066667 | 45 | 1 | 45 | 45 | 0.52381 | 0 | 0 | 0 | 0 | 0 | 0.711111 | 0.711111 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8237fbb040af9886dd139140466feca71ba74c0c | 86 | py | Python | generate_todaystring.py | quockhanghrc/Public | 0353a1dee2a88dec09c41a8a51e809409f175274 | [
"Apache-2.0"
] | null | null | null | generate_todaystring.py | quockhanghrc/Public | 0353a1dee2a88dec09c41a8a51e809409f175274 | [
"Apache-2.0"
] | null | null | null | generate_todaystring.py | quockhanghrc/Public | 0353a1dee2a88dec09c41a8a51e809409f175274 | [
"Apache-2.0"
] | null | null | null | def generate_todaystring():
return (datetime.datetime.today()).strftime('%d%b%Y')
| 28.666667 | 57 | 0.709302 | 11 | 86 | 5.454545 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093023 | 86 | 2 | 58 | 43 | 0.769231 | 0 | 0 | 0 | 1 | 0 | 0.069767 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
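For reference, a usage sketch of the helper once the missing `datetime` import is in place: on 2020-01-05 the `'%d%b%Y'` format yields `05Jan2020`.
```python
import datetime

print(datetime.datetime.today().strftime('%d%b%Y'))  # e.g. 05Jan2020
```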
823912bacd1e54ecb3f813a337003b17a2731fef | 6,280 | py | Python | test/document_filter_test.py | Coveochatbot/megageniale-mlapi | 61666c33a4313c9906d874fa04dd6c6bd45df583 | [
"MIT"
] | null | null | null | test/document_filter_test.py | Coveochatbot/megageniale-mlapi | 61666c33a4313c9906d874fa04dd6c6bd45df583 | [
"MIT"
] | null | null | null | test/document_filter_test.py | Coveochatbot/megageniale-mlapi | 61666c33a4313c9906d874fa04dd6c6bd45df583 | [
"MIT"
] | null | null | null | import unittest
from mlapi.document_filter import DocumentFilter
from mlapi.model.facet import Facet
class TestDocumentFilter(unittest.TestCase):
# Must have section
def test_must_have_facet_a(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_with_facets(self.generate_data(), [Facet("FacetA", "FacetValueA")])
self.assertEqual(3, len(documents))
self.assertTrue("Document1" in documents)
self.assertTrue("Document2" not in documents)
self.assertTrue("Document3" in documents)
self.assertTrue("Document4" in documents)
self.assertTrue("Document5" not in documents)
def test_must_have_facet_d(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_with_facets(self.generate_data(), [Facet("FacetD", "FacetValueD")])
self.assertEqual(0, len(documents))
self.assertTrue("Document1" not in documents)
self.assertTrue("Document2" not in documents)
self.assertTrue("Document3" not in documents)
self.assertTrue("Document4" not in documents)
self.assertTrue("Document5" not in documents)
def test_must_have_facet_a_and_b(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_with_facets(self.generate_data(), [Facet("FacetA", "FacetValueA"), Facet("FacetB", "FacetValueB")])
self.assertEqual(1, len(documents))
self.assertTrue("Document1" in documents)
self.assertTrue("Document2" not in documents)
self.assertTrue("Document3" not in documents)
self.assertTrue("Document4" not in documents)
self.assertTrue("Document5" not in documents)
def test_must_have_facet_a2(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_with_facets(self.generate_data(), [Facet("FacetA", "FacetValueA2")])
self.assertEqual(2, len(documents))
self.assertTrue("Document1" not in documents)
self.assertTrue("Document2" not in documents)
self.assertTrue("Document3" not in documents)
self.assertTrue("Document4" in documents)
self.assertTrue("Document5" in documents)
# Must NOT have section
def test_must_not_have_facet_a(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_without_facets(self.generate_data(), [Facet("FacetA", "FacetValueA")])
self.assertEqual(2, len(documents))
self.assertTrue("Document1" not in documents)
self.assertTrue("Document2" in documents)
self.assertTrue("Document3" not in documents)
self.assertTrue("Document4" not in documents)
self.assertTrue("Document5" in documents)
def test_must_not_have_facet_d(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_without_facets(self.generate_data(), [Facet("FacetD", "FacetValueD")])
self.assertEqual(5, len(documents))
self.assertTrue("Document1" in documents)
self.assertTrue("Document2" in documents)
self.assertTrue("Document3" in documents)
self.assertTrue("Document4" in documents)
self.assertTrue("Document5" in documents)
def test_must_not_have_facet_a_or_b(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_without_facets(self.generate_data(), [Facet("FacetA", "FacetValueA"), Facet("FacetB", "FacetValueB")])
self.assertEqual(1, len(documents))
self.assertTrue("Document1" not in documents)
self.assertTrue("Document2" not in documents)
self.assertTrue("Document3" not in documents)
self.assertTrue("Document4" not in documents)
self.assertTrue("Document5" in documents)
def test_must_not_have_facet_a_or_b_chained(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_without_facets(self.generate_data(), [Facet("FacetA", "FacetValueA")])
documents = document_filter.keep_documents_without_facets(documents, [Facet("FacetB", "FacetValueB")])
self.assertEqual(1, len(documents))
self.assertTrue("Document1" not in documents)
self.assertTrue("Document2" not in documents)
self.assertTrue("Document3" not in documents)
self.assertTrue("Document4" not in documents)
self.assertTrue("Document5" in documents)
# Must have X and NOT have Y section
def test_must_not_have_facet_a_and_not_b(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_with_facets(self.generate_data(), [Facet("FacetA", "FacetValueA")])
documents = document_filter.keep_documents_without_facets(documents, [Facet("FacetB", "FacetValueB")])
self.assertEqual(2, len(documents))
self.assertTrue("Document1" not in documents)
self.assertTrue("Document2" not in documents)
self.assertTrue("Document3" in documents)
self.assertTrue("Document4" in documents)
self.assertTrue("Document5" not in documents)
def test_must_not_have_facet_a_and_not_b_comutative(self):
document_filter = DocumentFilter()
documents = document_filter.keep_documents_without_facets(self.generate_data(), [Facet("FacetB", "FacetValueB")])
documents = document_filter.keep_documents_with_facets(documents, [Facet("FacetA", "FacetValueA")])
self.assertEqual(2, len(documents))
self.assertTrue("Document1" not in documents)
self.assertTrue("Document2" not in documents)
self.assertTrue("Document3" in documents)
self.assertTrue("Document4" in documents)
self.assertTrue("Document5" not in documents)
def generate_data(self):
facetA = Facet("FacetA", "FacetValueA")
facetA2 = Facet("FacetA", "FacetValueA2")
facetB = Facet("FacetB", "FacetValueB")
facetC = Facet("FacetC", "FacetValueC")
return {'Document1': [facetA, facetB],
'Document2': [facetB, facetC],
'Document3': [facetA, facetC],
'Document4': [facetA2, facetA],
'Document5': [facetA2, facetC]}
| 45.507246 | 153 | 0.695223 | 689 | 6,280 | 6.142235 | 0.088534 | 0.153592 | 0.271739 | 0.236295 | 0.892486 | 0.888941 | 0.886106 | 0.875236 | 0.86673 | 0.848062 | 0 | 0.014073 | 0.196656 | 6,280 | 137 | 154 | 45.839416 | 0.824777 | 0.011783 | 0 | 0.682243 | 0 | 0 | 0.132215 | 0 | 0 | 0 | 0 | 0 | 0.560748 | 1 | 0.102804 | false | 0 | 0.028037 | 0 | 0.149533 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
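A hedged sketch of the semantics the tests above pin down: `keep_documents_with_facets` keeps documents carrying every listed facet, `keep_documents_without_facets` keeps documents carrying none of them, and the two compose in either order. The document names here are illustrative.
```python
from mlapi.document_filter import DocumentFilter
from mlapi.model.facet import Facet

f = DocumentFilter()
docs = {"DocA": [Facet("FacetA", "FacetValueA")], "DocB": []}
kept = f.keep_documents_with_facets(docs, [Facet("FacetA", "FacetValueA")])
assert "DocA" in kept and "DocB" not in kept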
41348f1fa5b2f25a490cd983acd7d59d891fd252 | 3,442 | py | Python | cultivo/cultivo_main/migrations/0008_auto_20181113_0340.py | amanparmar17/cultivo-1 | 06030116ba47f99fee8f413404777c9dbdb4e92a | [
"MIT"
] | 31 | 2018-12-01T17:06:07.000Z | 2022-02-15T13:23:14.000Z | cultivo/cultivo_main/migrations/0008_auto_20181113_0340.py | amanparmar17/cultivo-1 | 06030116ba47f99fee8f413404777c9dbdb4e92a | [
"MIT"
] | 1 | 2021-12-24T13:22:23.000Z | 2021-12-24T13:23:57.000Z | cultivo/cultivo_main/migrations/0008_auto_20181113_0340.py | amanparmar17/cultivo-1 | 06030116ba47f99fee8f413404777c9dbdb4e92a | [
"MIT"
] | 13 | 2020-08-14T05:19:38.000Z | 2022-01-18T13:55:15.000Z | # Generated by Django 2.1.1 on 2018-11-12 22:10
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cultivo_main', '0007_auto_20181113_0321'),
]
operations = [
migrations.RenameField(
model_name='one',
old_name='GPValue1_million_dollar',
new_name='Gross_Production_Value_constant_2004_2006_1000_dollar',
),
migrations.RenameField(
model_name='one',
old_name='GPValue1_million_slc',
new_name='Gross_Production_Value_constant_2004_2006_million_SLC',
),
migrations.RenameField(
model_name='one',
old_name='GPValue2_million_dollar',
new_name='Gross_Production_Value_constant_2004_2006_million_US_dollar',
),
migrations.RenameField(
model_name='one',
old_name='GPValue2_million_slc',
new_name='Gross_Production_Value_current_million_SLC',
),
migrations.RenameField(
model_name='one',
old_name='GPValue_thousand_dollar',
new_name='Gross_Production_Value_current_million_US_dollar',
),
migrations.RenameField(
model_name='one',
old_name='NPValue_thousand_dollar',
new_name='Net_Production_Value_constant_2004_2006_1000_dollar',
),
migrations.RenameField(
model_name='pred_one',
old_name='GPValue1_million_dollar',
new_name='Gross_Production_Value_constant_2004_2006_1000_dollar',
),
migrations.RenameField(
model_name='pred_one',
old_name='GPValue1_million_slc',
new_name='Gross_Production_Value_constant_2004_2006_million_SLC',
),
migrations.RenameField(
model_name='pred_one',
old_name='GPValue2_million_dollar',
new_name='Gross_Production_Value_constant_2004_2006_million_US_dollar',
),
migrations.RenameField(
model_name='pred_one',
old_name='GPValue2_million_slc',
new_name='Gross_Production_Value_current_million_SLC',
),
migrations.RenameField(
model_name='pred_one',
old_name='GPValue_thousand_dollar',
new_name='Gross_Production_Value_current_million_US_dollar',
),
migrations.RenameField(
model_name='pred_one',
old_name='NPValue_thousand_dollar',
new_name='Net_Production_Value_constant_2004_2006_1000_dollar',
),
migrations.RenameField(
model_name='three',
old_name='domestic',
new_name='Domestic',
),
migrations.RenameField(
model_name='three',
old_name='export',
new_name='Export',
),
migrations.RenameField(
model_name='three',
old_name='imports',
new_name='Imports',
),
migrations.RenameField(
model_name='three',
old_name='production',
new_name='Production',
),
migrations.RenameField(
model_name='three',
old_name='seed',
new_name='Seed',
),
migrations.RenameField(
model_name='three',
old_name='stock',
new_name='Stock',
),
]
| 33.096154 | 83 | 0.595584 | 337 | 3,442 | 5.58457 | 0.166172 | 0.20085 | 0.248672 | 0.286929 | 0.856536 | 0.856536 | 0.856536 | 0.738576 | 0.738576 | 0.722635 | 0 | 0.05036 | 0.313481 | 3,442 | 103 | 84 | 33.417476 | 0.746085 | 0.013074 | 0 | 0.804124 | 1 | 0 | 0.320177 | 0.241237 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.030928 | 0 | 0.061856 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
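A brief provenance note, hedged: field-rename migrations like the one above are normally generated rather than hand-written, by running Django's `makemigrations` after renaming the model fields.
```sh
python manage.py makemigrations cultivo_main
```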
4154b6349d3c4e3c8ac4c8313f49affd6c8492b3 | 3,082 | py | Python | src/evaluate/metrics.py | aaditkamat/unbiased-pairwise-rec | 4c3e5ed9cbd376791deebd2fd8faa8961cbd8a6e | [
"Apache-2.0"
] | 16 | 2020-01-06T23:10:31.000Z | 2021-07-23T07:19:54.000Z | src/evaluate/metrics.py | aaditkamat/unbiased-pairwise-rec | 4c3e5ed9cbd376791deebd2fd8faa8961cbd8a6e | [
"Apache-2.0"
] | 6 | 2020-01-28T23:14:51.000Z | 2022-02-10T01:50:05.000Z | src/evaluate/metrics.py | aaditkamat/unbiased-pairwise-rec | 4c3e5ed9cbd376791deebd2fd8faa8961cbd8a6e | [
"Apache-2.0"
] | 3 | 2020-02-09T16:05:28.000Z | 2022-03-30T09:21:32.000Z | """Evaluation metrics for collaborative filltering with implicit feedback."""
from typing import Optional
import numpy as np
eps = 1e-3 # propensity clipping
def dcg_at_k(y_true: np.ndarray, y_score: np.ndarray,
k: int, pscore: Optional[np.ndarray] = None) -> float:
"""Calculate a DCG score for a given user."""
y_true_sorted_by_score = y_true[y_score.argsort()[::-1]]
if pscore is not None:
pscore_sorted_by_score = np.maximum(pscore[y_score.argsort()[::-1]], eps)
else:
pscore_sorted_by_score = np.ones_like(y_true_sorted_by_score)
dcg_score = 0.0
final_score = 0.0
k = k if y_true.shape[0] >= k else y_true.shape[0]
if not np.sum(y_true_sorted_by_score) == 0:
dcg_score += y_true_sorted_by_score[0] / pscore_sorted_by_score[0]
for i in np.arange(1, k):
dcg_score += y_true_sorted_by_score[i] / (pscore_sorted_by_score[i] * np.log2(i + 1))
final_score = dcg_score / np.sum(y_true_sorted_by_score) if pscore is None \
else dcg_score / np.sum(1. / pscore_sorted_by_score[y_true_sorted_by_score > 0])
return final_score
def average_precision_at_k(y_true: np.ndarray, y_score: np.ndarray,
k: int, pscore: Optional[np.ndarray] = None) -> float:
"""Calculate a average precision for a given user."""
y_true_sorted_by_score = y_true[y_score.argsort()[::-1]]
if pscore is not None:
pscore_sorted_by_score = np.maximum(pscore[y_score.argsort()[::-1]], eps)
else:
pscore_sorted_by_score = np.ones_like(y_true_sorted_by_score)
average_precision_score = 0.0
final_score = 0.0
k = k if y_true.shape[0] >= k else y_true.shape[0]
if not np.sum(y_true_sorted_by_score) == 0:
for i in np.arange(k):
if y_true_sorted_by_score[i] > 0:
score_ = np.sum(y_true_sorted_by_score[:i + 1] / pscore_sorted_by_score[:i + 1]) / (i + 1)
average_precision_score += score_
final_score = average_precision_score / np.sum(y_true_sorted_by_score) if pscore is None \
else average_precision_score / np.sum(1. / pscore_sorted_by_score[y_true_sorted_by_score > 0])
return final_score
def recall_at_k(y_true: np.ndarray, y_score: np.ndarray,
k: int, pscore: Optional[np.ndarray] = None) -> float:
"""Calculate a recall score for a given user."""
y_true_sorted_by_score = y_true[y_score.argsort()[::-1]]
if pscore is not None:
pscore_sorted_by_score = np.maximum(pscore[y_score.argsort()[::-1]], eps)
else:
pscore_sorted_by_score = np.ones_like(y_true_sorted_by_score)
final_score = 0.
k = k if y_true.shape[0] >= k else y_true.shape[0]
if not np.sum(y_true_sorted_by_score) == 0:
recall_score = np.sum(y_true_sorted_by_score[:k] / pscore_sorted_by_score[:k])
final_score = recall_score / np.sum(y_true_sorted_by_score) if pscore is None \
else recall_score / np.sum(1. / pscore_sorted_by_score[y_true_sorted_by_score > 0])
return final_score
| 40.552632 | 106 | 0.669371 | 510 | 3,082 | 3.698039 | 0.115686 | 0.139979 | 0.227466 | 0.137858 | 0.830859 | 0.791622 | 0.780488 | 0.752386 | 0.73754 | 0.694062 | 0 | 0.017027 | 0.218689 | 3,082 | 75 | 107 | 41.093333 | 0.766196 | 0.072356 | 0 | 0.557692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057692 | false | 0 | 0.038462 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
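A usage sketch for the metrics above, assuming the module is importable as `src.evaluate.metrics` (its repository path): `pscore` is optional, and when given, the inverse-propensity weights are clipped at `eps = 1e-3`.
```python
import numpy as np
from src.evaluate.metrics import dcg_at_k  # import path assumed from the repo layout

y_true = np.array([1.0, 0.0, 1.0, 0.0])
y_score = np.array([0.9, 0.8, 0.3, 0.1])
pscore = np.array([0.5, 0.9, 0.2, 0.7])
print(dcg_at_k(y_true, y_score, k=3, pscore=pscore))
```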
419296b8a88413e87eef92de77a3830449b1da17 | 10,230 | py | Python | tests/views/test_dashboard.py | jiangrz/flower | 4d6fad197e97c9c36f8052345a348345ef4505a3 | [
"BSD-3-Clause"
] | 2 | 2015-11-06T07:41:38.000Z | 2016-10-11T17:09:17.000Z | tests/views/test_dashboard.py | jiangrz/flower | 4d6fad197e97c9c36f8052345a348345ef4505a3 | [
"BSD-3-Clause"
] | 16 | 2021-04-14T16:56:49.000Z | 2021-04-14T16:57:32.000Z | tests/views/test_dashboard.py | jiangrz/flower | 4d6fad197e97c9c36f8052345a348345ef4505a3 | [
"BSD-3-Clause"
] | 1 | 2021-04-14T16:54:58.000Z | 2021-04-14T16:54:58.000Z | import time
from tests import AsyncHTTPTestCase
from tests.utils import task_succeeded_events, task_failed_events
from tests.utils import HtmlTableParser
from celery.events import Event
from celery.utils import uuid
from flower.events import EventsState
class DashboardTests(AsyncHTTPTestCase):
def setUp(self):
self.app = super(DashboardTests, self).get_app()
super(DashboardTests, self).setUp()
def get_app(self):
return self.app
def test_default_page(self):
r1 = self.get('/')
r2 = self.get('/dashboard')
self.assertEqual(r1.body, r2.body)
def test_no_workers(self):
r = self.get('/dashboard')
self.assertEqual(200, r.code)
self.assertIn('Load Average', str(r.body))
self.assertNotIn('<tr id=', str(r.body))
def test_unknown_worker(self):
r = self.get('/worker/unknown')
self.assertEqual(404, r.code)
self.assertIn('Unknown worker', str(r.body))
def test_single_workers_offline(self):
state = EventsState()
state.get_or_create_worker('worker1')
state.event(Event('worker-online', hostname='worker1',
local_received=time.time()))
state.event(Event('worker-offline', hostname='worker1',
local_received=time.time()))
self.app.events.state = state
r = self.get('/dashboard')
table = HtmlTableParser()
table.parse(str(r.body))
self.assertEqual(200, r.code)
self.assertEqual(1, len(table.rows()))
self.assertTrue(table.get_row('worker1'))
self.assertEqual(['worker1', 'False', '0', '0', '0', '0', '0', None],
table.get_row('worker1'))
self.assertFalse(table.get_row('worker2'))
def test_single_workers_online(self):
state = EventsState()
state.get_or_create_worker('worker1')
state.event(Event('worker-online', hostname='worker1',
local_received=time.time()))
self.app.events.state = state
r = self.get('/dashboard')
table = HtmlTableParser()
table.parse(str(r.body))
self.assertEqual(200, r.code)
self.assertEqual(1, len(table.rows()))
self.assertTrue(table.get_row('worker1'))
self.assertEqual(['worker1', 'True', '0', '0', '0', '0', '0', None],
table.get_row('worker1'))
self.assertFalse(table.get_row('worker2'))
def test_task_received(self):
state = EventsState()
state.get_or_create_worker('worker1')
state.get_or_create_worker('worker2')
events = [Event('worker-online', hostname='worker1'),
Event('worker-online', hostname='worker2'),
Event('task-received', uuid=uuid(), name='task1',
args='(2, 2)', kwargs="{'foo': 'bar'}",
retries=0, eta=None, hostname='worker1')]
for i, e in enumerate(events):
e['clock'] = i
e['local_received'] = time.time()
state.event(e)
self.app.events.state = state
r = self.get('/dashboard')
table = HtmlTableParser()
table.parse(str(r.body))
self.assertEqual(200, r.code)
self.assertEqual(2, len(table.rows()))
self.assertEqual(['worker1', 'True', '0', '1', '0', '0', '0', None],
table.get_row('worker1'))
self.assertEqual(['worker2', 'True', '0', '0', '0', '0', '0', None],
table.get_row('worker2'))
def test_task_started(self):
state = EventsState()
state.get_or_create_worker('worker1')
state.get_or_create_worker('worker2')
events = [Event('worker-online', hostname='worker1'),
Event('worker-online', hostname='worker2'),
Event('task-received', uuid='123', name='task1',
args='(2, 2)', kwargs="{'foo': 'bar'}",
retries=0, eta=None, hostname='worker1'),
Event('task-started', uuid='123', hostname='worker1')]
for i, e in enumerate(events):
e['clock'] = i
e['local_received'] = time.time()
state.event(e)
self.app.events.state = state
r = self.get('/dashboard')
table = HtmlTableParser()
table.parse(str(r.body))
self.assertEqual(200, r.code)
self.assertEqual(2, len(table.rows()))
self.assertEqual(['worker1', 'True', '0', '1', '0', '0', '0', None],
table.get_row('worker1'))
self.assertEqual(['worker2', 'True', '0', '0', '0', '0', '0', None],
table.get_row('worker2'))
def test_task_succeeded(self):
state = EventsState()
state.get_or_create_worker('worker1')
state.get_or_create_worker('worker2')
events = [Event('worker-online', hostname='worker1'),
Event('worker-online', hostname='worker2'),
Event('task-received', uuid='123', name='task1',
args='(2, 2)', kwargs="{'foo': 'bar'}",
retries=0, eta=None, hostname='worker1'),
Event('task-started', uuid='123', hostname='worker1'),
Event('task-succeeded', uuid='123', result='4',
runtime=0.1234, hostname='worker1')]
for i, e in enumerate(events):
e['clock'] = i
e['local_received'] = time.time()
state.event(e)
self.app.events.state = state
r = self.get('/dashboard')
table = HtmlTableParser()
table.parse(str(r.body))
self.assertEqual(200, r.code)
self.assertEqual(2, len(table.rows()))
self.assertEqual(['worker1', 'True', '0', '1', '0', '1', '0', None],
table.get_row('worker1'))
self.assertEqual(['worker2', 'True', '0', '0', '0', '0', '0', None],
table.get_row('worker2'))
def test_task_failed(self):
state = EventsState()
state.get_or_create_worker('worker1')
state.get_or_create_worker('worker2')
events = [Event('worker-online', hostname='worker1'),
Event('worker-online', hostname='worker2'),
Event('task-received', uuid='123', name='task1',
args='(2, 2)', kwargs="{'foo': 'bar'}",
retries=0, eta=None, hostname='worker1'),
Event('task-started', uuid='123', hostname='worker1'),
Event('task-failed', uuid='123', exception="KeyError('foo')",
traceback='line 1 at main', hostname='worker1')]
for i, e in enumerate(events):
e['clock'] = i
e['local_received'] = time.time()
state.event(e)
self.app.events.state = state
r = self.get('/dashboard')
table = HtmlTableParser()
table.parse(str(r.body))
self.assertEqual(200, r.code)
self.assertEqual(2, len(table.rows()))
self.assertEqual(['worker1', 'True', '0', '1', '1', '0', '0', None],
table.get_row('worker1'))
self.assertEqual(['worker2', 'True', '0', '0', '0', '0', '0', None],
table.get_row('worker2'))
def test_task_retried(self):
state = EventsState()
state.get_or_create_worker('worker1')
state.get_or_create_worker('worker2')
events = [Event('worker-online', hostname='worker1'),
Event('worker-online', hostname='worker2'),
Event('task-received', uuid='123', name='task1',
args='(2, 2)', kwargs="{'foo': 'bar'}",
retries=0, eta=None, hostname='worker1'),
Event('task-started', uuid='123', hostname='worker1'),
Event('task-retried', uuid='123', exception="KeyError('bar')",
traceback='line 2 at main', hostname='worker1'),
Event('task-failed', uuid='123', exception="KeyError('foo')",
traceback='line 1 at main', hostname='worker1')]
for i, e in enumerate(events):
e['clock'] = i
e['local_received'] = time.time()
state.event(e)
self.app.events.state = state
r = self.get('/dashboard')
table = HtmlTableParser()
table.parse(str(r.body))
self.assertEqual(200, r.code)
self.assertEqual(2, len(table.rows()))
self.assertEqual(['worker1', 'True', '0', '1', '1', '0', '1', None],
table.get_row('worker1'))
self.assertEqual(['worker2', 'True', '0', '0', '0', '0', '0', None],
table.get_row('worker2'))
def test_tasks(self):
state = EventsState()
state.get_or_create_worker('worker1')
state.get_or_create_worker('worker2')
state.get_or_create_worker('worker3')
events = [Event('worker-online', hostname='worker1'),
Event('worker-online', hostname='worker2')]
for i in range(100):
events += task_succeeded_events(worker='worker1')
for i in range(10):
events += task_succeeded_events(worker='worker3')
for i in range(13):
events += task_failed_events(worker='worker3')
for i, e in enumerate(events):
e['clock'] = i
e['local_received'] = time.time()
state.event(e)
self.app.events.state = state
r = self.get('/dashboard')
table = HtmlTableParser()
table.parse(str(r.body))
self.assertEqual(200, r.code)
self.assertEqual(3, len(table.rows()))
self.assertEqual(['worker1', 'True', '0', '100', '0', '100', '0', None],
table.get_row('worker1'))
self.assertEqual(['worker2', 'True', '0', '0', '0', '0', '0', None],
table.get_row('worker2'))
self.assertEqual(['worker3', 'True', '0', '23', '13', '10', '0', None],
table.get_row('worker3'))
| 38.171642 | 80 | 0.533431 | 1,139 | 10,230 | 4.696225 | 0.093942 | 0.013834 | 0.014582 | 0.011965 | 0.838101 | 0.799402 | 0.792485 | 0.791737 | 0.784446 | 0.784446 | 0 | 0.038521 | 0.299609 | 10,230 | 267 | 81 | 38.314607 | 0.708025 | 0 | 0 | 0.733645 | 0 | 0 | 0.153666 | 0 | 0 | 0 | 0 | 0 | 0.191589 | 1 | 0.060748 | false | 0 | 0.03271 | 0.004673 | 0.102804 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
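The tests above all share one fixture pattern; a minimal sketch of it, assuming `flower` and `celery` are importable:
```python
import time

from celery.events import Event
from flower.events import EventsState

# Simulate a worker coming online, exactly as the single-worker tests do.
state = EventsState()
state.get_or_create_worker('worker1')
state.event(Event('worker-online', hostname='worker1',
                  local_received=time.time()))
```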
68dc5bdaf24f0a3a806ab64640e53425cd169dbc | 170 | py | Python | tests/constants.py | hurusystems/sqlalchemy-queryfilter | c9295e9e7c623dceb4c86aac9419a1afd6ef0f37 | [
"MIT"
] | null | null | null | tests/constants.py | hurusystems/sqlalchemy-queryfilter | c9295e9e7c623dceb4c86aac9419a1afd6ef0f37 | [
"MIT"
] | 1 | 2020-02-03T21:12:45.000Z | 2020-02-03T21:12:45.000Z | tests/constants.py | hurusystems/sqlalchemy-queryfilter | c9295e9e7c623dceb4c86aac9419a1afd6ef0f37 | [
"MIT"
] | 1 | 2022-03-22T19:14:12.000Z | 2022-03-22T19:14:12.000Z | SQL = 'SELECT "table".id AS table_id, "table".name AS table_name, "table".description AS table_description, "table".created_date AS table_created_date \nFROM "table" \n'
| 85 | 169 | 0.764706 | 27 | 170 | 4.592593 | 0.407407 | 0.225806 | 0.258065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.105882 | 170 | 1 | 170 | 170 | 0.815789 | 0 | 0 | 0 | 0 | 1 | 0.947059 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ec03d42e64a5accba805cf050e61aaf9d128463b | 7,499 | py | Python | src/node_classification.py | Anonymous-DL/MAGNET | 5926ca79ae03010289c1e522f8df41aa79de5edc | [
"MIT"
] | 13 | 2021-05-14T15:30:16.000Z | 2022-01-21T20:58:32.000Z | src/node_classification.py | Anonymous-DL/MAGNET | 5926ca79ae03010289c1e522f8df41aa79de5edc | [
"MIT"
] | 1 | 2021-11-28T12:20:43.000Z | 2021-12-13T18:26:55.000Z | src/node_classification.py | Anonymous-DL/MAGNET | 5926ca79ae03010289c1e522f8df41aa79de5edc | [
"MIT"
] | 2 | 2021-05-18T12:31:41.000Z | 2021-12-22T22:18:27.000Z | import os, sys
epochs = '3000'
for data in [
'WebKB/Cornell', 'WebKB/Texas', 'WebKB/Wisconsin',
'cora_ml/',
'citeseer_npz/',
'syn/syn1',
'syn/syn2',
'syn/syn3'
]:
for lr in [1e-3, 1e-2, 5e-3]:
# MagNet
log_path = data
for num_filter in [16, 32, 64]:
for q in [0.01, 0.05, 0.1, 0.15, 0.2, 0.25]:
command = ('python sparse_MagNet.py '
+' --dataset='+data
+' --q='+str(q)
+' --num_filter='+str(num_filter)
+' --K=1'
+' --log_path='+str(log_path)
+' --layer=2'
+' --epochs='+epochs
+' --dropout=0.5'
+' --lr='+str(lr)
+' -a')
print(command)
os.system(command)
log_path = 'Sym_' + data
for num_filter in [5, 15, 30]:
command = ('python Sym_DiGCN.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' --epochs='+epochs)
print(command)
os.system(command)
log_path = 'GCN_' + data
for num_filter in [16, 32, 64]:
command = ('python GCN.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' --epochs='+epochs)
print(command)
os.system(command)
command = ('python GCN.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --epochs='+epochs
+' --lr='+str(lr)
+' -tud')
print(command)
os.system(command)
log_path = 'Cheb_' + data
for num_filter in [16, 32, 64]:
command = ('python Cheb.py '
+' --dataset='+data
+' --K=2'
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' --epochs='+epochs)
print(command)
os.system(command)
log_path = 'SAGE_' + data
for num_filter in [16, 32, 64]:
command = ('python SAGE.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' --epochs='+epochs)
print(command)
os.system(command)
command = ('python SAGE.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' -tud'
+' --epochs='+epochs)
print(command)
os.system(command)
log_path = 'GAT_' + data
for heads in [2, 4, 8]:
for num_filter in [16, 32, 64]:
command = ('python GAT.py '
+' --dataset='+data
+' --heads='+str(heads)
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' --epochs='+epochs)
print(command)
os.system(command)
command = ('python GAT.py '
+' --dataset='+data
+' --heads='+str(heads)
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' -tud'
+' --epochs='+epochs)
print(command)
os.system(command)
log_path = 'GIN_' + data
for num_filter in [16, 32, 64]:
command = ('python GIN.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' --epochs='+epochs)
print(command)
os.system(command)
command = ('python GIN.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --dropout=0.5'
+' --lr='+str(lr)
+' -tud'
+' --epochs='+epochs)
print(command)
os.system(command)
# K=10 following the original paper
log_path = 'APPNP_' + data
for num_filter in [16, 32, 64]:
for alpha in [0.05, 0.1, 0.15, 0.2]:
command = ('python APPNP.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --alpha='+str(alpha)
+' --dropout=0.5'
+' --lr='+str(lr)
+' --epochs='+epochs)
print(command)
os.system(command)
command = ('python APPNP.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --epochs='+epochs
+' --lr='+str(lr)
+' --alpha='+str(alpha)
+' --dropout=0.5'
+' -tud')
print(command)
os.system(command)
log_path = 'DiG_' + data
for num_filter in [16, 32, 64]:
for alpha in [0.05, 0.1, 0.15, 0.2]:
command = ('python Digraph.py '
+' --dataset='+data
+' --num_filter='+str(num_filter)
+' --log_path='+str(log_path)
+' --alpha='+str(alpha)
+' --dropout=0.5'
+' --lr='+str(lr)
+' --epochs='+epochs)
print(command)
os.system(command)
| 40.317204 | 63 | 0.32671 | 619 | 7,499 | 3.819063 | 0.119548 | 0.10956 | 0.076988 | 0.088832 | 0.857022 | 0.817259 | 0.801184 | 0.786802 | 0.751269 | 0.731387 | 0 | 0.039005 | 0.528204 | 7,499 | 186 | 64 | 40.317204 | 0.629169 | 0.005334 | 0 | 0.816092 | 0 | 0 | 0.185732 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.005747 | 0 | 0.005747 | 0.08046 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
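For reference, the first command the sweep above prints (traceable through the loops: `data='WebKB/Cornell'`, `lr=1e-3`, `num_filter=16`, `q=0.01`; shown with normalized spacing) is:
```sh
python sparse_MagNet.py --dataset=WebKB/Cornell --q=0.01 --num_filter=16 --K=1 --log_path=WebKB/Cornell --layer=2 --epochs=3000 --dropout=0.5 --lr=0.001 -a
```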
6b9ec37064a921ed7534c355432f9714f70608dd | 433 | py | Python | gdsfactory/tests/test_rotate.py | jorgepadilla19/gdsfactory | 68e1c18257a75d4418279851baea417c8899a165 | [
"MIT"
] | null | null | null | gdsfactory/tests/test_rotate.py | jorgepadilla19/gdsfactory | 68e1c18257a75d4418279851baea417c8899a165 | [
"MIT"
] | null | null | null | gdsfactory/tests/test_rotate.py | jorgepadilla19/gdsfactory | 68e1c18257a75d4418279851baea417c8899a165 | [
"MIT"
] | null | null | null | import gdsfactory as gf
def test_rotate():
c1 = gf.components.straight()
c1r = c1.rotate()
c2 = gf.components.straight()
c2r = c2.rotate()
assert c1.uid == c2.uid
assert c1r.uid == c2r.uid
if __name__ == "__main__":
c1 = gf.components.straight()
c1r = c1.rotate()
c2 = gf.components.straight()
c2r = c2.rotate()
assert c1.uid == c2.uid
assert c1r.uid == c2r.uid
c2r.show()
| 17.32 | 33 | 0.598152 | 60 | 433 | 4.166667 | 0.316667 | 0.192 | 0.32 | 0.176 | 0.8 | 0.8 | 0.8 | 0.8 | 0.8 | 0.8 | 0 | 0.065217 | 0.256351 | 433 | 24 | 34 | 18.041667 | 0.71118 | 0 | 0 | 0.75 | 0 | 0 | 0.018476 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.0625 | false | 0 | 0.0625 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d46f7bbd409b223cda98ef7088a79ed9e24ec26d | 118 | py | Python | simuvex/simuvex/engines/vex/expressions/vecret.py | Ruide/angr-dev | 964dc80c758e25c698c2cbcc454ef5954c5fa0a0 | [
"BSD-2-Clause"
] | 86 | 2015-08-06T23:25:07.000Z | 2022-02-17T14:58:22.000Z | simuvex/simuvex/engines/vex/expressions/vecret.py | Ruide/angr-dev | 964dc80c758e25c698c2cbcc454ef5954c5fa0a0 | [
"BSD-2-Clause"
] | 132 | 2015-09-10T19:06:59.000Z | 2018-10-04T20:36:45.000Z | simuvex/simuvex/engines/vex/expressions/vecret.py | Ruide/angr-dev | 964dc80c758e25c698c2cbcc454ef5954c5fa0a0 | [
"BSD-2-Clause"
] | 80 | 2015-08-07T10:30:20.000Z | 2020-03-21T14:45:28.000Z | print('... Importing simuvex/engines/vex/expressions/vecret.py ...')
from angr.engines.vex.expressions.vecret import *
| 39.333333 | 67 | 0.771186 | 15 | 118 | 6.066667 | 0.733333 | 0.21978 | 0.461538 | 0.593407 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.076271 | 118 | 2 | 68 | 59 | 0.834862 | 0 | 0 | 0 | 0 | 0 | 0.5 | 0.347458 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 1 | null | null | 0.5 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 8 |
2e0c3b7adbcc01121d3e1dafabe8f78713af0cc4 | 3,711 | py | Python | Selenium/tests_UI.py | wonjoonSeol/ScienceScape | 8d8a3cb76193b6f85b7a2a6c7219e249237d64c8 | [
"BSD-3-Clause"
] | 5 | 2018-02-14T21:11:06.000Z | 2020-02-23T14:53:11.000Z | Selenium/tests_UI.py | wonjoonSeol/ScienceScape | 8d8a3cb76193b6f85b7a2a6c7219e249237d64c8 | [
"BSD-3-Clause"
] | 106 | 2018-02-09T00:31:05.000Z | 2018-03-29T07:28:34.000Z | Selenium/tests_UI.py | wonjoonSeol/ScienceScape | 8d8a3cb76193b6f85b7a2a6c7219e249237d64c8 | [
"BSD-3-Clause"
] | 6 | 2018-02-23T17:48:03.000Z | 2020-05-14T13:39:36.000Z | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from django.test import TestCase
from django.contrib.auth.models import User
class TestUI(TestCase):
def test_login_with_unregistered_credentials(self):
browser_driver = webdriver.Chrome()
browser_driver.get("http://127.0.0.1:8000/")
login_collapsible = browser_driver.find_element_by_xpath("/html/body/div[4]/div[3]/ul/li[1]/div[1]/i")
login_collapsible.click()
username_field = browser_driver.find_element_by_xpath('//*[@id="login-username"]')
username_field.send_keys("myusername")
password_field = browser_driver.find_element_by_xpath('//*[@id="login-password"]')
password_field.send_keys("mypassword")
submit_button = browser_driver.find_element_by_xpath('/html/body/div[4]/div[3]/ul/li[1]/div[2]/div/form/button')
submit_button.click()
self.assertIn("Incorrect credentials", browser_driver.page_source)
register_collapsible = browser_driver.find_element_by_xpath("/html/body/div[4]/div[3]/ul/li[2]/div[1]")
register_collapsible.click()
username_field = browser_driver.find_element_by_xpath('//*[@id="id_username"]')
username_field.send_keys("avalidusername2")
email_field = browser_driver.find_element_by_xpath('//*[@id="id_email"]')
email_field.send_keys("email@email.abc2")
password_field = browser_driver.find_element_by_xpath('//*[@id="id_password"]')
password_field.send_keys("itsasecret2")
submit_button = browser_driver.find_element_by_xpath('/html/body/div[4]/div[3]/ul/li[2]/div[2]/div/form/button')
submit_button.click()
self.assertIn("are logged in as", browser_driver.page_source)
browser_driver.quit()
def test_registration_when_user_already_exists(self):
browser_driver = webdriver.Chrome()
browser_driver.get("http://127.0.0.1:8000/")
register_collapsible = browser_driver.find_element_by_xpath("/html/body/div[4]/div[3]/ul/li[2]/div[1]")
register_collapsible.click()
username_field = browser_driver.find_element_by_xpath('//*[@id="id_username"]')
username_field.send_keys("avalidusername")
email_field = browser_driver.find_element_by_xpath('//*[@id="id_email"]')
email_field.send_keys("email@email.abc")
password_field = browser_driver.find_element_by_xpath('//*[@id="id_password"]')
password_field.send_keys("itsasecret")
submit_button = browser_driver.find_element_by_xpath('/html/body/div[4]/div[3]/ul/li[2]/div[2]/div/form/button')
submit_button.click()
self.assertIn("already exists", browser_driver.page_source)
browser_driver.quit()
def test_registration_and_login_for_new_user(self):
browser_driver = webdriver.Chrome()
browser_driver.get("http://127.0.0.1:8000/")
register_collapsible = browser_driver.find_element_by_xpath("/html/body/div[4]/div[3]/ul/li[2]/div[1]")
register_collapsible.click()
username_field = browser_driver.find_element_by_xpath('//*[@id="id_username"]')
username_field.send_keys("avalidusername")
email_field = browser_driver.find_element_by_xpath('//*[@id="id_email"]')
email_field.send_keys("email@email.abc")
password_field = browser_driver.find_element_by_xpath('//*[@id="id_password"]')
password_field.send_keys("itsasecret")
submit_button = browser_driver.find_element_by_xpath('/html/body/div[4]/div[3]/ul/li[2]/div[2]/div/form/button')
submit_button.click()
self.assertIn("are logged in as", browser_driver.page_source)
browser_driver.quit()
| 55.38806 | 120 | 0.704123 | 505 | 3,711 | 4.845545 | 0.156436 | 0.170004 | 0.131998 | 0.186351 | 0.857785 | 0.834083 | 0.834083 | 0.834083 | 0.834083 | 0.811197 | 0 | 0.020615 | 0.150364 | 3,711 | 66 | 121 | 56.227273 | 0.755471 | 0 | 0 | 0.661017 | 0 | 0.135593 | 0.242049 | 0.1531 | 0 | 0 | 0 | 0 | 0.067797 | 1 | 0.050847 | false | 0.135593 | 0.067797 | 0 | 0.135593 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
cf25fecee6d332af764290d32c13376aa06be7b1 | 14,208 | py | Python | longOne.py | to314as/bir_tools | 9acf587a0e2d13cc0ac02e5aaf3652447568c19a | [
"MIT"
] | null | null | null | longOne.py | to314as/bir_tools | 9acf587a0e2d13cc0ac02e5aaf3652447568c19a | [
"MIT"
] | null | null | null | longOne.py | to314as/bir_tools | 9acf587a0e2d13cc0ac02e5aaf3652447568c19a | [
"MIT"
] | 1 | 2020-10-28T12:26:07.000Z | 2020-10-28T12:26:07.000Z | import torch.nn as nn
import torch
from torch.nn import functional as F
import numpy as np
from complexFunctions import complex_relu, complex_max_pool2d,complex_dropout, complex_dropout2d
from complexLayers import ComplexConv2d,ComplexConvTranspose2d,ComplexConvTranspose3d,ComplexSequential
import numpy.fft as nf
import os
import sys
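# NOTE (added comment): the path below is machine-specific and kept verbatim
# from the original source; point it at a local fastMRI checkout before use.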
sys.path.append('/mnt/mnt/5TB_slot2/Tobias/TobiasPy/fastMRI')
from models.unet.unet_model import UnetModel
class ConvBlock(nn.Module):
"""
A Convolutional Block that consists of two convolution layers each followed by
instance normalization, LeakyReLU activation and dropout.
"""
def __init__(self, in_chans, out_chans, drop_prob):
"""
Args:
in_chans (int): Number of channels in the input.
out_chans (int): Number of channels in the output.
drop_prob (float): Dropout probability.
"""
super().__init__()
self.in_chans = in_chans
self.out_chans = out_chans
self.drop_prob = drop_prob
self.layers = nn.Sequential(
nn.Conv2d(in_chans, out_chans, kernel_size=3, padding=1, bias=False),
nn.InstanceNorm2d(out_chans),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Dropout2d(drop_prob),
nn.Conv2d(out_chans, out_chans, kernel_size=3, padding=1, bias=False),
nn.InstanceNorm2d(out_chans),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Dropout2d(drop_prob)
)
def forward(self, input):
"""
Args:
input (torch.Tensor): Input tensor of shape [batch_size, self.in_chans, height, width]
Returns:
(torch.Tensor): Output tensor of shape [batch_size, self.out_chans, height, width]
"""
return self.layers(input)
def __repr__(self):
return f'ConvBlock(in_chans={self.in_chans}, out_chans={self.out_chans}, ' \
f'drop_prob={self.drop_prob})'
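# Illustrative usage (a sketch, not part of the original module; the sample
# sizes are hypothetical):
#   block = ConvBlock(in_chans=1, out_chans=32, drop_prob=0.1)
#   y = block(torch.randn(4, 1, 128, 128))  # -> torch.Size([4, 32, 128, 128])
# Both 3x3 convolutions use padding=1, so spatial dimensions are preserved.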
class ComplexConvBlock(nn.Module):
"""
A Convolutional Block that consists of two convolution layers each followed by
instance normalization, LeakyReLU activation and dropout.
"""
def __init__(self, in_chans, out_chans, drop_prob):
"""
Args:
in_chans (int): Number of channels in the input.
out_chans (int): Number of channels in the output.
drop_prob (float): Dropout probability.
"""
super().__init__()
self.in_chans = in_chans
self.out_chans = out_chans
self.drop_prob = drop_prob
self.layers = nn.Sequential(
nn.Conv2d(in_chans, out_chans, kernel_size=3, padding=1, bias=False),
nn.InstanceNorm2d(out_chans),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Dropout2d(drop_prob),
nn.Conv2d(out_chans, out_chans, kernel_size=3, padding=1, bias=False),
nn.InstanceNorm2d(out_chans),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Dropout2d(drop_prob)
)
def forward(self, input):
"""
Args:
input (torch.Tensor): Input tensor of shape [batch_size, self.in_chans, height, width]
Returns:
(torch.Tensor): Output tensor of shape [batch_size, self.out_chans, height, width]
"""
return self.layers(input)
def __repr__(self):
        return f'ComplexConvBlock(in_chans={self.in_chans}, out_chans={self.out_chans}, ' \
               f'drop_prob={self.drop_prob})'
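# NOTE (observation, not in the original): despite its name, ComplexConvBlock
# is currently identical to ConvBlock and uses real-valued nn.Conv2d layers
# rather than the imported ComplexConv2d.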
class ComplexTransposeConvBlock(nn.Module):
"""
    A transpose convolutional block that consists of a single transposed
    convolution layer followed by instance normalization and LeakyReLU
    activation.
"""
def __init__(self, in_chans, out_chans):
"""
Args:
in_chans (int): Number of channels in the input.
out_chans (int): Number of channels in the output.
"""
super().__init__()
self.in_chans = in_chans
self.out_chans = out_chans
self.layers = nn.Sequential(
nn.ConvTranspose2d(in_chans, out_chans, kernel_size=2, stride=2, bias=False),
nn.InstanceNorm2d(out_chans),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
)
def forward(self, input):
"""
Args:
input (torch.Tensor): Input tensor of shape [batch_size, self.in_chans, height, width]
Returns:
(torch.Tensor): Output tensor of shape [batch_size, self.out_chans, height, width]
"""
return self.layers(input)
def __repr__(self):
        return f'ComplexTransposeConvBlock(in_chans={self.in_chans}, out_chans={self.out_chans})'
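# NOTE (observation, not in the original): like ComplexConvBlock above, this
# block uses the real-valued nn.ConvTranspose2d path and is identical to
# TransposeConvBlock below.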
class TransposeConvBlock(nn.Module):
"""
    A transpose convolutional block that consists of a single transposed
    convolution layer followed by instance normalization and LeakyReLU
    activation.
"""
def __init__(self, in_chans, out_chans):
"""
Args:
in_chans (int): Number of channels in the input.
out_chans (int): Number of channels in the output.
"""
super().__init__()
self.in_chans = in_chans
self.out_chans = out_chans
self.layers = nn.Sequential(
nn.ConvTranspose2d(in_chans, out_chans, kernel_size=2, stride=2, bias=False),
nn.InstanceNorm2d(out_chans),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
)
def forward(self, input):
"""
Args:
input (torch.Tensor): Input tensor of shape [batch_size, self.in_chans, height, width]
Returns:
(torch.Tensor): Output tensor of shape [batch_size, self.out_chans, height, width]
"""
return self.layers(input)
def __repr__(self):
        return f'TransposeConvBlock(in_chans={self.in_chans}, out_chans={self.out_chans})'
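# Illustrative usage (a sketch; the sample sizes are hypothetical):
# kernel_size=2 with stride=2 doubles the spatial resolution:
#   up = TransposeConvBlock(in_chans=64, out_chans=32)
#   y = up(torch.randn(4, 64, 16, 16))  # -> torch.Size([4, 32, 32, 32])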
class ComplexFourier(nn.Module):
def __init__(self, in_chans, out_chans, drop_prob, resolution):
super().__init__()
self.in_chans = in_chans
self.out_chans = out_chans
self.drop_prob = drop_prob
self.resolution= resolution
self.layer1=ComplexConv2d(in_channels=1, out_channels=resolution, kernel_size=(1,resolution),padding=(0,0), stride=1, bias=False)
self.layer2=ComplexConv2d(in_channels=1, out_channels=resolution, kernel_size=(1,resolution),padding=(0,0), stride=1, bias=False)
def forward(self, input):
"""
Args:
input (torch.Tensor): Input tensor of shape [batch_size, self.in_chans, height, width]
Returns:
(torch.Tensor): Output tensor of shape [batch_size, self.out_chans, height, width]
"""
        if len(input.shape) > 5:
            # Fix: Tensor.squeeze is not in-place; the original call discarded
            # its result, so the singleton dimension was never removed.
            input = input.squeeze(1)
input_r=input[...,0]
input_i=input[...,1]
#print("in",input_r.shape)
output_r,output_i=self.layer1(input_r,input_i)
output_r,output_i=output_r.squeeze(-1).unsqueeze(1),output_i.squeeze(-1).unsqueeze(1)
#print("out",output_r.shape)
output_r,output_i=self.layer2(output_r,output_i)
output_r,output_i=output_r.squeeze(-1),output_i.squeeze(-1)
#print("out2",output_r.shape)
return (output_r**2+output_i**2)**(1/2)
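# Reading of ComplexFourier (an interpretation, not stated in the original):
# each ComplexConv2d with kernel (1, resolution) mixes an entire row of the
# complex input, and the squeeze(-1)/unsqueeze(1) between the two layers
# transposes the tensor so the second layer mixes what were columns. The pair
# can therefore learn a separable 2-D linear transform (e.g. an inverse DFT
# over k-space), and the block returns the magnitude image.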
class ComplexEndToEnd(nn.Module):
def __init__(self, in_chans, out_chans, drop_prob, chans, num_pool_layers, resolution):
super().__init__()
self.in_chans = in_chans
self.out_chans = out_chans
self.drop_prob = drop_prob
self.resolution= resolution
self.layer1=ComplexConv2d(in_channels=1, out_channels=resolution, kernel_size=(1,resolution),padding=(0,0), stride=1, bias=False)
self.layer2=ComplexConv2d(in_channels=1, out_channels=resolution, kernel_size=(1,resolution),padding=(0,0), stride=1, bias=False)
self.chans = chans
self.num_pool_layers = num_pool_layers
self.down_sample_layers = nn.ModuleList([ConvBlock(in_chans, chans, drop_prob)])
ch = chans
for i in range(num_pool_layers - 1):
self.down_sample_layers += [ConvBlock(ch, ch * 2, drop_prob)]
ch *= 2
self.conv = ConvBlock(ch, ch * 2, drop_prob)
self.up_conv = nn.ModuleList()
self.up_transpose_conv = nn.ModuleList()
for i in range(num_pool_layers - 1):
self.up_transpose_conv += [TransposeConvBlock(ch * 2, ch)]
self.up_conv += [ConvBlock(ch * 2, ch, drop_prob)]
ch //= 2
self.up_transpose_conv += [TransposeConvBlock(ch * 2, ch)]
self.up_conv += [
nn.Sequential(
ConvBlock(ch * 2, ch, drop_prob),
nn.Conv2d(ch, self.out_chans, kernel_size=1, stride=1),
)]
def forward(self, input):
"""
Args:
input (torch.Tensor): Input tensor of shape [batch_size, self.in_chans, height, width]
Returns:
(torch.Tensor): Output tensor of shape [batch_size, self.out_chans, height, width]
"""
        if len(input.shape) > 5:
            # Fix: Tensor.squeeze is not in-place; the original call discarded
            # its result, so the singleton dimension was never removed.
            input = input.squeeze(1)
input_r=input[...,0]
input_i=input[...,1]
#print("in",input_r.shape)
output_r,output_i=self.layer1(input_r,input_i)
output_r,output_i=output_r.squeeze(-1).unsqueeze(1),output_i.squeeze(-1).unsqueeze(1)
#print("out",output_r.shape)
output_r,output_i=self.layer2(output_r,output_i)
output_r,output_i=output_r.squeeze(-1).unsqueeze(1),output_i.squeeze(-1).unsqueeze(1)
output_mag=(output_r**2+output_i**2)**(1/2)
output=output_mag
stack = []
# Apply down-sampling layers
for i, layer in enumerate(self.down_sample_layers):
output = layer(output)
stack.append(output)
output = F.avg_pool2d(output, kernel_size=2, stride=2, padding=0)
output = self.conv(output)
# Apply up-sampling layers
for transpose_conv, conv in zip(self.up_transpose_conv, self.up_conv):
downsample_layer = stack.pop()
output = transpose_conv(output)
            # Reflect-pad on the right/bottom if needed to handle odd input dimensions.
padding = [0, 0, 0, 0]
if output.shape[-1] != downsample_layer.shape[-1]:
padding[1] = 1 # Padding right
if output.shape[-2] != downsample_layer.shape[-2]:
padding[3] = 1 # Padding bottom
if sum(padding) != 0:
output = F.pad(output, padding, "reflect")
output = torch.cat([output, downsample_layer], dim=1)
output = conv(output)
return output
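# Illustrative construction (hypothetical hyper-parameters, chosen only to
# show the expected input layout):
#   model = ComplexEndToEnd(in_chans=1, out_chans=1, drop_prob=0.0,
#                           chans=32, num_pool_layers=4, resolution=320)
#   image = model(kspace)  # kspace: [batch, 1, 320, 320, 2], real/imag last
# The learned transform (layer1/layer2) maps k-space to a magnitude image,
# which the U-Net built in __init__ then refines.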
class KspaceEndToEnd(nn.Module):
def __init__(self, in_chans, out_chans, drop_prob, chans, num_pool_layers, resolution):
super().__init__()
self.in_chans = in_chans
self.out_chans = out_chans
self.drop_prob = drop_prob
self.resolution= resolution
self.layer1=ComplexConv2d(in_channels=1, out_channels=resolution, kernel_size=(1,resolution),padding=(0,0), stride=1, bias=False)
self.layer2=ComplexConv2d(in_channels=1, out_channels=resolution, kernel_size=(1,resolution),padding=(0,0), stride=1, bias=False)
self.chans = chans
self.num_pool_layers = num_pool_layers
self.down_sample_layers = nn.ModuleList([ConvBlock(in_chans, chans, drop_prob)])
ch = chans
for i in range(num_pool_layers - 1):
self.down_sample_layers += [ConvBlock(ch, ch * 2, drop_prob)]
ch *= 2
self.conv = ConvBlock(ch, ch * 2, drop_prob)
self.up_conv = nn.ModuleList()
self.up_transpose_conv = nn.ModuleList()
for i in range(num_pool_layers - 1):
self.up_transpose_conv += [TransposeConvBlock(ch * 2, ch)]
self.up_conv += [ConvBlock(ch * 2, ch, drop_prob)]
ch //= 2
self.up_transpose_conv += [TransposeConvBlock(ch * 2, ch)]
self.up_conv += [
nn.Sequential(
ConvBlock(ch * 2, ch, drop_prob),
nn.Conv2d(ch, self.out_chans, kernel_size=1, stride=1),
)]
def forward(self, input):
"""
Args:
input (torch.Tensor): Input tensor of shape [batch_size, self.in_chans, height, width]
Returns:
(torch.Tensor): Output tensor of shape [batch_size, self.out_chans, height, width]
"""
        if len(input.shape) > 5:
            # Fix: Tensor.squeeze is not in-place; the original call discarded
            # its result, so the singleton dimension was never removed.
            input = input.squeeze(1)
input_r=input[...,0]
input_i=input[...,1]
#print("in",input_r.shape)
        # Fix: in the original, `output` was read here before any assignment,
        # which raises UnboundLocalError. Seeding the U-Net with the input
        # magnitude is an assumption (chosen to mirror ComplexEndToEnd), not
        # behaviour confirmed by the original code.
        output = (input_r ** 2 + input_i ** 2) ** (1 / 2)
        stack = []
        # Apply down-sampling layers
        for i, layer in enumerate(self.down_sample_layers):
            output = layer(output)
stack.append(output)
output = F.avg_pool2d(output, kernel_size=2, stride=2, padding=0)
output = self.conv(output)
# Apply up-sampling layers
for transpose_conv, conv in zip(self.up_transpose_conv, self.up_conv):
downsample_layer = stack.pop()
output = transpose_conv(output)
            # Reflect-pad on the right/bottom if needed to handle odd input dimensions.
padding = [0, 0, 0, 0]
if output.shape[-1] != downsample_layer.shape[-1]:
padding[1] = 1 # Padding right
if output.shape[-2] != downsample_layer.shape[-2]:
padding[3] = 1 # Padding bottom
if sum(padding) != 0:
output = F.pad(output, padding, "reflect")
output = torch.cat([output, downsample_layer], dim=1)
output = conv(output)
output_r,output_i=self.layer1(input_r,input_i)
output_r,output_i=output_r.squeeze(-1).unsqueeze(1),output_i.squeeze(-1).unsqueeze(1)
#print("out",output_r.shape)
output_r,output_i=self.layer2(output_r,output_i)
output_r,output_i=output_r.squeeze(-1).unsqueeze(1),output_i.squeeze(-1).unsqueeze(1)
        output_mag = (output_r ** 2 + output_i ** 2) ** (1 / 2)
        # NOTE (observation): this reassignment discards the U-Net result
        # computed above, so the method returns only the layer1/layer2
        # magnitude. That looks unintentional, but the intended data flow is
        # not clear from the original, so no further change is made here.
        output = output_mag
return output | 37.888 | 137 | 0.614443 | 1,830 | 14,208 | 4.54918 | 0.085246 | 0.053814 | 0.033033 | 0.027027 | 0.932492 | 0.930691 | 0.930691 | 0.930691 | 0.928529 | 0.928529 | 0 | 0.019725 | 0.2721 | 14,208 | 375 | 138 | 37.888 | 0.785245 | 0.20777 | 0 | 0.885321 | 0 | 0 | 0.034137 | 0.032261 | 0 | 0 | 0 | 0 | 0 | 1 | 0.082569 | false | 0 | 0.045872 | 0.018349 | 0.211009 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cf497c4db1b99ccb9d0565f3d7d4d168a301f0c6 | 13,051 | py | Python | pyvetherpools/pools_vether_abi.py | vetherasset/py-vether-pools | 38f5aa60a90bdfa8cb7035f16978f73896f6fdcd | [
"Unlicense"
] | 1 | 2021-05-02T01:23:57.000Z | 2021-05-02T01:23:57.000Z | pyvetherpools/pools_vether_abi.py | vetherasset/py-vether-pools | 38f5aa60a90bdfa8cb7035f16978f73896f6fdcd | [
"Unlicense"
] | null | null | null | pyvetherpools/pools_vether_abi.py | vetherasset/py-vether-pools | 38f5aa60a90bdfa8cb7035f16978f73896f6fdcd | [
"Unlicense"
] | null | null | null | pools_vether_abi = """[
{
"inputs": [
{
"internalType": "address",
"name": "_base",
"type": "address"
},
{
"internalType": "address",
"name": "_token",
"type": "address"
},
{
"internalType": "contract iDAO",
"name": "_dao",
"type": "address"
}
],
"stateMutability": "payable",
"type": "constructor"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "owner",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "spender",
"type": "address"
},
{
"indexed": false,
"internalType": "uint256",
"name": "value",
"type": "uint256"
}
],
"name": "Approval",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "from",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "to",
"type": "address"
},
{
"indexed": false,
"internalType": "uint256",
"name": "value",
"type": "uint256"
}
],
"name": "Transfer",
"type": "event"
},
{
"inputs": [],
"name": "BASE",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "DAO",
"outputs": [
{
"internalType": "contract iDAO",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "TOKEN",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "_volume",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "_fee",
"type": "uint256"
}
],
"name": "_addPoolMetrics",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "_checkApprovals",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "_baseAmt",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "_tokenAmt",
"type": "uint256"
}
],
"name": "_decrementPoolBalances",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "_baseAmt",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "_tokenAmt",
"type": "uint256"
}
],
"name": "_incrementPoolBalances",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "account",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "_mint",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "_baseAmt",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "_tokenAmt",
"type": "uint256"
}
],
"name": "_setPoolAmounts",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "_baseAmt",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "_tokenAmt",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "_baseAmtStaked",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "_tokenAmtStaked",
"type": "uint256"
}
],
"name": "_setPoolBalances",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "token",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "add",
"outputs": [
{
"internalType": "bool",
"name": "success",
"type": "bool"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "owner",
"type": "address"
},
{
"internalType": "address",
"name": "spender",
"type": "address"
}
],
"name": "allowance",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "spender",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "approve",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "account",
"type": "address"
}
],
"name": "balanceOf",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "baseAmt",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "baseAmtStaked",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "burn",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "from",
"type": "address"
},
{
"internalType": "uint256",
"name": "value",
"type": "uint256"
}
],
"name": "burnFrom",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "decimals",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "fees",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "genesis",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "name",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "one",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "symbol",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "sync",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "tokenAmt",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "tokenAmtStaked",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "totalSupply",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "to",
"type": "address"
},
{
"internalType": "uint256",
"name": "value",
"type": "uint256"
}
],
"name": "transfer",
"outputs": [
{
"internalType": "bool",
"name": "success",
"type": "bool"
}
],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address payable",
"name": "to",
"type": "address"
},
{
"internalType": "uint256",
"name": "value",
"type": "uint256"
}
],
"name": "transferETH",
"outputs": [
{
"internalType": "bool",
"name": "success",
"type": "bool"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "from",
"type": "address"
},
{
"internalType": "address",
"name": "to",
"type": "address"
},
{
"internalType": "uint256",
"name": "value",
"type": "uint256"
}
],
"name": "transferFrom",
"outputs": [
{
"internalType": "bool",
"name": "success",
"type": "bool"
}
],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "recipient",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "transferTo",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "txCount",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "volume",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"stateMutability": "payable",
"type": "receive"
}
]
""" | 20.360374 | 44 | 0.366255 | 689 | 13,051 | 6.902758 | 0.101597 | 0.120269 | 0.174096 | 0.117325 | 0.875105 | 0.853659 | 0.835576 | 0.7664 | 0.733179 | 0.646552 | 0 | 0.029756 | 0.443797 | 13,051 | 641 | 45 | 20.360374 | 0.625431 | 0 | 0 | 0.595944 | 0 | 0 | 0.998008 | 0.003831 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cf8a0dfb91179183837abeea5a409767729c399d | 23,886 | py | Python | tests/feature/jsonnet/test_jsonnet.py | gfi-centre-ouest/docker-devbox-ddb | 1597d85ef6e9e8322cce195a454de54186ce9ec7 | [
"MIT"
] | 4 | 2020-06-11T20:54:47.000Z | 2020-09-22T13:07:17.000Z | tests/feature/jsonnet/test_jsonnet.py | gfi-centre-ouest/docker-devbox-ddb | 1597d85ef6e9e8322cce195a454de54186ce9ec7 | [
"MIT"
] | 113 | 2019-11-07T00:40:36.000Z | 2021-01-18T12:50:16.000Z | tests/feature/jsonnet/test_jsonnet.py | inetum-orleans/docker-devbox-ddb | 20c713cf7bfcaf289226a17a9648c17d16003b4d | [
"MIT"
] | null | null | null | import os
import pathlib
import re
import pytest
import yaml
from ddb.__main__ import load_registered_features, register_actions_in_event_bus
from ddb.config import config, migrations
from ddb.config.migrations import PropertyMigration
from ddb.feature import features
from ddb.feature.core import CoreFeature
from ddb.feature.docker import DockerFeature
from ddb.feature.file import FileFeature, FileWalkAction
from ddb.feature.jsonnet import JsonnetFeature
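# All tests below follow the same skeleton: register the features under test,
# call load_registered_features() and register_actions_in_event_bus(True),
# run a FileWalkAction (initialize + execute) against a fixture project set
# up by the `project_loader` fixture, and compare the generated files with
# the *.expected.* fixtures checked in next to them.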
class TestJsonnetAction:
def test_empty_project_without_core(self, project_loader):
project_loader("empty")
features.register(FileFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
def test_empty_project_with_core(self, project_loader):
project_loader("empty")
features.register(CoreFeature())
features.register(FileFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
@pytest.mark.skipif("os.name == 'nt'")
def test_named_user_group(self, project_loader):
project_loader("empty")
features.register(JsonnetFeature())
load_registered_features()
assert config.data.get('jsonnet.docker.user.name_to_uid')
assert config.data.get('jsonnet.docker.user.group_to_gid')
@pytest.mark.skipif("os.name != 'nt'")
def test_named_user_group_windows(self, project_loader):
project_loader("empty")
features.register(JsonnetFeature())
load_registered_features()
assert config.data.get('jsonnet.docker.user.name_to_uid') == {}
assert config.data.get('jsonnet.docker.user.group_to_gid') == {}
def test_example1(self, project_loader):
project_loader("example1")
features.register(CoreFeature())
features.register(FileFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('example1.json')
with open('example1.json', 'r') as f:
example = f.read()
with open('example1.expected.json', 'r') as f:
example_expected = f.read()
assert example == example_expected
def test_example1_yaml(self, project_loader):
project_loader("example1.yaml")
features.register(CoreFeature())
features.register(FileFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('example1.another')
with open('example1.another', 'r') as f:
example_another = f.read()
with open('example1.expected.another', 'r') as f:
example_another_expected = f.read()
assert example_another == example_another_expected
assert os.path.exists('example1.yaml')
with open('example1.yaml', 'r') as f:
example_yaml = f.read()
with open('example1.expected.yaml', 'r') as f:
example_yaml_expected = f.read()
assert example_yaml == example_yaml_expected
def test_example2(self, project_loader):
project_loader("example2")
features.register(CoreFeature())
features.register(FileFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('example2.json')
with open('example2.json', 'r') as f:
example = f.read()
with open('example2.expected.json', 'r') as f:
example_expected = f.read()
assert example == example_expected
def test_example3(self, project_loader):
project_loader("example3")
features.register(CoreFeature())
features.register(FileFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('uwsgi.ini')
with open('uwsgi.ini', 'r') as f:
iwsgi = f.read()
with open('uwsgi.expected.ini', 'r') as f:
iwsgi_expected = f.read()
assert iwsgi == iwsgi_expected
assert os.path.exists('init.sh')
with open('init.sh', 'r') as f:
init = f.read()
with open('init.expected.sh', 'r') as f:
init_expected = f.read()
assert init == init_expected
assert os.path.exists('cassandra.conf')
with open('cassandra.conf', 'r') as f:
cassandra = f.read()
with open('cassandra.expected.conf', 'r') as f:
cassandra_expected = f.read()
assert cassandra == cassandra_expected
def test_example3_with_dir(self, project_loader):
project_loader("example3.with_dir")
features.register(CoreFeature())
features.register(FileFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('./target/uwsgi.ini')
with open('./target/uwsgi.ini', 'r') as f:
iwsgi = f.read()
with open('uwsgi.expected.ini', 'r') as f:
iwsgi_expected = f.read()
assert iwsgi == iwsgi_expected
assert os.path.exists('./target/init.sh')
with open('./target/init.sh', 'r') as f:
init = f.read()
with open('init.expected.sh', 'r') as f:
init_expected = f.read()
assert init == init_expected
assert os.path.exists('./target/cassandra.conf')
with open('./target/cassandra.conf', 'r') as f:
cassandra = f.read()
with open('cassandra.expected.conf', 'r') as f:
cassandra_expected = f.read()
assert cassandra == cassandra_expected
def test_config_variables(self, project_loader):
project_loader("config_variables")
features.register(CoreFeature())
features.register(FileFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('variables.json')
with open('variables.json', 'r') as f:
variables = f.read()
with open('variables.expected.json', 'r') as f:
variables_expected = f.read()
assert variables == variables_expected
@pytest.mark.parametrize("variant", [
"test-dev",
"test-ci",
"test-stage",
"test-prod",
])
def test_docker_compose_traefik(self, project_loader, variant):
def before_load_config():
os.rename("ddb.%s.yml" % variant, "ddb.yml")
os.rename("docker-compose.expected.%s.yml" % variant, "docker-compose.expected.yml")
project_loader("docker_compose_traefik", before_load_config)
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
if os.name == 'nt':
mapped_cwd = re.sub(r"^([a-zA-Z]):", r"/\1", os.getcwd())
mapped_cwd = pathlib.Path(mapped_cwd).as_posix()
expected_data = expected_data.replace("%ddb.path.project%", mapped_cwd)
else:
expected_data = expected_data.replace("%ddb.path.project%", os.getcwd())
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected_data = expected_data.replace("%uid%", str(config.data.get('docker.user.uid')))
expected_data = expected_data.replace("%gid%", str(config.data.get('docker.user.gid')))
expected_data = expected_data.replace("%docker.debug.host%", str(config.data.get('docker.debug.host')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
@pytest.mark.parametrize("variant", [
"test-dev",
"test-ci",
"test-stage",
"test-prod",
])
def test_docker_compose_traefik_no_https(self, project_loader, variant):
def before_load_config():
os.rename("ddb.%s.yml" % variant, "ddb.yml")
os.rename("docker-compose.expected.%s.yml" % variant, "docker-compose.expected.yml")
project_loader("docker_compose_traefik_no_https", before_load_config)
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
if os.name == 'nt':
mapped_cwd = re.sub(r"^([a-zA-Z]):", r"/\1", os.getcwd())
mapped_cwd = pathlib.Path(mapped_cwd).as_posix()
expected_data = expected_data.replace("%ddb.path.project%", mapped_cwd)
else:
expected_data = expected_data.replace("%ddb.path.project%", os.getcwd())
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected_data = expected_data.replace("%uid%", str(config.data.get('docker.user.uid')))
expected_data = expected_data.replace("%gid%", str(config.data.get('docker.user.gid')))
expected_data = expected_data.replace("%docker.debug.host%", str(config.data.get('docker.debug.host')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
@pytest.mark.parametrize("variant", [
"dev",
"ci",
"prod",
])
def test_docker_compose_traefik_defaults(self, project_loader, variant):
def before_load_config():
os.rename("ddb.%s.yml" % variant, "ddb.yml")
os.rename("docker-compose.expected.%s.yml" % variant, "docker-compose.expected.yml")
project_loader("docker_compose_traefik_defaults", before_load_config)
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
if os.name == 'nt':
mapped_cwd = re.sub(r"^([a-zA-Z]):", r"/\1", os.getcwd())
mapped_cwd = pathlib.Path(mapped_cwd).as_posix()
expected_data = expected_data.replace("%ddb.path.project%", mapped_cwd)
else:
expected_data = expected_data.replace("%ddb.path.project%", os.getcwd())
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected_data = expected_data.replace("%uid%", str(config.data.get('docker.user.uid')))
expected_data = expected_data.replace("%gid%", str(config.data.get('docker.user.gid')))
expected_data = expected_data.replace("%docker.debug.host%", str(config.data.get('docker.debug.host')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
def test_docker_compose_variables(self, project_loader):
project_loader("docker_compose_variables")
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
if os.name == 'nt':
mapped_cwd = re.sub(r"^([a-zA-Z]):", r"/\1", os.getcwd())
mapped_cwd = pathlib.Path(mapped_cwd).as_posix()
expected_data = expected_data.replace("%ddb.path.project%", mapped_cwd)
else:
expected_data = expected_data.replace("%ddb.path.project%", os.getcwd())
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected_data = expected_data.replace("%uid%", str(config.data.get('docker.user.uid')))
expected_data = expected_data.replace("%gid%", str(config.data.get('docker.user.gid')))
expected_data = expected_data.replace("%docker.debug.host%", str(config.data.get('docker.debug.host')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
def test_docker_compose_project_dot_com(self, project_loader):
project_loader("docker_compose_project_dot_com")
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
if os.name == 'nt':
mapped_cwd = re.sub(r"^([a-zA-Z]):", r"/\1", os.getcwd())
mapped_cwd = pathlib.Path(mapped_cwd).as_posix()
expected_data = expected_data.replace("%ddb.path.project%", mapped_cwd)
else:
expected_data = expected_data.replace("%ddb.path.project%", os.getcwd())
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected_data = expected_data.replace("%uid%", str(config.data.get('docker.user.uid')))
expected_data = expected_data.replace("%gid%", str(config.data.get('docker.user.gid')))
expected_data = expected_data.replace("%docker.debug.host%", str(config.data.get('docker.debug.host')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
def test_docker_compose_excluded_services(self, project_loader):
project_loader("docker_compose_excluded_services")
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
def test_docker_compose_included_services(self, project_loader):
project_loader("docker_compose_included_services")
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
@pytest.mark.parametrize("variant", [
"_register_binary",
"_register_binary_with_one_option",
# "_register_binary_with_multiple_options", TODO handle (options)(c1)
"_shared_volumes",
"_mount_volumes",
"_mount_single_volume",
"_mount_single_volume_with_default",
"_expose"
])
def test_docker_compose_variants(self, project_loader, variant):
project_loader("docker_compose" + variant)
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
if os.name == 'nt':
mapped_cwd = re.sub(r"^([a-zA-Z]):", r"/\1", os.getcwd())
mapped_cwd = pathlib.Path(mapped_cwd).as_posix()
expected_data = expected_data.replace("%ddb.path.project%", mapped_cwd)
else:
expected_data = expected_data.replace("%ddb.path.project%", os.getcwd())
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected_data = expected_data.replace("%uid%", str(config.data.get('docker.user.uid')))
expected_data = expected_data.replace("%gid%", str(config.data.get('docker.user.gid')))
expected_data = expected_data.replace("%docker.debug.host%", str(config.data.get('docker.debug.host')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
if variant == '_mount_single_volume':
assert os.path.isdir('volumes/shared-volume')
if variant == '_mount_single_volume_with_default':
assert os.path.isdir('shared-volume')
@pytest.mark.parametrize("variant", [
"default",
"no_debug",
])
def test_docker_compose_xdebug(self, project_loader, variant):
def before_load_config():
os.rename("ddb.%s.yml" % variant, "ddb.yml")
os.rename("docker-compose.expected.%s.yml" % variant, "docker-compose.expected.yml")
project_loader("docker_compose_xdebug", before_load_config)
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('docker-compose.yml')
with open('docker-compose.yml', 'r') as f:
rendered = yaml.load(f.read(), yaml.SafeLoader)
with open('docker-compose.expected.yml', 'r') as f:
expected_data = f.read()
expected_data = expected_data.replace("%network_name%",
str(config.data.get('jsonnet.docker.compose.network_name')))
expected_data = expected_data.replace("%uid%", str(config.data.get('docker.user.uid')))
expected_data = expected_data.replace("%gid%", str(config.data.get('docker.user.gid')))
expected_data = expected_data.replace("%docker.debug.host%", str(config.data.get('docker.debug.host')))
expected = yaml.load(expected_data, yaml.SafeLoader)
assert rendered == expected
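# The tests above repeat the same %placeholder% substitution for every
# *.expected.yml fixture. A possible refactor (an illustrative sketch, not
# part of the original suite and unused by it) would factor that into a
# module-level helper:
def render_expected(expected_data):
    """Substitute the %placeholders% used in *.expected.yml fixtures and
    parse the result as YAML (illustrative helper)."""
    if os.name == 'nt':
        # Map a Windows drive letter (C:) to the Docker-style path (/C).
        mapped_cwd = re.sub(r"^([a-zA-Z]):", r"/\1", os.getcwd())
        mapped_cwd = pathlib.Path(mapped_cwd).as_posix()
    else:
        mapped_cwd = os.getcwd()
    replacements = {
        "%ddb.path.project%": mapped_cwd,
        "%network_name%": str(config.data.get('jsonnet.docker.compose.network_name')),
        "%uid%": str(config.data.get('docker.user.uid')),
        "%gid%": str(config.data.get('docker.user.gid')),
        "%docker.debug.host%": str(config.data.get('docker.debug.host')),
    }
    for key, value in replacements.items():
        expected_data = expected_data.replace(key, value)
    return yaml.load(expected_data, yaml.SafeLoader)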
class TestJsonnetAutofix:
def teardown_method(self, test_method):
migrations.set_history()
def test_autofix_variables_only(self, project_loader):
project_loader("autofix_variables_only")
config.args.autofix = True
history = (
PropertyMigration("old_property",
"new_property", since="v1.1.0"),
PropertyMigration("some.deep.old.property",
"some.another.new.property", since="v1.1.0"),
)
migrations.set_history(history)
features.register(CoreFeature())
features.register(FileFeature())
features.register(DockerFeature())
features.register(JsonnetFeature())
load_registered_features()
register_actions_in_event_bus(True)
action = FileWalkAction()
action.initialize()
action.execute()
assert os.path.exists('variables.json')
with open('variables.json', 'r') as f:
rendered = f.read()
with open('variables.expected.json', 'r') as f:
expected = f.read()
assert expected == rendered
with open('variables.json.jsonnet', 'r') as f:
source = f.read()
with open('variables.json.autofix', 'r') as f:
fixed = f.read()
assert source == fixed
| 36.467176 | 115 | 0.616135 | 2,623 | 23,886 | 5.421655 | 0.059855 | 0.08607 | 0.012376 | 0.070881 | 0.884959 | 0.862246 | 0.831517 | 0.825469 | 0.814992 | 0.804303 | 0 | 0.00197 | 0.25605 | 23,886 | 654 | 116 | 36.522936 | 0.798312 | 0.002805 | 0 | 0.744898 | 0 | 0 | 0.158374 | 0.067011 | 0 | 0 | 0 | 0.001529 | 0.1 | 1 | 0.05102 | false | 0 | 0.026531 | 0 | 0.081633 | 0.002041 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d858c06bb9b347f4b79b183fae56fd871d37885f | 8,654 | py | Python | WeOptPy/tests/test_task.py | kb2623/WeOptPy | 2e9e75acf8fedde0ae4c99da6c786a712d4f011c | [
"MIT"
] | 1 | 2021-05-12T10:02:21.000Z | 2021-05-12T10:02:21.000Z | WeOptPy/tests/test_task.py | kb2623/WeOptPy | 2e9e75acf8fedde0ae4c99da6c786a712d4f011c | [
"MIT"
] | null | null | null | WeOptPy/tests/test_task.py | kb2623/WeOptPy | 2e9e75acf8fedde0ae4c99da6c786a712d4f011c | [
"MIT"
] | null | null | null | # encoding=utf8
"""Task test case module."""
from unittest import TestCase
import numpy as np
from numpy import random as rnd
from WeOptPy.util import full_array, FesException, GenException, RefException
from WeOptPy.task import StoppingTask, ThrowingTask
from WeOptPy.task.interfaces import UtilityFunction
class MyBenchmark(UtilityFunction):
def __init__(self):
self.Lower = -10.0
self.Upper = 10
def function(self):
def evaluate(x): return sum(x ** 2)
return evaluate
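# Illustrative wiring (mirrors the setUp methods below; values hypothetical):
#   task = StoppingTask(d=10, no_fes=100, no_gen=100, benchmark=MyBenchmark())
#   fitness = task.eval(np.zeros(10))  # 0.0 for this sum-of-squares benchmark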
class StoppingTaskBaseTestCase(TestCase):
r"""Test case for testing `Task`, `StoppingTask` and `CountingTask` classes.
Date:
April 2019
Author:
Klemen Berkovič
See Also:
* :class:`WeOptPy.util.Task`
* :class:`WeOptPy.util.CountingTask`
* :class:`WeOptPy.util.StoppingTask`
"""
def setUp(self):
self.D = 6
self.Lower, self.Upper = [2, 1, 1], [10, 10, 2]
self.task = StoppingTask(lower=self.Lower, upper=self.Upper, d=self.D)
def test_dim_ok(self):
self.assertEqual(self.D, self.task.D)
self.assertEqual(self.D, self.task.dim())
def test_lower(self):
self.assertTrue(np.array_equal(full_array(self.Lower, self.D), self.task.Lower))
self.assertTrue(np.array_equal(full_array(self.Lower, self.D), self.task.lower()))
def test_upper(self):
self.assertTrue(np.array_equal(full_array(self.Upper, self.D), self.task.Upper))
self.assertTrue(np.array_equal(full_array(self.Upper, self.D), self.task.upper()))
def test_range(self):
self.assertTrue(np.array_equal(full_array(self.Upper, self.D) - full_array(self.Lower, self.D), self.task.bRange))
self.assertTrue(np.array_equal(full_array(self.Upper, self.D) - full_array(self.Lower, self.D), self.task.range()))
def test_ngens(self):
self.assertEqual(np.inf, self.task.nGEN)
def test_nfess(self):
self.assertEqual(np.inf, self.task.nFES)
def test_stop_cond(self):
self.assertFalse(self.task.stop_cond())
def test_stop_condi(self):
self.assertFalse(self.task.stop_cond_i())
def test_eval(self):
self.assertRaises(AttributeError, lambda: self.task.eval([]))
def test_evals(self):
self.assertEqual(0, self.task.evals())
def test_iters(self):
self.assertEqual(0, self.task.iters())
def test_next_iter(self):
self.assertEqual(None, self.task.next_iteration())
def test_is_feasible(self):
self.assertFalse(self.task.is_feasible(full_array([1, 2, 3], self.D)))
class StoppingTaskTestCase(TestCase):
r"""Test case for testing `Task`, `StoppingTask` and `CountingTask` classes.
Date:
April 2019
Author:
Klemen Berkovič
See Also:
* :class:`WeOptPy.util.Task`
* :class:`WeOptPy.util.CountingTask`
* :class:`WeOptPy.util.StoppingTask`
"""
def setUp(self):
self.D, self.nFES, self.nGEN = 10, 10, 10
self.t = StoppingTask(d=self.D, no_fes=self.nFES, no_gen=self.nGEN, rvalue=1, benchmark=MyBenchmark())
def test_isFeasible_fine(self):
x = np.full(self.D, 10)
self.assertTrue(self.t.is_feasible(x))
x = np.full(self.D, -10)
self.assertTrue(self.t.is_feasible(x))
x = rnd.uniform(-10, 10, self.D)
self.assertTrue(self.t.is_feasible(x))
x = np.full(self.D, -20)
self.assertFalse(self.t.is_feasible(x))
x = np.full(self.D, 20)
self.assertFalse(self.t.is_feasible(x))
def test_nextIter_fine(self):
for i in range(self.nGEN):
self.assertFalse(self.t.stop_cond())
self.t.next_iteration()
self.assertTrue(self.t.stop_cond())
def test_stopCondI(self):
for i in range(self.nGEN): self.assertFalse(self.t.stop_cond_i(), msg='Error at %s iteration!!!' % (i))
self.assertTrue(self.t.stop_cond_i())
def test_eval_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nFES): self.assertAlmostEqual(self.t.eval(x), self.D, msg='Error at %s iteration!!!' % (i))
self.assertTrue(self.t.stop_cond())
def test_eval_over_nFES_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nFES): self.t.eval(x)
self.assertEqual(np.inf, self.t.eval(x))
self.assertTrue(self.t.stop_cond())
def test_eval_over_nGEN_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nGEN): self.t.next_iteration()
self.assertEqual(np.inf, self.t.eval(x))
self.assertTrue(self.t.stop_cond())
def test_nFES_count_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nFES):
self.t.eval(x)
self.assertEqual(self.t.Evals, i + 1, 'Error at %s. evaluation' % (i + 1))
def test_nGEN_count_fine(self):
for i in range(self.nGEN):
self.t.next_iteration()
self.assertEqual(self.t.Iters, i + 1, 'Error at %s. iteration' % (i + 1))
def test_stopCond_evals_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nFES - 1):
self.t.eval(x)
self.assertFalse(self.t.stop_cond())
self.t.eval(x)
self.assertTrue(self.t.stop_cond())
def test_stopCond_iters_fine(self):
for i in range(self.nGEN - 1):
self.t.next_iteration()
self.assertFalse(self.t.stop_cond())
self.t.next_iteration()
self.assertTrue(self.t.stop_cond())
def test_stopCond_refValue_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nGEN - 5):
self.assertFalse(self.t.stop_cond())
self.assertEqual(self.D, self.t.eval(x))
self.t.next_iteration()
x = np.full(self.D, 0.0)
self.assertEqual(0, self.t.eval(x))
self.assertTrue(self.t.stop_cond())
self.assertEqual(self.nGEN - 5, self.t.Iters)
def test_print_conv_one_fine(self):
r1, r2 = [], []
for i in range(self.nFES):
x = np.full(self.D, 10 - i)
			r1.append(i + 1)
			r2.append(self.t.eval(x))
t_r1, t_r2 = self.t.return_conv()
self.assertTrue(np.array_equal(r1, t_r1))
self.assertTrue(np.array_equal(r2, t_r2))
def test_print_conv_two_fine(self):
r1, r2 = [], []
for i in range(self.nFES):
x = np.full(self.D, 10 - i if i not in (3, 4, 5) else 4)
			r1.append(i + 1)
			r2.append(self.t.eval(x))
t_r1, t_r2 = self.t.return_conv()
self.assertTrue(np.array_equal(r2, t_r2))
self.assertTrue(np.array_equal(r1, t_r1))
class ThrowingTaskTestCase(TestCase):
r"""Test case for testing `ThrowingTask` class.
Date:
April 2019
Author:
Klemen Berkovič
See Also:
		* :class:`WeOptPy.util.ThrowingTask`
"""
def setUp(self):
self.D, self.nFES, self.nGEN = 10, 10, 10
self.t = ThrowingTask(d=self.D, no_fes=self.nFES, no_gen=self.nGEN, rvalue=0, benchmark=MyBenchmark())
def test_isFeasible_fine(self):
x = np.full(self.D, 10)
self.assertTrue(self.t.is_feasible(x))
x = np.full(self.D, -10)
self.assertTrue(self.t.is_feasible(x))
x = rnd.uniform(-10, 10, self.D)
self.assertTrue(self.t.is_feasible(x))
x = np.full(self.D, -20)
self.assertFalse(self.t.is_feasible(x))
x = np.full(self.D, 20)
self.assertFalse(self.t.is_feasible(x))
def test_nextIter_fine(self):
for i in range(self.nGEN):
self.assertFalse(self.t.stop_cond())
self.t.next_iteration()
self.assertTrue(self.t.stop_cond())
def test_stopCondI(self):
for i in range(self.nGEN): self.assertFalse(self.t.stop_cond_i())
self.assertTrue(self.t.stop_cond_i())
def test_eval_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nFES): self.assertAlmostEqual(self.t.eval(x), self.D, msg='Error at %s iteration!!!' % (i))
self.assertRaises(FesException, lambda: self.t.eval(x))
def test_eval_over_nFES_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nFES):
self.t.eval(x)
self.assertRaises(FesException, lambda: self.t.eval(x))
def test_eval_over_nGEN_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nGEN): self.t.next_iteration()
self.assertRaises(GenException, lambda: self.t.eval(x))
def test_nFES_count_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nFES):
self.t.eval(x)
self.assertEqual(self.t.Evals, i + 1, 'Error at %s. evaluation' % (i + 1))
def test_nGEN_count_fine(self):
for i in range(self.nGEN):
self.t.next_iteration()
self.assertEqual(self.t.Iters, i + 1, 'Error at %s. iteration' % (i + 1))
def test_stopCond_evals_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nFES - 1):
self.t.eval(x)
self.assertFalse(self.t.stop_cond())
self.t.eval(x)
self.assertTrue(self.t.stop_cond())
def test_stopCond_iters_fine(self):
for i in range(self.nGEN - 1):
self.t.next_iteration()
self.assertFalse(self.t.stop_cond())
self.t.next_iteration()
self.assertTrue(self.t.stop_cond())
def test_stopCond_refValue_fine(self):
x = np.full(self.D, 1.0)
for i in range(self.nGEN - 5):
self.assertFalse(self.t.stop_cond())
self.assertEqual(self.D, self.t.eval(x))
self.t.next_iteration()
x = np.full(self.D, 0.0)
self.assertEqual(0, self.t.eval(x))
self.assertRaises(RefException, lambda: self.t.eval(x))
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
| 29.535836 | 117 | 0.70037 | 1,474 | 8,654 | 3.991859 | 0.090231 | 0.063732 | 0.028552 | 0.044867 | 0.819171 | 0.811863 | 0.78416 | 0.755438 | 0.743202 | 0.734874 | 0 | 0.019933 | 0.136238 | 8,654 | 292 | 118 | 29.636986 | 0.767224 | 0.091056 | 0 | 0.684729 | 0 | 0 | 0.020488 | 0 | 0 | 0 | 0 | 0 | 0.344828 | 1 | 0.211823 | false | 0 | 0.029557 | 0.004926 | 0.26601 | 0.009852 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2b19d4a35f396e61e9eae2f68f237c1cb5e3c171 | 35,457 | py | Python | tests/test_dump.py | abs-tudelft/vhdeps | dfd679e1c3d8fa1c61285558b0589f40ecd40441 | [
"Apache-2.0"
] | 17 | 2019-06-06T06:28:38.000Z | 2021-04-23T09:52:10.000Z | tests/test_dump.py | jonasjj/vhdeps | dfd679e1c3d8fa1c61285558b0589f40ecd40441 | [
"Apache-2.0"
] | 34 | 2019-06-17T11:55:28.000Z | 2020-10-01T11:27:49.000Z | tests/test_dump.py | jvanstraten/vhdeps | dfd679e1c3d8fa1c61285558b0589f40ecd40441 | [
"Apache-2.0"
] | 1 | 2021-04-23T05:22:41.000Z | 2021-04-23T05:22:41.000Z | """Tests the dependency analyzer and `dump` backend."""
from unittest import TestCase
import os
import tempfile
from plumbum import local
from .common import run_vhdeps
DIR = os.path.realpath(os.path.dirname(__file__))
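# Lines produced by the `dump` backend have the format observed in the
# expected outputs below: "<role> <library> <vhdl-version> <path>"; in these
# fixtures every matched file is a toplevel, so the role column is "top".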
class TestDump(TestCase):
"""Tests the dependency analyzer and `dump` backend."""
def test_basic(self):
"""Test basic functionality of the dump backend"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/multiple-ok')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/multiple-ok/bar_tc.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/baz.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/foo_tc.vhd',
]) + '\n')
def test_to_file(self):
"""Test outputting a dependency dump to a file"""
with tempfile.TemporaryDirectory() as tempdir:
code, _, _ = run_vhdeps(
'dump',
'-i', DIR + '/simple/multiple-ok',
'-o', tempdir+'/output')
self.assertEqual(code, 0)
with open(tempdir+'/output', 'r') as fildes:
self.assertEqual(fildes.read(), '\n'.join([
'top work 2008 ' + DIR + '/simple/multiple-ok/bar_tc.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/baz.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/foo_tc.vhd',
]) + '\n')
def test_default_include(self):
"""Test implicit working directory inclusion"""
with local.cwd(DIR + '/simple/multiple-ok'):
code, out, err = run_vhdeps('dump')
self.assertEqual(code, 0)
self.assertTrue('Including the current working directory recursively by default' in err)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/multiple-ok/bar_tc.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/baz.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/foo_tc.vhd',
]) + '\n')
def test_default_include_by_file(self):
"""Test including files instead of directories"""
code, out, _ = run_vhdeps(
'dump',
'-i', DIR + '/simple/multiple-ok',
'-i', DIR + '/simple/all-good/test_tc.vhd')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/multiple-ok/bar_tc.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/baz.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/foo_tc.vhd',
'top work 2008 ' + DIR + '/simple/all-good/test_tc.vhd',
]) + '\n')
def test_default_include_by_glob(self):
"""Test including files using glob syntax"""
code, out, _ = run_vhdeps(
'dump',
'-i', DIR + '/simple/multiple-ok/ba*.vhd')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/multiple-ok/bar_tc.vhd',
'top work 2008 ' + DIR + '/simple/multiple-ok/baz.vhd',
]) + '\n')
def test_default_filters(self):
"""Test the default version/mode filters"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/filtering')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/filtering/new.08.vhd',
'top work 1993 ' + DIR + '/simple/filtering/old.93.vhd',
'top work 2008 ' + DIR + '/simple/filtering/simulation.sim.vhd',
]) + '\n')
def test_fixed_version_1993(self):
"""Test the required version filter"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/filtering', '-v93')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 1993 ' + DIR + '/simple/filtering/old.93.vhd',
'top work 1993 ' + DIR + '/simple/filtering/simulation.sim.vhd',
]) + '\n')
def test_desired_version(self):
"""Test the desired version filter"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/filtering', '-d93')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/filtering/new.08.vhd',
'top work 1993 ' + DIR + '/simple/filtering/old.93.vhd',
'top work 1993 ' + DIR + '/simple/filtering/simulation.sim.vhd',
]) + '\n')
def test_synthesis(self):
"""Test the synthesis filter"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/filtering', '-msyn')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/filtering/new.08.vhd',
'top work 1993 ' + DIR + '/simple/filtering/old.93.vhd',
'top work 2008 ' + DIR + '/simple/filtering/synthesis.syn.vhd',
]) + '\n')
def test_no_filtering(self):
"""Test all filters disabled"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/filtering', '-mall')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/filtering/new.08.vhd',
'top work 1993 ' + DIR + '/simple/filtering/old.93.vhd',
'top work 2008 ' + DIR + '/simple/filtering/simulation.sim.vhd',
'top work 2008 ' + DIR + '/simple/filtering/synthesis.syn.vhd',
]) + '\n')
def test_selected_entities(self):
"""Test toplevel entity selection"""
code, out, _ = run_vhdeps('dump', 'new', 'old', '-i', DIR + '/simple/filtering')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/filtering/new.08.vhd',
'top work 1993 ' + DIR + '/simple/filtering/old.93.vhd',
]) + '\n')
def test_selected_entity_glob(self):
"""Test toplevel entity selection with fnmatch globs"""
code, out, _ = run_vhdeps('dump', 's*', '-i', DIR + '/simple/filtering')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/filtering/simulation.sim.vhd',
]) + '\n')
def test_selected_entity_no_match(self):
"""Test toplevel entity selection with globs that don't match
anything"""
code, out, err = run_vhdeps('dump', 's*', 'x*', '-i', DIR + '/simple/filtering')
self.assertEqual(code, 0)
self.assertTrue('Warning: work.x* did not match anything.' in err)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/filtering/simulation.sim.vhd',
]) + '\n')
def test_conflict(self):
"""Test conflicting entities (defined in multiple files)"""
code, _, err = run_vhdeps(
'dump',
'-i', DIR + '/simple/all-good',
'-i', DIR + '/simple/timeout')
self.assertEqual(code, 1)
self.assertTrue('ResolutionError: entity work.test_tc is defined in '
'multiple, ambiguous files:' in err)
def test_ignore_pragmas(self):
"""Test ignore-use pragmas"""
code, _, _ = run_vhdeps('dump', '-i', DIR + '/complex/ignore-use')
self.assertEqual(code, 0)
def test_missing_package(self):
"""Test missing package detection/error"""
code, _, err = run_vhdeps('dump', '-i', DIR + '/complex/vhlib/util/UtilMem64_pkg.vhd')
self.assertEqual(code, 1)
self.assertTrue('complex/vhlib/util/UtilMem64_pkg.vhd' in err)
self.assertTrue('could not find package work.utilstr_pkg' in err)
def test_missing_component(self):
"""Test missing component detection/error"""
code, _, err = run_vhdeps('dump', '-i', DIR + '/complex/missing-component')
self.assertEqual(code, 1)
self.assertTrue('could not find component declaration for missing' in err)
def test_black_box_enforce(self):
"""Test black box detection/error"""
code, _, err = run_vhdeps(
'dump',
'-i', DIR + '/complex/vhlib/util',
'-i', DIR + '/complex/vhlib/stream/Stream_pkg.vhd',
'-i', DIR + '/complex/vhlib/stream/StreamBuffer.vhd')
self.assertEqual(code, 1)
self.assertTrue('complex/vhlib/stream/StreamBuffer.vhd' in err)
self.assertTrue('black box: could not find entity work.streamfifo' in err)
def test_black_box_ignore(self):
"""Test ignoring a black box through the -x flag"""
code, _, _ = run_vhdeps(
'dump',
'-i', DIR + '/complex/vhlib/util',
'-x', DIR + '/complex/vhlib/stream/Stream_pkg.vhd',
'-i', DIR + '/complex/vhlib/stream/StreamBuffer.vhd')
self.assertEqual(code, 0)
def test_missing_filtered(self):
"""Test detection of missing dependencies due to active filters"""
code, _, err = run_vhdeps('dump', '-i', DIR + '/complex/missing-filtered')
self.assertEqual(code, 1)
self.assertTrue('entity work.synth_only is defined, but only in files '
'that were filtered out:' in err)
self.assertTrue('synth_only.syn.vhd is synthesis-only' in err)
def test_libraries(self):
"""Test multiple libraries"""
code, out, _ = run_vhdeps(
'dump',
'-i', DIR + '/simple/all-good',
'-i', 'timeout:' + DIR + '/simple/timeout')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top timeout 2008 ' + DIR + '/simple/timeout/test_tc.vhd',
'top work 2008 ' + DIR + '/simple/all-good/test_tc.vhd',
]) + '\n')
def test_version_override(self):
"""Test version overrides in the include flag"""
code, out, _ = run_vhdeps(
'dump',
'-i', DIR + '/simple/all-good',
'-i', '93:timeout:' + DIR + '/simple/timeout')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top timeout 1993 ' + DIR + '/simple/timeout/test_tc.vhd',
'top work 2008 ' + DIR + '/simple/all-good/test_tc.vhd',
]) + '\n')
def test_ambiguous_08(self):
"""Test disambiguation by default desired version"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/ambiguous')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/ambiguous/test.08.sim.vhd',
]) + '\n')
def test_ambiguous_93(self):
"""Test disambiguation by specific desired version"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/ambiguous', '-d', '93')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 1993 ' + DIR + '/simple/ambiguous/test.93.sim.vhd',
]) + '\n')
def test_ambiguous_syn(self):
"""Test disambiguation by synthesis vs. simulation mode"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/simple/ambiguous', '-m', 'syn')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/simple/ambiguous/test.syn.vhd',
]) + '\n')
def test_component_circle(self):
"""Test recursive instantiation using components"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/complex/component-circle')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'dep work 2008 ' + DIR + '/complex/component-circle/a.vhd',
'dep work 2008 ' + DIR + '/complex/component-circle/b.vhd',
]) + '\n')
def test_component_in_inst(self):
"""Test component keyword in instantiation"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/complex/component-in-inst')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/complex/component-in-inst/a.vhd',
'dep work 2008 ' + DIR + '/complex/component-in-inst/b.vhd',
]) + '\n')
def test_entity_circle(self):
"""Test the error message for a true circular dependency"""
code, _, err = run_vhdeps('dump', '-i', DIR + '/complex/entity-circle')
self.assertEqual(code, 1)
self.assertTrue('ResolutionError: circular dependency:' in err)
def test_multi_unit_circle(self):
"""Test circular dependencies caused by multiple design units per
file"""
code, _, err = run_vhdeps('dump', '-i', DIR + '/complex/multi-unit-circle')
self.assertEqual(code, 1)
self.assertTrue('ResolutionError: circular dependency:' in err)
def test_multi_unit_design(self):
"""Test dependency analysis when multiple entities are defined per
file"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/complex/multi-unit-design')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'dep work 2008 ' + DIR + '/complex/multi-unit-design/ab.vhd',
'dep work 2008 ' + DIR + '/complex/multi-unit-design/cd.vhd',
'top work 2008 ' + DIR + '/complex/multi-unit-design/test_tc.vhd',
]) + '\n')
def test_multi_tc_per_file(self):
"""Test the dump backend with multiple test cases per file"""
code, out, _ = run_vhdeps('dump', '-i', DIR + '/complex/multi-tc-per-file')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 2008 ' + DIR + '/complex/multi-tc-per-file/test_tc.vhd',
]) + '\n')
def test_vhlib_default(self):
"""Test the dependency analyzer with vhlib, default filters"""
#pylint: disable=C0301
self.maxDiff = None #pylint: disable=C0103
code, out, _ = run_vhdeps('dump', '-i', DIR + '/complex/vhlib')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'dep work 2008 ' + DIR + '/complex/vhlib/sim/TestCase_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/sim/SimDataComms_pkg.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/sim/SimDataComms_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamMonitor_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamSource_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamSink_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/Stream_pkg.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/sim/ClockGen_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_Fixed_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_RoundRobin_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_RRSticky_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamArb.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_0_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_200_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_2_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_4_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_6_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/util/UtilInt_pkg.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamElementCounter/StreamElementCounter_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamElementCounter/StreamElementCounter_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamElementCounter/StreamElementCounter_16_5_32_9_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamElementCounter/StreamElementCounter_8_3_63_6_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamElementCounter.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_Increase_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_Reduce_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_Same_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_2_2_8_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_32_5_16_4_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_5_4_3_2_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_8_4_8_3_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamGearbox.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamGearboxParallelizer.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamGearboxSerializer.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamNormalizer.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamNormalizer/StreamNormalizer_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamNormalizer/StreamNormalizer_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/util/UtilMisc_pkg.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamPipelineBarrel.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineBarrel/StreamPipelineBarrel_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineBarrel/StreamPipelineBarrel_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineControl/StreamPipelineControl_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineControl/StreamPipelineControl_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineControl/StreamPipelineControl_20_3_t_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineControl/StreamPipelineControl_5_1_f_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamPrefixSum.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPrefixSum/StreamPrefixSum_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPrefixSum/StreamPrefixSum_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPRNG/StreamPRNG_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPRNG/StreamPRNG_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPRNG/StreamPRNG_12_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPRNG/StreamPRNG_8_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamPRNG.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_1_1_7_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_4_3_4_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_8_3_4_2_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_1_1_7_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_4_3_4_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_8_3_4_2_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamPipelineControl.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamFIFOCounter.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/util/UtilRam_pkg.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamFIFO.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamBuffer.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamReshaper.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSink/StreamSink_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamSlice.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSlice/StreamSlice_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSlice/StreamSlice_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSource/StreamSource_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamSource_mdl.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamMonitor_mdl.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamSink_mdl.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/StreamSync.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/sim/ClockGen_mdl.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSync/StreamSync_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSync/StreamSync_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/util/UtilRam1R1W.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/util/UtilConv_pkg.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/util/UtilStr_pkg.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/util/UtilMem64_pkg.vhd',
]) + '\n')
def test_vhlib_93_desired(self):
"""Test the dependency analyzer with vhlib, preferring v93"""
#pylint: disable=C0301
self.maxDiff = None #pylint: disable=C0103
code, out, _ = run_vhdeps('dump', '-i', DIR + '/complex/vhlib', '-d', '93')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'dep work 2008 ' + DIR + '/complex/vhlib/sim/TestCase_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/sim/SimDataComms_pkg.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/sim/SimDataComms_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamMonitor_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamSource_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamSink_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_tv.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/Stream_pkg.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/sim/ClockGen_pkg.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_Fixed_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_RoundRobin_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamArb/StreamArb_RRSticky_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamArb.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_0_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_200_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_2_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_4_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamBuffer/StreamBuffer_6_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilInt_pkg.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamElementCounter/StreamElementCounter_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamElementCounter/StreamElementCounter_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamElementCounter/StreamElementCounter_16_5_32_9_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamElementCounter/StreamElementCounter_8_3_63_6_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamElementCounter.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_Increase_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_Reduce_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamFIFO/StreamFIFO_Same_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_2_2_8_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_32_5_16_4_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_5_4_3_2_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamGearbox/StreamGearbox_8_4_8_3_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamGearbox.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamGearboxParallelizer.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamGearboxSerializer.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamNormalizer.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamNormalizer/StreamNormalizer_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamNormalizer/StreamNormalizer_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilMisc_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamPipelineBarrel.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineBarrel/StreamPipelineBarrel_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineBarrel/StreamPipelineBarrel_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineControl/StreamPipelineControl_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineControl/StreamPipelineControl_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineControl/StreamPipelineControl_20_3_t_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPipelineControl/StreamPipelineControl_5_1_f_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamPrefixSum.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPrefixSum/StreamPrefixSum_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPrefixSum/StreamPrefixSum_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPRNG/StreamPRNG_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPRNG/StreamPRNG_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPRNG/StreamPRNG_12_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamPRNG/StreamPRNG_8_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamPRNG.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_1_1_7_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_4_3_4_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperCtrl_8_3_4_2_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_tv.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_1_1_7_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_4_3_4_3_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamReshaper/StreamReshaperLast_8_3_4_2_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamPipelineControl.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamFIFOCounter.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilRam_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamFIFO.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamBuffer.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamReshaper.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSink/StreamSink_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamSlice.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSlice/StreamSlice_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSlice/StreamSlice_tc.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSource/StreamSource_tc.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamSource_mdl.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamMonitor_mdl.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/model/StreamSink_mdl.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamSync.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/sim/ClockGen_mdl.sim.08.vhd',
'dep work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSync/StreamSync_tb.sim.08.vhd',
'top work 2008 ' + DIR + '/complex/vhlib/stream/test/StreamSync/StreamSync_tc.sim.08.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilRam1R1W.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilConv_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilStr_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilMem64_pkg.vhd',
]) + '\n')
def test_vhlib_93_required(self):
"""Test the dependency analyzer with vhlib, synthesis only"""
self.maxDiff = None
code, out, _ = run_vhdeps('dump', '-i', DIR + '/complex/vhlib', '-v', '93')
self.assertEqual(code, 0)
self.assertEqual(out, '\n'.join([
'top work 1993 ' + DIR + '/complex/vhlib/stream/StreamArb.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilInt_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/Stream_pkg.vhd',
'top work 1993 ' + DIR + '/complex/vhlib/stream/StreamElementCounter.vhd',
'top work 1993 ' + DIR + '/complex/vhlib/stream/StreamGearbox.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamGearboxParallelizer.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamGearboxSerializer.vhd',
'top work 1993 ' + DIR + '/complex/vhlib/stream/StreamNormalizer.vhd',
'top work 1993 ' + DIR + '/complex/vhlib/stream/StreamPrefixSum.vhd',
'top work 1993 ' + DIR + '/complex/vhlib/stream/StreamPRNG.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamPipelineControl.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilMisc_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamPipelineBarrel.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamFIFOCounter.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilRam_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamFIFO.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamBuffer.vhd',
'top work 1993 ' + DIR + '/complex/vhlib/stream/StreamReshaper.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/stream/StreamSlice.vhd',
'top work 1993 ' + DIR + '/complex/vhlib/stream/StreamSync.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilRam1R1W.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilConv_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilStr_pkg.vhd',
'dep work 1993 ' + DIR + '/complex/vhlib/util/UtilMem64_pkg.vhd',
]) + '\n')
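# Note on the line format asserted throughout these tests: each record the
# dump backend emits appears to read "<top|dep> <library> <vhdl-version>
# <path>", where "top" marks a selected toplevel unit and "dep" a file
# resolved as one of its dependencies. A hedged parsing sketch (the include
# path is an assumption, not part of the test suite):
#
#   code, out, _ = run_vhdeps('dump', '-i', '/path/to/vhdl')
#   for record in out.splitlines():
#       mode, library, version, path = record.split(maxsplit=3)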
| 65.661111 | 132 | 0.618355 | 4,452 | 35,457 | 4.818958 | 0.057727 | 0.109537 | 0.152419 | 0.17717 | 0.899832 | 0.886828 | 0.865619 | 0.84511 | 0.806004 | 0.778736 | 0 | 0.056873 | 0.229376 | 35,457 | 539 | 133 | 65.782931 | 0.728297 | 0.048256 | 0 | 0.687636 | 0 | 0 | 0.572455 | 0.424563 | 0 | 0 | 0 | 0 | 0.154013 | 1 | 0.073753 | false | 0 | 0.010846 | 0 | 0.086768 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2b44e2611f7774f99ff05e9717211e06142ca202 | 2,116 | py | Python | mlapp/handlers/file_storages/file_storage_interface.py | kerenleibovich/mlapp | 0b8dfaba7a7070ab68cb29ff61dd1c7dd8076693 | [
"Apache-2.0"
] | 33 | 2021-02-26T10:41:09.000Z | 2021-11-07T12:35:32.000Z | mlapp/handlers/file_storages/file_storage_interface.py | kerenleibovich/mlapp | 0b8dfaba7a7070ab68cb29ff61dd1c7dd8076693 | [
"Apache-2.0"
] | 17 | 2021-03-04T15:37:21.000Z | 2021-04-06T12:00:13.000Z | mlapp/handlers/file_storages/file_storage_interface.py | kerenleibovich/mlapp | 0b8dfaba7a7070ab68cb29ff61dd1c7dd8076693 | [
"Apache-2.0"
] | 9 | 2021-03-03T20:02:41.000Z | 2021-10-05T13:03:56.000Z | from abc import ABCMeta, abstractmethod
class FileStorageInterface(metaclass=ABCMeta):
@abstractmethod
def download_file(self, bucket_name, object_name, file_path, *args, **kwargs):
"""
Downloads file from file storage
:param bucket_name: name of the bucket/container
:param object_name: name of the object/file
:param file_path: path to local file
:param args: other arguments containing additional information
:param kwargs: other keyword arguments containing additional information
:return: None
"""
raise NotImplementedError()
@abstractmethod
def stream_file(self, bucket_name, object_name, *args, **kwargs):
"""
Streams file from file storage
:param bucket_name: name of the bucket/container
:param object_name: name of the object/file
:param args: other arguments containing additional information
:param kwargs: other keyword arguments containing additional information
:return: file stream
"""
raise NotImplementedError()
@abstractmethod
def upload_file(self, bucket_name, object_name, file_path, *args, **kwargs):
"""
Uploads file to file storage
:param bucket_name: name of the bucket/container
:param object_name: name of the object/file
:param file_path: path to local file
:param args: other arguments containing additional information
:param kwargs: other keyword arguments containing additional information
:return: None
"""
raise NotImplementedError()
@abstractmethod
def list_files(self, bucket_name, prefix="", *args, **kwargs):
"""
Lists files in file storage
:param bucket_name: name of the bucket/container
:param prefix: prefix string to search by
:param args: other arguments containing additional information
:param kwargs: other keyword arguments containing additional information
:return: file names list
"""
raise NotImplementedError()
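# A minimal local-filesystem sketch of the interface above, assuming buckets
# map to directories under a root path. LocalFileStorage and its layout are
# illustrative assumptions, not part of mlapp.
import os
import shutil


class LocalFileStorage(FileStorageInterface):
    def __init__(self, root):
        self.root = root  # base directory holding one subdirectory per bucket

    def download_file(self, bucket_name, object_name, file_path, *args, **kwargs):
        # copy the stored object out to the requested local path
        shutil.copyfile(os.path.join(self.root, bucket_name, object_name), file_path)

    def stream_file(self, bucket_name, object_name, *args, **kwargs):
        # hand back a binary file object the caller can read incrementally
        return open(os.path.join(self.root, bucket_name, object_name), 'rb')

    def upload_file(self, bucket_name, object_name, file_path, *args, **kwargs):
        target = os.path.join(self.root, bucket_name, object_name)
        os.makedirs(os.path.dirname(target), exist_ok=True)
        shutil.copyfile(file_path, target)

    def list_files(self, bucket_name, prefix="", *args, **kwargs):
        bucket = os.path.join(self.root, bucket_name)
        return [name for name in os.listdir(bucket) if name.startswith(prefix)]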
| 36.482759 | 82 | 0.67344 | 232 | 2,116 | 6.030172 | 0.211207 | 0.057184 | 0.165833 | 0.228735 | 0.778413 | 0.778413 | 0.758399 | 0.758399 | 0.758399 | 0.758399 | 0 | 0 | 0.26276 | 2,116 | 57 | 83 | 37.122807 | 0.896795 | 0.558601 | 0 | 0.533333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.266667 | false | 0 | 0.066667 | 0 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2b5f1f7773e329b9bb1e4b40641e361faa30cf54 | 9,224 | py | Python | web/mooctracker/api/tests.py | Jaaga/mooc-tracker | b7be270d24fa2608042064dc87ae13740893bade | [
"MIT"
] | null | null | null | web/mooctracker/api/tests.py | Jaaga/mooc-tracker | b7be270d24fa2608042064dc87ae13740893bade | [
"MIT"
] | 1 | 2020-06-05T17:43:59.000Z | 2020-06-05T17:43:59.000Z | web/mooctracker/api/tests.py | Jaaga/mooc-tracker | b7be270d24fa2608042064dc87ae13740893bade | [
"MIT"
] | 2 | 2015-02-25T10:46:20.000Z | 2016-10-28T11:24:32.000Z | from django.test import TestCase
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from students.models import Student
from courses.models import Course
from projects.models import Project
from academics.models import Academic
from .serializers import StudentSerializer, CourseSerializer, ProjectSerializer, AcademicSerializer
# Tests for Student Model
class CreateStudentTest(APITestCase):
def setUp(self):
self.student = Student.objects.create(name='ansal', email='ansal@bssatech.com')
self.data = {'name': 'ansal', 'email': 'ansal@bssatech.com' }
def test_can_create_student(self):
response = self.client.post(reverse('student-list'), self.data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
class ReadStudentTest(APITestCase):
def setUp(self):
self.student = Student.objects.create(name='ansal', email='ansal@bssatech.com')
def test_can_read_student_list(self):
response = self.client.get(reverse('student-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_can_read_student_detail(self):
response = self.client.get(reverse('student-detail', args=[self.student.id]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
class UpdateStudentTest(APITestCase):
def setUp(self):
self.student = Student.objects.create(name='ansal', email='ansal@bssatech.com')
self.updated_student = Student.objects.create(name='rajeef', email='rajeefmk@gmail.com')
self.data = StudentSerializer(self.updated_student).data
    def test_can_update_student(self):
response = self.client.put(reverse('student-detail', args=[self.student.id]), self.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class DeleteStudentTest(APITestCase):
def setUp(self):
self.student = Student.objects.create(name='ansal', email='ansal@bssatech.com')
    def test_can_delete_student(self):
response = self.client.delete(reverse('student-detail', args=[self.student.id]))
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# Tests for Course Model
class CreateCourseTest(APITestCase):
def setUp(self):
self.course = Course.objects.create(course_title="Intro to Computer Science Build a Search Engine & a Social Network", url="https://www.udacity.com/course/cs101")
self.data = {'course_title': 'Intro to Computer Science Build a Search Engine & a Social Network', 'url': 'https://www.udacity.com/course/cs101'}
def test_can_create_course(self):
response = self.client.post(reverse('course-list'), self.data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
class ReadCourseTest(APITestCase):
def setUp(self):
self.course = Course.objects.create(course_title="Intro to Computer Science Build a Search Engine & a Social Network", url="https://www.udacity.com/course/cs101")
def test_can_read_course_list(self):
response = self.client.get(reverse('course-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_can_read_course_detail(self):
response = self.client.get(reverse('course-detail', args=[self.course.id]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
class UpdateCourseTest(APITestCase):
def setUp(self):
self.course = Course.objects.create(course_title="Intro to Computer Science Build a Search Engine & a Social Network", url="https://www.udacity.com/course/cs101")
self.updated_course = Course.objects.create(course_title="Intro to Computer Science", url="https://www.udacity.com/course/cs101")
self.data = CourseSerializer(self.updated_course).data
def test_can_update_course(self):
response = self.client.put(reverse('course-detail', args=[self.course.id]), self.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class DeleteCourseTest(APITestCase):
def setUp(self):
self.course = Course.objects.create(course_title="Intro to Computer Science Build a Search Engine & a Social Network", url="https://www.udacity.com/course/cs101")
    def test_can_delete_course(self):
response = self.client.delete(reverse('course-detail', args=[self.course.id]))
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# Tests for Project Model
class CreateProjectTest(APITestCase):
def setUp(self):
self.project = Project.objects.create(project_name="Django Poll App ( Django version 1.6)", url="https://docs.djangoproject.com/en/1.6/intro/tutorial01/")
self.data = {'project_name': 'Django Poll App ( Django version 1.6)', 'url': 'https://docs.djangoproject.com/en/1.6/intro/tutorial01/'}
def test_can_create_project(self):
response = self.client.post(reverse('project-list'), self.data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
class ReadProjectTest(APITestCase):
def setUp(self):
self.project = Project.objects.create(project_name="Django Poll App ( Django version 1.6)", url="https://docs.djangoproject.com/en/1.6/intro/tutorial01/")
def test_can_read_project_list(self):
response = self.client.get(reverse('project-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_can_read_project_detail(self):
response = self.client.get(reverse('project-detail', args=[self.project.id]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
class UpdateProjectTest(APITestCase):
def setUp(self):
self.project = Project.objects.create(project_name="Django Poll App ( Django version 1.6)", url="https://docs.djangoproject.com/en/1.6/intro/tutorial01/")
self.updated_project = Project.objects.create(project_name="Django Poll App", url="https://docs.djangoproject.com/en/1.6/intro/tutorial01/")
self.data = ProjectSerializer(self.updated_project).data
def test_can_update_project(self):
response = self.client.put(reverse('project-detail', args=[self.project.id]), self.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class DeleteProjectTest(APITestCase):
def setUp(self):
self.project = Project.objects.create(project_name="Django Poll App ( Django version 1.6)", url="https://docs.djangoproject.com/en/1.6/intro/tutorial01/")
    def test_can_delete_project(self):
response = self.client.delete(reverse('project-detail', args=[self.project.id]))
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# Tests for Academics Model
class CreateAcademicTest(APITestCase):
def setUp(self):
self.student = Student.objects.create(name='ansal')
self.course = Course.objects.create(course_title="Intro to Computer Science Build a Search Engine & a Social Network", url="https://www.udacity.com/course/cs101")
self.academic = Academic.objects.create(student=self.student, course=self.course)
self.data = {'student': 'http://localhost:8000/api/students/1/', 'course': 'http://localhost:8000/api/courses/1/'}
def test_can_create_academics(self):
response = self.client.post(reverse('academic-list'), self.data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
class ReadAcademicTest(APITestCase):
def setUp(self):
self.student = Student.objects.create(name='ansal')
self.course = Course.objects.create(course_title="Intro to Computer Science Build a Search Engine & a Social Network", url="https://www.udacity.com/course/cs101")
self.academic = Academic.objects.create(student=self.student, course=self.course)
def test_can_read_academic_list(self):
response = self.client.get(reverse('academic-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_can_read_academic_detail(self):
response = self.client.get(reverse('academic-detail', args=[self.academic.id]))
self.assertEqual(response.status_code, status.HTTP_200_OK)
class UpdateAcademicTest(APITestCase):
def setUp(self):
self.student = Student.objects.create(name='ansal')
self.course = Course.objects.create(course_title="Intro to Computer Science Build a Search Engine & a Social Network", url="https://www.udacity.com/course/cs101")
self.academic = Academic.objects.create(student=self.student, course=self.course)
self.new_student = Student.objects.create(name='santu')
self.updated_academic = Academic.objects.create(student=self.new_student, course=self.course)
self.data = AcademicSerializer(self.updated_academic).data
def test_can_update_academic(self):
response = self.client.put(reverse('academic-detail', args=[self.academic.id]), self.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class DeleteAcademicTest(APITestCase):
def setUp(self):
self.student = Student.objects.create(name='ansal')
self.course = Course.objects.create(course_title="Intro to Computer Science Build a Search Engine & a Social Network", url="https://www.udacity.com/course/cs101")
self.academic = Academic.objects.create(student=self.student, course=self.course)
    def test_can_delete_academic(self):
response = self.client.delete(reverse('academic-detail', args=[self.academic.id]))
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
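# The suite follows one TestCase class per CRUD verb per model. It runs under
# Django's standard test runner; the app label below is inferred from the
# module path (web/mooctracker/api/tests.py) and is an assumption:
#
#   python manage.py test api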
| 48.041667 | 170 | 0.759974 | 1,257 | 9,224 | 5.449483 | 0.090692 | 0.055037 | 0.029197 | 0.064234 | 0.845693 | 0.821022 | 0.783212 | 0.74 | 0.721606 | 0.673285 | 0 | 0.016324 | 0.110039 | 9,224 | 192 | 171 | 48.041667 | 0.818126 | 0.010408 | 0 | 0.485507 | 0 | 0 | 0.225997 | 0 | 0 | 0 | 0 | 0 | 0.144928 | 1 | 0.26087 | false | 0 | 0.065217 | 0 | 0.442029 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
991e586c41de12219d18ee3992a94ccd47e55142 | 4,554 | py | Python | tests/resources/test_trade.py | danielcoker/embedpy | 6af5a794a50e3b9b03efb03eadb0ba46dca2cd8d | [
"MIT"
] | null | null | null | tests/resources/test_trade.py | danielcoker/embedpy | 6af5a794a50e3b9b03efb03eadb0ba46dca2cd8d | [
"MIT"
] | 1 | 2022-01-12T14:13:39.000Z | 2022-01-12T14:35:43.000Z | tests/resources/test_trade.py | danielcoker/embedpy | 6af5a794a50e3b9b03efb03eadb0ba46dca2cd8d | [
"MIT"
] | 2 | 2021-07-15T11:16:29.000Z | 2022-03-28T01:07:31.000Z | from embed.resources.trade import Trade
from embed import errors
from unittest.mock import MagicMock, patch
import json
import pytest
@patch("embed.common.APIResponse.get_essential_details")
def test_can_get_stocks(mock_get_essential_details, api_session):
trade = Trade(api_session)
mock_get_essential_details.return_value = MagicMock()
trade.get_stocks()
trade.get_essential_details.assert_called_with(
"GET",
f"{api_session.base_url}/api/{api_session.api_version}/stocks/assets",
)
@patch("embed.common.APIResponse.get_essential_details")
def test_can_get_single_position(mock_get_essential_details, api_session):
trade = Trade(api_session)
mock_get_essential_details.return_value = MagicMock()
trade.get_single_position(account_id="fake-id", stock_symbol="SYBL")
trade.get_essential_details.assert_called_with(
"GET",
f"{api_session.base_url}/api/{api_session.api_version}/stocks/SYBL/positions?account_id=fake-id",
)
@patch("embed.common.APIResponse.get_essential_details")
def test_can_get_orders(mock_get_essential_details, api_session):
trade = Trade(api_session)
mock_get_essential_details.return_value = MagicMock()
trade.get_orders(account_id="fake-id")
trade.get_essential_details.assert_called_with(
"GET",
f"{api_session.base_url}/api/{api_session.api_version}/stocks/orders?account_id=fake-id&status=open",
)
@patch("embed.common.APIResponse.get_essential_details")
def test_can_get_profile(mock_get_essential_details, api_session):
trade = Trade(api_session)
mock_get_essential_details.return_value = MagicMock()
trade.get_profile(account_id="fake-id")
trade.get_essential_details.assert_called_with(
"GET",
f"{api_session.base_url}/api/{api_session.api_version}/stocks/profile?account_id=fake-id",
)
@patch("embed.common.APIResponse.get_essential_details")
def test_can_get_position(mock_get_essential_details, api_session):
trade = Trade(api_session)
mock_get_essential_details.return_value = MagicMock()
trade.get_position(account_id="fake-id")
trade.get_essential_details.assert_called_with(
"GET",
f"{api_session.base_url}/api/{api_session.api_version}/stocks/positions?account_id=fake-id",
)
@patch("embed.common.APIResponse.get_essential_details")
def test_can_buy_stock(mock_get_essential_details, api_session):
trade = Trade(api_session)
mock_get_essential_details.return_value = MagicMock()
test_data = {
"account_id": "fake-id",
"symbol": "sym",
"amount": 200,
"side": "side",
"the_type": "type",
"time_in_force": "tif",
}
trade.buy_stock(
account_id=test_data.get("account_id"),
symbol=test_data.get("symbol"),
amount=test_data.get("amount"),
side=test_data.get("side"),
the_type=test_data.get("the_type"),
time_in_force=test_data.get("time_in_force"),
)
trade.get_essential_details.assert_called_with(
"POST",
f"{api_session.base_url}/api/{api_session.api_version}/stocks/buy",
json.dumps(test_data),
)
@patch("embed.common.APIResponse.get_essential_details")
def test_can_sell_stock(mock_get_essential_details, api_session):
trade = Trade(api_session)
mock_get_essential_details.return_value = MagicMock()
test_data = {
"account_id": "fake-id",
"symbol": "sym",
"amount": 200,
"side": "side",
"the_type": "type",
"time_in_force": "tif",
}
trade.sell_stock(
account_id=test_data.get("account_id"),
symbol=test_data.get("symbol"),
amount=test_data.get("amount"),
side=test_data.get("side"),
the_type=test_data.get("the_type"),
time_in_force=test_data.get("time_in_force"),
)
trade.get_essential_details.assert_called_with(
"POST",
f"{api_session.base_url}/api/{api_session.api_version}/stocks/sell",
json.dumps(test_data),
)
@patch("embed.common.APIResponse.get_essential_details")
def test_can_close_all_positions(mock_get_essential_details, api_session):
trade = Trade(api_session)
mock_get_essential_details.return_value = MagicMock()
test_data = {"account_id": "fake-id"}
trade.close_all_positions(account_id=test_data.get("account_id"))
trade.get_essential_details.assert_called_with(
"DELETE",
f"{api_session.base_url}/api/{api_session.api_version}/stocks/positions?account_id=fake-id",
)
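# Hedged usage sketch mirroring the calls exercised above; the tests obtain
# `api_session` from a fixture, so its construction is out of scope here and
# the literal argument values below are illustrative assumptions:
#
#   trade = Trade(api_session)
#   trade.buy_stock(account_id='fake-id', symbol='AAPL', amount=200,
#                   side='buy', the_type='market', time_in_force='day')
#   trade.get_orders(account_id='fake-id')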
| 35.858268 | 109 | 0.716513 | 616 | 4,554 | 4.904221 | 0.103896 | 0.12711 | 0.201258 | 0.121814 | 0.919894 | 0.894406 | 0.894406 | 0.885468 | 0.871566 | 0.871566 | 0 | 0.001569 | 0.160299 | 4,554 | 126 | 110 | 36.142857 | 0.788441 | 0 | 0 | 0.645455 | 0 | 0.018182 | 0.292271 | 0.222442 | 0 | 0 | 0 | 0 | 0.072727 | 1 | 0.072727 | false | 0 | 0.045455 | 0 | 0.118182 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
996de4e7265223b52d189f66385021d7d8f34c04 | 24,997 | py | Python | port/modules/weather_icon.py | diskman88/mpython-desktop-robot | 01cd15fbeeba521ab874cf66f94d3909c4f8c39a | [
"MIT"
] | 53 | 2018-10-15T12:01:24.000Z | 2019-11-22T09:31:02.000Z | port/modules/weather_icon.py | diskman88/mpython-desktop-robot | 01cd15fbeeba521ab874cf66f94d3909c4f8c39a | [
"MIT"
] | 10 | 2018-10-17T13:42:19.000Z | 2019-11-25T06:42:40.000Z | port/modules/weather_icon.py | diskman88/mpython-desktop-robot | 01cd15fbeeba521ab874cf66f94d3909c4f8c39a | [
"MIT"
] | 26 | 2018-12-04T03:53:39.000Z | 2019-11-22T03:40:05.000Z | from micropython import const
WIDTH=const(38)
HEIGHT=const(38)
# Sunny, code 0/2
sunny = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00'
b'\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00\x00\x78\x00\x00\x03\xC0'
b'\x78\x07\x00\x03\xE0\x78\x0F\x00\x03\xE0\x00\x1F\x00\x01\xF0\x00'
b'\x3F\x00\x00\xF1\xFE\x3C\x00\x00\x63\xFF\x18\x00\x00\x0F\xFF\xC0'
b'\x00\x00\x0F\xFF\xC0\x00\x00\x1F\x87\xE0\x00\x00\x3E\x01\xF0\x00'
b'\x00\x3E\x00\xF0\x00\x7F\x3C\x00\xF3\xF8\x7F\x3C\x00\xF3\xF8\x7F'
b'\x3C\x00\xF3\xF8\x7F\x3C\x00\xF3\xF8\x00\x3E\x00\xF0\x00\x00\x3E'
b'\x01\xF0\x00\x00\x1F\x83\xE0\x00\x00\x0F\xFF\xC0\x00\x00\x0F\xFF'
b'\xC0\x00\x00\x63\xFF\x18\x00\x00\xF1\xFE\x3C\x00\x01\xF0\x00\x3E'
b'\x00\x03\xE0\x00\x1F\x00\x03\xE0\x78\x0F\x00\x03\xC0\x78\x07\x00'
b'\x00\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00'
b'\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00\x00\x00\x00\x00'
)
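# Each icon is a WIDTH x HEIGHT (38x38) monochrome bitmap, one bit per pixel,
# MSB first: ceil(38/8) = 5 bytes per row x 38 rows = 190 bytes, matching
# framebuf.MONO_HLSB. A hedged helper sketch (`show_icon` and the `oled`
# display object are assumptions, not part of this module):
def show_icon(icon, oled, x=45, y=13):
    import framebuf
    fb = framebuf.FrameBuffer(icon, WIDTH, HEIGHT, framebuf.MONO_HLSB)
    oled.blit(fb, x, y)  # copy the icon into the display's framebuffer
    oled.show()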
# Clear, code 1/3
clear = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7C\x00\x00\x00'
b'\x07\xFE\x00\x00\x00\x1F\xFE\x00\x00\x00\x7F\xFE\x00\x00\x00\xFF'
b'\xFE\x00\x00\x01\xFF\xBE\x00\x00\x03\xFC\x3E\x00\x00\x07\xF8\x1F'
b'\x00\x00\x07\xF0\x1F\x00\x00\x0F\xE0\x1F\x80\x00\x0F\xC0\x0F\xC0'
b'\x00\x1F\x80\x0F\xE0\x00\x1F\x80\x07\xF8\x00\x1F\x00\x03\xFF\x00'
b'\x1F\x00\x01\xFF\xE0\x1F\x00\x00\xFF\xE0\x1F\x00\x00\x3F\xE0\x1F'
b'\x00\x00\x0F\xE0\x1F\x00\x00\x01\xE0\x1F\x00\x00\x03\xE0\x1F\x00'
b'\x00\x03\xE0\x1F\x80\x00\x03\xE0\x1F\x80\x00\x07\xE0\x0F\xC0\x00'
b'\x0F\xC0\x0F\xE0\x00\x0F\xC0\x07\xF0\x00\x3F\x80\x07\xF8\x00\x7F'
b'\x80\x03\xFE\x01\xFF\x00\x01\xFF\xCF\xFE\x00\x00\xFF\xFF\xFC\x00'
b'\x00\x7F\xFF\xF8\x00\x00\x1F\xFF\xE0\x00\x00\x07\xFF\x80\x00\x00'
b'\x00\x78\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Cloudy, code 4
cloud = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x07\xFC\x00\x00\x00\x0F\xFF\x00\x00\x00\x3F\xFF'
b'\x80\x00\x00\x3F\xBF\xC0\x00\x00\x7E\x07\xE0\x00\x00\xF8\x03\xE0'
b'\x00\x00\xF8\x01\xF0\x00\x01\xF0\x00\xFE\x00\x07\xF0\x00\xFF\x80'
b'\x0F\xE0\x00\xFF\xC0\x1F\xE0\x00\xFF\xE0\x3F\x00\x00\x03\xF0\x3E'
b'\x00\x00\x01\xF0\x3C\x00\x00\x00\xF0\x3C\x00\x00\x00\xF0\x38\x00'
b'\x00\x00\x70\x38\x00\x00\x00\x70\x3C\x00\x00\x00\xF0\x3C\x00\x00'
b'\x00\xF0\x3E\x00\x00\x01\xF0\x1F\xC0\x00\x0F\xE0\x1F\xFF\xFF\xFF'
b'\xE0\x07\xFF\xFF\xFF\xC0\x03\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Partly cloudy (day), code 5
day_partly_cloudy = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00'
b'\x00\x38\x00\x00\x00\x00\x38\x00\x00\x00\x00\x38\x00\x00\x00\x00'
b'\x38\x00\x00\x03\x80\x38\x03\x00\x03\xC0\x38\x07\x80\x03\xE0\x10'
b'\x0F\x80\x03\xF0\x00\x1F\x80\x01\xF0\x30\x1F\x00\x00\xF1\xFF\x1E'
b'\x00\x00\x07\xFF\xC0\x00\x00\x0F\xFF\xE0\x00\x00\x0F\xC7\xE0\x00'
b'\x00\x1F\x01\xF0\x00\x00\x1E\x00\xF0\x00\x00\x3E\x00\xF8\x00\x7E'
b'\x3C\x00\x79\xF8\x7E\x3F\x80\x79\xF8\x7E\x7F\xC0\x79\xF8\x3C\xFF'
b'\xE0\xF8\xF0\x00\xFF\xF0\xF0\x00\x01\xF1\xFD\xF0\x00\x07\xF0\xFF'
b'\xF0\x00\x0F\xE0\xFF\xE0\x00\x0F\xE0\x7F\xC0\x00\x0F\x80\x0F\x9C'
b'\x00\x0F\x00\x07\x9E\x00\x0F\x00\x07\x9F\x00\x0F\x80\x0F\x9F\x80'
b'\x0F\xFF\xFF\x8F\x80\x07\xFF\xFF\x07\x80\x03\xFF\xFE\x00\x00\x00'
b'\xFF\xF8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Partly cloudy (night), code 6
night_partly_cloudy = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1F\xF0\x00\x00'
b'\x00\x7F\xF8\x00\x00\x01\xFF\xF8\x00\x00\x03\xFF\xF8\x00\x00\x07'
b'\xFF\xF8\x00\x00\x07\xF8\xF8\x00\x00\x0F\xE0\xF8\x00\x00\x1F\xC0'
b'\xFC\x00\x00\x1F\x80\xFC\x00\x00\x1F\x80\x7E\x00\x00\x1F\x00\x7E'
b'\x00\x00\x1F\x00\x7F\x00\x00\x3F\xE0\x3F\xC0\x00\x3F\xF8\x1F\xF8'
b'\x00\x7F\xFC\x0F\xF8\x00\xFF\xFE\x07\xF8\x01\xFF\xFF\x03\xF8\x01'
b'\xFC\x7F\x03\xF0\x03\xF8\x3F\xE7\xF0\x0F\xF0\x1F\xFF\xE0\x1F\xE0'
b'\x1F\xFF\xE0\x3F\xE0\x0F\xFF\xC0\x3F\xE0\x0F\xFF\x80\x7F\x80\x00'
b'\xFF\x00\x7E\x00\x00\x3F\x00\x7E\x00\x00\x3F\x00\x7E\x00\x00\x3F'
b'\x00\x7E\x00\x00\x3F\x00\x7F\x00\x00\xFE\x00\x3F\xFF\xFF\xFE\x00'
b'\x3F\xFF\xFF\xFE\x00\x1F\xFF\xFF\xFC\x00\x0F\xFF\xFF\xF8\x00\x07'
b'\xFF\xFF\xE0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Mostly cloudy (night), code 7
night_cloudy = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3E\x00\x00\x00'
b'\x01\xFF\x00\x00\x00\x03\xFF\x00\x00\x00\x07\xFF\x00\x00\x0F\xFF'
b'\xEF\x00\x00\x1F\xFF\x8F\x00\x00\x3F\xFF\x0F\x80\x00\x7F\x3F\x87'
b'\x80\x00\xFC\x0F\xC7\xE0\x00\xF0\x03\xC3\xF8\x01\xF0\x03\xE3\xF8'
b'\x03\xE0\x01\xFE\xF8\x0F\xE0\x01\xFF\xF0\x1F\xE0\x01\xFF\xF0\x3F'
b'\xE0\x01\xFF\xE0\x3E\x00\x00\x07\xE0\x7C\x00\x00\x03\xE0\x78\x00'
b'\x00\x01\xE0\x78\x00\x00\x01\xE0\x78\x00\x00\x00\xE0\x78\x00\x00'
b'\x01\xE0\x78\x00\x00\x01\xE0\x7C\x00\x00\x03\xE0\x3F\x00\x00\x07'
b'\xC0\x1F\xFF\xFF\xFF\xC0\x1F\xFF\xFF\xFF\x80\x07\xFF\xFF\xFF\x00'
b'\x01\xFF\xFF\xF8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Mostly cloudy (day), code 8
day_cloudy = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x07\x00\x00\x00\x00'
b'\x07\x00\x00\x00\x00\x07\x00\x00\x00\x0C\x07\x01\x80\x00\x0F\x07'
b'\x03\xC0\x00\x0F\x00\x07\xC0\x00\x0F\x00\x0F\x80\x00\x00\x1F\xC7'
b'\x00\x00\x1F\xBF\xF2\x00\x00\x7F\xFF\xF8\x00\x00\xFF\xF8\xF8\x00'
b'\x01\xFF\xF8\x3C\x00\x03\xE0\x78\x3C\x00\x03\xC0\x3C\x1C\xF8\x07'
b'\x80\x3F\x1C\xF8\x1F\x80\x1F\xFC\xF8\x3F\x80\x1F\xFC\x00\x7F\x00'
b'\x1F\xFC\x00\x78\x00\x00\xF8\x00\x78\x00\x00\x78\x00\x70\x00\x00'
b'\x38\x00\x70\x00\x00\x3B\x80\x70\x00\x00\x3B\x80\x78\x00\x00\x7B'
b'\xC0\x7C\x00\x00\xFB\x80\x3F\xFF\xFF\xF0\x00\x3F\xFF\xFF\xE0\x00'
b'\x0F\xFF\xFF\xC0\x00\x01\xFF\xFE\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Overcast, code 9
cloudy = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x0F\xC0\x00\x00\x00\x3F\xF0\x00\x00\x00\x7F'
b'\xF8\x00\x00\x07\x7C\xFC\x00\x00\x3F\xF8\x3C\x00\x00\x7F\xF0\x1F'
b'\x00\x00\xFF\xF8\x1F\xC0\x01\xF0\x7C\x0F\xE0\x01\xE0\x3E\x0F\xF0'
b'\x03\xC0\x1E\x00\xF8\x0F\xC0\x1F\xE0\x78\x1F\x80\x0F\xF0\x38\x3F'
b'\x80\x0F\xF8\x78\x3C\x00\x00\x78\x78\x78\x00\x00\x3C\xF8\x78\x00'
b'\x00\x3F\xF0\x70\x00\x00\x1F\xE0\x70\x00\x00\x1F\xC0\x78\x00\x00'
b'\x3C\x00\x7C\x00\x00\x7C\x00\x3F\x00\x00\xF8\x00\x1F\xFF\xFF\xF8'
b'\x00\x0F\xFF\xFF\xF0\x00\x07\xFF\xFF\xC0\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Rain showers, code 10 11 19
shower = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\xE0\x00\x00\x00\x07\xFE\x00\x00\x00'
b'\x1F\xFF\x00\x00\x00\x3F\xFF\xC0\x00\x00\x7F\x9F\xC0\x00\x00\x7C'
b'\x07\xE0\x00\x00\xF8\x01\xF0\x00\x00\xF0\x01\xF0\x00\x01\xF0\x00'
b'\xFE\x00\x07\xE0\x00\xFF\x80\x0F\xE0\x00\xFF\xE0\x1F\xE0\x00\x7F'
b'\xE0\x3F\x00\x00\x03\xF0\x3E\x00\x00\x00\xF0\x7C\x00\x00\x00\xF8'
b'\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78\x07\x80\xF0\x78\x78'
b'\x07\x80\xF0\x78\x7C\x0F\x80\xF0\xF8\x3E\x0F\x00\xF1\xF0\x3F\x07'
b'\x00\xE3\xF0\x1F\xC0\x38\x0F\xE0\x0F\xC0\x78\x0F\xC0\x07\xC0\x78'
b'\x0F\x80\x01\xDC\x7B\xCE\x00\x00\x3C\x73\xC0\x00\x00\x3C\x27\xC0'
b'\x00\x00\x3C\x07\x80\x00\x00\x3C\x03\x80\x00\x00\x01\xC0\x00\x00'
b'\x00\x01\xE0\x00\x00\x00\x01\xE0\x00\x00\x00\x03\xE0\x00\x00\x00'
b'\x01\xC0\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00'
)
# Thundershower with hail, code 12
shower_hail = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\xF0\x00\x00\x00\x07\xFE\x00\x00\x00'
b'\x1F\xFF\x00\x00\x00\x3F\xFF\xC0\x00\x00\x7F\x1F\xC0\x00\x00\xFC'
b'\x07\xE0\x00\x00\xF8\x01\xF0\x00\x00\xF0\x01\xF0\x00\x01\xF0\x00'
b'\xFE\x00\x07\xE0\x00\xFF\x80\x1F\xE0\x00\xFF\xE0\x1F\xE0\x00\x7F'
b'\xE0\x3F\x00\x00\x03\xF0\x3E\x00\x00\x01\xF0\x7C\x00\x00\x00\xF8'
b'\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78\x07\x9C\xF0\x78\x78'
b'\x07\xBC\xF0\x78\x7C\x0F\xBC\xF0\xF8\x3E\x0F\x3D\xF1\xF0\x3F\x0F'
b'\x3D\xE3\xF0\x1F\xCF\x79\xEF\xE0\x0F\xDF\x79\xEF\xC0\x07\xDE\x79'
b'\xEF\x80\x01\xCE\xF9\xCE\x00\x00\x00\xF0\x00\x00\x00\x3C\xF3\x80'
b'\x00\x00\x3C\xF7\x80\x00\x00\x3C\xF3\x80\x00\x00\x00\xE0\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x01\xC0\x00\x00\x00\x03\xC0\x00\x00\x00'
b'\x03\xC0\x00\x00\x00\x01\x80\x00\x00\x00\x00\x00\x00\x00'
)
# Light rain, code 13
light_rain = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\xF0\x00\x00\x00\x07\xFE\x00\x00\x00\x1F'
b'\xFF\x00\x00\x00\x3F\xFF\xC0\x00\x00\x7F\x1F\xC0\x00\x00\x7C\x07'
b'\xE0\x00\x00\xF8\x01\xF0\x00\x00\xF0\x01\xF0\x00\x01\xF0\x00\xFE'
b'\x00\x07\xE0\x00\xFF\x80\x1F\xE0\x00\xFF\xE0\x1F\xE0\x1C\x7F\xE0'
b'\x3F\x00\x1C\x03\xF0\x3C\x00\x1E\x01\xF0\x7C\x06\x1E\x00\xF8\x78'
b'\x0F\x1C\x00\x78\x78\x0F\x00\x00\x78\x78\x1F\x80\x00\x78\x78\x1F'
b'\x88\x00\x78\x7C\x1F\x9C\x00\xF8\x3E\x0F\x3E\x01\xF0\x3F\x00\x7F'
b'\x03\xF0\x1F\xC0\x7F\x0F\xE0\x0F\xC0\xFF\x8F\xC0\x07\xC0\xFF\x8F'
b'\x80\x01\xC0\xFF\x8E\x00\x00\x00\xFF\x80\x00\x00\x00\xFF\x00\x00'
b'\x00\x00\x7F\x00\x00\x00\x00\x1C\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Moderate rain, code 14
moderate_rain = bytearray(
b'\x00\x00\x00\x00\x00\x00\x01\xF8\x00\x00\x00\x0F\xFE\x00\x00\x00'
b'\x1F\xFF\x80\x00\x00\x3F\xFF\xC0\x00\x00\x7F\x0F\xE0\x00\x00\xFC'
b'\x03\xE0\x00\x00\xF8\x01\xF0\x00\x00\xF0\x01\xF0\x00\x03\xF0\x00'
b'\xFF\x00\x0F\xE0\x00\xFF\xC0\x1F\xE0\x00\xFF\xE0\x3F\xE0\x00\x7F'
b'\xF0\x3F\x00\x00\x03\xF0\x7C\x00\x00\x00\xF8\x7C\x00\x00\x00\xF8'
b'\x78\x00\x00\x00\x78\x78\x03\x00\x00\x78\x78\x07\x80\xF0\x78\x78'
b'\x07\x80\xF0\x78\x7C\x0F\x80\xF0\xF8\x3E\x0F\x00\xF1\xF0\x3F\x87'
b'\x00\xE7\xF0\x1F\xCE\x79\xCF\xE0\x0F\xDE\x79\xCF\xC0\x07\xCE\x79'
b'\xCF\x80\x00\xC4\x78\x8C\x00\x00\x1C\x73\x80\x00\x00\x3C\xE3\x80'
b'\x00\x00\x3C\xE7\x80\x00\x00\x3C\xE3\x80\x00\x00\x18\xE3\x00\x00'
b'\x00\x01\xC0\x00\x00\x00\x01\xC0\x00\x00\x00\x03\xC0\x00\x00\x00'
b'\x03\xC0\x00\x00\x00\x01\x80\x00\x00\x00\x00\x00\x00\x00'
)
# Heavy rain, code 15
heavy_rain = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xFE\x00\x00\x00'
b'\x1F\xFF\x00\x00\x00\x3F\xFF\x80\x00\x00\x7F\xBF\xC0\x00\x00\x7C'
b'\x07\xE0\x00\x00\xF8\x03\xE0\x00\x00\xF0\x01\xF0\x00\x01\xF0\x00'
b'\xFE\x00\x07\xE0\x00\xFF\x80\x0F\xE0\x00\xFF\xC0\x1F\xE0\x00\x7F'
b'\xE0\x3F\x00\x00\x03\xF0\x3E\x00\x00\x01\xF0\x7C\x00\x00\x00\xF8'
b'\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78\x07\x1C\xF0\x78\x78'
b'\x07\xBC\xF0\x78\x7C\x0F\xBC\xF0\xF8\x7E\x0F\x3D\xF1\xF0\x3F\x0F'
b'\x3D\xE3\xF0\x1F\xCF\x7D\xEF\xE0\x1F\xDF\x79\xEF\xE0\x07\xDE\x7B'
b'\xEF\x80\x01\xDE\xFB\xCE\x00\x00\x3E\xF3\xC0\x00\x00\x3C\xF7\xC0'
b'\x00\x00\x3C\xF7\x80\x00\x00\x3D\xF3\x80\x00\x00\x01\xE0\x00\x00'
b'\x00\x01\xE0\x00\x00\x00\x03\xE0\x00\x00\x00\x03\xC0\x00\x00\x00'
b'\x01\xC0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# Rainstorm, code 16~18
storm = bytearray(
b'\x00\x00\x00\x00\x00\x00\x01\xF8\x00\x00\x00\x0F\xFE\x00\x00\x00'
b'\x1F\xFF\x80\x00\x00\x3F\xFF\xC0\x00\x00\x7F\x0F\xE0\x00\x00\xFC'
b'\x03\xE0\x00\x00\xF8\x01\xF0\x00\x00\xF0\x01\xF0\x00\x03\xF0\x00'
b'\xFF\x00\x0F\xE0\x00\xFF\xC0\x1F\xE0\x00\xFF\xE0\x3F\xE0\x00\x7F'
b'\xF0\x3F\x00\x00\x03\xF0\x7C\x00\x00\x00\xF8\x7C\x00\x00\x00\xF8'
b'\x78\x00\x00\x00\x78\x78\x00\x08\x20\x78\x78\x3F\x1C\xF0\x78\x78'
b'\x7F\x3C\xF0\x78\x7C\x7F\x3C\xF0\xF8\x3E\xFE\x3D\xF1\xF0\x3F\xFC'
b'\x3D\xE7\xF0\x1F\xFC\x79\xEF\xE0\x0F\xF8\x79\xEF\xC0\x07\xF8\x7B'
b'\xEF\x80\x01\xFF\xFB\xCC\x00\x03\xFF\xF3\xC0\x00\x03\xFE\xF3\xC0'
b'\x00\x00\x3C\xF3\xC0\x00\x00\x7D\xF3\x80\x00\x00\x79\xE0\x00\x00'
b'\x00\x71\xE0\x00\x00\x00\x71\xE0\x00\x00\x00\xE1\xE0\x00\x00\x00'
b'\xC1\xC0\x00\x00\x00\xC0\x00\x00\x00\x00\x00\x00\x00\x00'
)
# sleet, code 20
sleet = bytearray(
b'\x00\x00\x00\x00\x00\x00\x01\xF8\x00\x00\x00\x0F\xFE\x00\x00\x00'
b'\x1F\xFF\x80\x00\x00\x3F\xFF\xC0\x00\x00\x7F\x0F\xE0\x00\x00\xFC'
b'\x03\xE0\x00\x00\xF8\x01\xF0\x00\x00\xF0\x01\xF0\x00\x03\xF0\x00'
b'\xFF\x00\x0F\xE0\x00\xFF\xC0\x1F\xE0\x00\xFF\xE0\x3F\xE0\x00\x7F'
b'\xF0\x3F\x00\x00\x03\xF0\x7C\x00\x00\x00\xF8\x7C\x00\x00\x00\xF8'
b'\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78'
b'\x00\x00\x00\x78\x7C\x00\x00\x00\xF8\x3E\x00\x00\x01\xF0\x3F\x80'
b'\x00\x07\xF0\x1F\xCE\x79\xCF\xE0\x0F\xDE\x79\xCF\xC0\x07\xCE\x79'
b'\xCF\x80\x00\x4C\x79\x8C\x00\x00\x1C\x73\x80\x00\x00\x3C\xE7\x80'
b'\x00\x00\x3C\xE7\x80\x00\x00\x3C\xE7\x80\x00\x00\x18\xE3\x00\x00'
b'\x00\x01\xC0\x00\x00\x00\x03\xC0\x00\x00\x00\x03\xC0\x00\x00\x00'
b'\x03\xC0\x00\x00\x00\x01\x80\x00\x00\x00\x00\x00\x00\x00'
)
# light/moderate snow, codes 21 22 23
snow = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xFC\x00\x00\x00'
b'\x1F\xFF\x00\x00\x00\x3F\xFF\x80\x00\x00\x7F\x9F\xC0\x00\x00\x7C'
b'\x07\xE0\x00\x00\xF8\x03\xE0\x00\x00\xF0\x01\xF0\x00\x01\xF0\x00'
b'\xFC\x00\x07\xE0\x00\xFF\x80\x0F\xE0\x00\xFF\xC0\x1F\xE0\x00\x7F'
b'\xE0\x3F\x00\x00\x03\xF0\x3E\x00\x00\x01\xF0\x7C\x00\x00\x00\xF8'
b'\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78'
b'\x00\x00\x00\x78\x7C\x00\x30\x00\xF8\x7C\x00\x78\x00\xF8\x3F\x00'
b'\x78\x03\xF0\x3F\xCE\x79\xCF\xE0\x1F\xCF\x01\xCF\xE0\x0F\xCF\x01'
b'\xCF\x80\x03\xCE\x31\xCF\x00\x00\x00\x78\x00\x00\x00\x00\x78\x00'
b'\x00\x00\x06\x79\xC0\x00\x00\x0E\x01\xC0\x00\x00\x0F\x01\xC0\x00'
b'\x00\x0E\x01\xC0\x00\x00\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00'
b'\x00\x78\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# heavy snow, code 24
heavy_snow = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xFC\x00\x00\x00'
b'\x1F\xFF\x00\x00\x00\x3F\xFF\x80\x00\x00\x7F\x9F\xC0\x00\x00\x7C'
b'\x07\xE0\x00\x00\xF8\x03\xE0\x00\x00\xF0\x01\xF0\x00\x01\xF0\x00'
b'\xFC\x00\x07\xE0\x00\xFF\x80\x0F\xE0\x00\xFF\xC0\x1F\xE0\x00\x7F'
b'\xE0\x3F\x00\x00\x03\xF0\x3E\x00\x00\x01\xF0\x7C\x00\x00\x00\xF8'
b'\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78\x00\x00\x00\x78\x78'
b'\x00\x00\x00\x78\x7C\x00\x30\x00\xF8\x7C\x00\x78\x00\xF8\x3F\x00'
b'\x78\x03\xF0\x3F\xCE\x79\xCF\xE0\x1F\xCF\x01\xCF\xE0\x0F\xCF\x01'
b'\xCF\x80\x03\xCE\x61\xCF\x00\x00\x00\x70\x00\x00\x00\x00\xF0\x00'
b'\x00\x00\x18\x73\x00\x00\x00\x3C\x07\x80\x00\x00\x3C\x07\x80\x00'
b'\x00\x1C\x07\x80\x00\x00\x01\xE0\x00\x00\x00\x01\xE0\x00\x00\x00'
b'\x01\xE0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# dust storm, codes 25~29
dust_storm = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x7F\xF3\xFF\xFE\x78\x7F\xFF\xFF\xFF\xF8\x7F\xFB\xFF'
b'\xFE\xF8\x7F\xF3\xFF\xFE\x78\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x01\xE7\xFF\x9F\xE0\x01\xEF\xFF\xBF\xE0\x03\xEF\xFF\xBF\xE0'
b'\x01\xEF\xFF\xBF\xE0\x01\xE7\xFF\x9F\xE0\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x3F\xFF\xCF\x3F\xF0\x7F\xFF\xEF\x7F\xF8\x7F\xFF'
b'\xFF\xFF\xF8\x7F\xFF\xEF\x7F\xF0\x3F\xFF\xC6\x3F\xF0\x00\x00\x00'
b'\x00\x00\x04\x1F\xF8\x7C\x00\x0F\x7F\xFD\xFF\x00\x1F\x7F\xFD\xFF'
b'\x00\x1F\x7F\xFD\xFF\x00\x0F\x7F\xFD\xFF\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# fog, code 30
foggy = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\xFC\x00\x00\x00\x03\xFF\x00\x00\x00\x0F\xFF\x80\x00\x00\x1F\xFF'
b'\xC0\x00\x00\x1F\x07\xE0\x00\x00\x3E\x01\xE0\x00\x00\x3C\x01\xE0'
b'\x00\x00\xFC\x00\xFE\x00\x01\xF8\x00\xFF\x80\x03\xF8\x00\xFF\xC0'
b'\x07\xC0\x00\x07\xC0\x07\x80\x00\x03\xC0\x07\x00\x00\x01\xC0\x00'
b'\x00\x00\x00\x00\x07\xFF\xFF\xFF\xC0\x07\xFF\xFF\xFF\xC0\x07\xFF'
b'\xFF\xFF\xC0\x00\x00\x00\x00\x00\x7F\xFF\xFF\xFC\x00\x7F\xFF\xFF'
b'\xFE\x00\x7F\xFF\xFF\xFE\x00\x3F\xFF\xFF\xFC\x00\x00\x00\x00\x00'
b'\x00\x01\xFF\xFF\xFF\xF8\x01\xFF\xFF\xFF\xF8\x00\xFF\xFF\xFF\xF8'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# haze, code 31
haze = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x01\x80\x00\x00\x00\x03\x80\x00\x00\x00'
b'\x03\x80\x00\x00\x00\x03\x80\x00\x00\x03\x83\x83\xC0\x00\x03\xC1'
b'\x87\xC0\x00\x03\xC0\x07\x80\x00\x00\x0F\xE7\x00\x00\x07\xFF\xF8'
b'\x00\x00\x1F\xFF\xF8\x00\x00\x3F\xFC\x3C\x00\x00\x7C\x3E\x1C\x00'
b'\x00\x78\x1E\x1C\xF8\x00\xF0\x0F\x9C\xF8\x03\xF0\x0F\xFC\xF8\x07'
b'\xE0\x07\xFC\x00\x07\xC0\x01\xFC\x00\x0F\x00\x00\x78\x00\x0E\x00'
b'\x00\x38\x00\x07\xFF\xFF\xFB\x00\x0F\xFF\xFF\xFB\x80\x0F\xFF\xFF'
b'\xFB\xC0\x00\x00\x00\x01\x80\x7F\xFF\xFF\xC0\x00\x7F\xFF\xFF\xC0'
b'\x00\x7F\xFF\xFF\xC0\x00\x00\x00\x00\x00\x00\x03\xFF\xFF\xFE\x00'
b'\x03\xFF\xFF\xFF\x00\x01\xFF\xFF\xFE\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# wind, code 32
windy = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xFF\xFF\xF0'
b'\x00\x07\xFF\xFF\xF8\x00\x07\xFF\xFF\xF8\x00\x07\xFF\xFF\xF8\x00'
b'\x00\x00\x00\x00\x00\x3F\xFF\xFF\x3F\xF0\x7F\xFF\xFF\xBF\xF8\x7F'
b'\xFF\xFF\xBF\xF8\x7F\xFF\xFF\xBF\xF0\x00\x00\x00\x00\x00\x00\xFF'
b'\xFF\xFF\x00\x00\xFF\xFF\xFF\x00\x00\xFF\xFF\xFF\x00\x00\x7F\xFF'
b'\xFE\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# strong wind, code 33
strong_wind = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x03\xE0\x00\x00\x00\x07\xF0\x00\x00\x00\x07\xF8\x00\x00\x00\x07'
b'\xF8\x00\x00\x00\x02\x78\x00\x00\x00\x00\xF8\x7F\xFF\xFF\xFF\xF8'
b'\x7F\xFF\xFF\xFF\xF0\x7F\xFF\xFF\xFF\xE0\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x7F\xFF\xFF\xE0\x00\x7F\xFF\xFF\xF0\x00\x7F\xFF'
b'\xFF\xF8\x00\x00\x00\x00\x78\x00\x00\x00\x02\x78\x00\x00\x00\x07'
b'\xF8\x00\x00\x00\x07\xF8\x00\x00\x00\x07\xF0\x00\x00\x00\x03\xE0'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# hurricane, codes 34~36
hurricane = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x1E\x00\x00\x00\x00\x7E\x00\x00\x00\x00\xFE\x00\x00\x00\x01'
b'\xFC\x00\x00\x00\x03\xF0\x00\x00\x00\x03\xE0\x00\x00\x00\x07\xC0'
b'\x00\x00\x00\x0F\xB0\x00\x00\x00\x0F\xFE\x00\x00\x00\x0F\xFF\x80'
b'\x00\x00\x1F\xFF\xC0\x00\x00\x1F\xFF\xC0\x00\x00\x1F\x87\xE0\x00'
b'\x00\x1F\x03\xE0\x00\x00\x1E\x01\xE0\x00\x00\x3E\x01\xF0\x00\x00'
b'\x3E\x01\xF0\x00\x00\x1E\x01\xE0\x00\x00\x1F\x03\xE0\x00\x00\x1F'
b'\x87\xE0\x00\x00\x0F\xFF\xE0\x00\x00\x0F\xFF\xE0\x00\x00\x07\xFF'
b'\xC0\x00\x00\x01\xFF\xC0\x00\x00\x00\x07\xC0\x00\x00\x00\x0F\x80'
b'\x00\x00\x00\x1F\x00\x00\x00\x00\x3F\x00\x00\x00\x00\x7E\x00\x00'
b'\x00\x01\xFC\x00\x00\x00\x01\xF8\x00\x00\x00\x01\xE0\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# cold, code 37
cold = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x78\x00\x00\x00'
b'\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00\x80'
b'\x30\x04\x00\x01\xC0\x78\x0F\x00\x03\xC0\x78\x0F\x00\x01\xD0\x78'
b'\x2E\x00\x00\x3C\x78\xF0\x00\x00\x7E\x79\xF8\x00\x00\x3F\x7B\xF0'
b'\x00\x00\x3F\xFF\xF0\x00\x00\x1F\xFF\xE0\x00\x00\x0F\xFF\xC0\x00'
b'\x00\x07\xFF\x80\x00\x3D\xFF\xFF\xFE\xF0\x3F\xFF\xFF\xFF\xF0\x3F'
b'\xFF\xFF\xFF\xF0\x3D\xFF\xFF\xFE\xF0\x00\x07\xFF\x80\x00\x00\x0F'
b'\xFF\xC0\x00\x00\x1F\xFF\xE0\x00\x00\x3F\xFB\xF0\x00\x00\x3F\x79'
b'\xF0\x00\x00\x7E\x78\xF8\x00\x00\x3C\x78\x70\x00\x01\xD0\x78\x2E'
b'\x00\x03\xC0\x78\x0F\x00\x01\xC0\x78\x0F\x00\x00\x80\x30\x04\x00'
b'\x00\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00\x00\x78\x00\x00\x00'
b'\x00\x78\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# hot, code 38
hot = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x00\x00\x00'
b'\x01\xC0\x00\x00\x00\x03\xC0\x00\x00\x00\x07\xC0\x00\x00\x00\x0F'
b'\xC0\x00\x00\x00\x1F\xC3\x00\x00\x00\x1F\xC3\x80\x00\x00\x3F\xE1'
b'\xC0\x00\x00\x3F\xF1\xE0\x00\x00\x3F\xFB\xE0\x00\x00\x7F\xFF\xF0'
b'\x00\x00\x7F\xFF\xF0\x00\x00\x7F\xFF\xF8\x00\x00\x7F\xFF\xF8\x00'
b'\x00\x7F\xFF\xF8\x00\x01\x3F\xFF\xF8\x00\x03\xBF\xFF\xF8\x00\x03'
b'\xFF\xFF\xFB\x00\x03\xFF\xFF\xFB\x00\x03\xFF\xFF\xFF\x00\x03\xFF'
b'\xFF\xFF\x00\x03\xFF\xFF\xFF\x00\x03\xFF\xFF\xFF\x00\x03\xFF\xFF'
b'\xFF\x00\x03\xFF\xFF\xFF\x00\x01\xFF\xFF\xFE\x00\x00\xFF\xFF\xFE'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xFF\xFF\xFF\x80'
b'\x0F\xFF\xFF\xFF\xC0\x0F\xFF\xFF\xFF\xC0\x0F\xFF\xFF\xFF\xC0\x07'
b'\xFF\xFF\xFF\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# NA, code 99
na = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1F\x3E\x0F\x3E'
b'\x00\x1F\x3E\x0F\x7E\x00\x1F\xBE\x1F\x7F\x00\x1F\xFE\x1E\xFF\x00'
b'\x1F\xFE\x1E\xFF\x00\x1F\xFE\x3C\xFF\x80\x1F\xFE\x3D\xFF\x80\x1F'
b'\xFE\x3D\xFF\x80\x1F\xFE\x79\xFF\xC0\x1E\xFE\x7B\xFF\xC0\x1E\xFE'
b'\xFB\xFF\xE0\x1E\x7E\xF3\xFF\xE0\x1E\x3E\xF7\xE3\xE0\x1E\x3F\xE7'
b'\xC3\xE0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
# thermometer
thermometer = bytearray(
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\xFC\x00\x00\x00\x01\xFE\x00\x00\x00\x01\xFF\x00\x00\x00\x03'
b'\xCF\x00\x00\x00\x03\xCF\x00\x00\x00\x03\xCF\x00\x00\x00\x03\xCF'
b'\x00\x00\x00\x03\xCF\x00\x00\x00\x03\xCF\x00\x00\x00\x03\xFF\x00'
b'\x00\x00\x03\xFF\x00\x00\x00\x03\xFF\x00\x00\x00\x03\xFF\x00\x00'
b'\x00\x03\xFF\x00\x00\x00\x03\xFF\x00\x00\x00\x03\xFF\x00\x00\x00'
b'\x03\xFF\x00\x00\x00\x03\xFF\x00\x00\x00\x03\xFF\x00\x00\x00\x07'
b'\xFF\x80\x00\x00\x0F\xFF\xC0\x00\x00\x0F\xFF\xC0\x00\x00\x1F\xFD'
b'\xE0\x00\x00\x1F\xFF\xE0\x00\x00\x1F\xFF\xE0\x00\x00\x1F\xFF\xE0'
b'\x00\x00\x1F\xFF\xE0\x00\x00\x0F\xFF\xC0\x00\x00\x0F\xB7\xC0\x00'
b'\x00\x07\xFF\x80\x00\x00\x07\xFF\x80\x00\x00\x01\xFE\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
)
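# Map numeric weather-condition codes (the codes noted in the icon comments
# above) to the names of the icon bytearrays defined in this module.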
code_map = {0: "sunny", 1: "clear", 2: "sunny", 3: "clear", 4: "cloud", 5: "day_partly_cloudy", 6: "night_partly_cloudy", 7: "night_cloudy",
8: "day_cloudy", 9: "cloudy", 10: "shower", 11: "shower", 19: "shower", 12: "shower_hail", 13: "light_rain", 14: "moderate_rain",
15: "heavy_rain", 16: "storm", 17: "storm", 18: "storm", 20: "sleet", 21: "snow", 22: "snow", 23: "snow", 24: "heavy_snow", 25: "dust_storm",
26: "dust_storm", 27: "dust_storm", 28: "dust_storm", 29: "dust_storm", 30: "foggy", 31: "haze", 32: "windy", 33: "strong_wind",
34: "hurricane", 35: "hurricane", 36: "hurricane", 37: "cold", 38: "hot", 99: "na"
}
def from_code(code):
    """Return the bitmap data for the given weather icon code."""
    # Look the name up in module globals instead of using eval(), and fall back
    # to the "na" icon so an unknown code cannot raise eval(None) -> TypeError.
    icon_name = code_map.get(code, "na")
    return globals()[icon_name]
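# Usage sketch (an addition, not part of the original module; kept as comments
# so importing this file stays side-effect free): wrap an icon in a MicroPython
# framebuf.FrameBuffer so a display driver can blit it. The 40x38 geometry is
# inferred from each icon's 190-byte payload (40 * 38 / 8 = 190) and the
# MONO_HLSB bit layout is an assumption; adjust both to match the actual panel.
#
#   import framebuf
#   icon = from_code(13)  # light_rain bitmap
#   fb = framebuf.FrameBuffer(icon, 40, 38, framebuf.MONO_HLSB)
#   display.blit(fb, 0, 0)  # `display` is a hypothetical driver instance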
| 56.426637 | 148 | 0.676641 | 5,733 | 24,997 | 2.944183 | 0.034014 | 0.625274 | 0.645714 | 0.650512 | 0.720896 | 0.640441 | 0.602465 | 0.56354 | 0.520765 | 0.48901 | 0 | 0.348179 | 0.07933 | 24,997 | 442 | 149 | 56.554299 | 0.385244 | 0.013602 | 0 | 0.233333 | 0 | 0.830769 | 0.845754 | 0.833367 | 0 | 1 | 0 | 0 | 0 | 1 | 0.002564 | false | 0 | 0.002564 | 0 | 0.007692 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
9977f71ed8e3f8d6bd703f888b34c83cd4f7a772 | 13,313 | py | Python | test/unit/agent/collectors/nginx/accesslog/method.py | empiricompany/nginx-amplify-agent | 2ea46f037ef158e5d4f56f2532010c72c5f8842c | [
"BSD-2-Clause"
] | 1 | 2021-06-20T06:03:54.000Z | 2021-06-20T06:03:54.000Z | test/unit/agent/collectors/nginx/accesslog/method.py | SammyEnigma/nginx-amplify-agent | 81c4002c156809039933234abeb292edee3ac492 | [
"BSD-2-Clause"
] | null | null | null | test/unit/agent/collectors/nginx/accesslog/method.py | SammyEnigma/nginx-amplify-agent | 81c4002c156809039933234abeb292edee3ac492 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from collections import defaultdict
from hamcrest import *
from amplify.agent.collectors.nginx.accesslog import NginxAccessLogParser, NginxAccessLogsCollector
from test.base import NginxCollectorTestCase
from test.helpers import collected_metric
__author__ = "Mike Belov"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__license__ = ""
__maintainer__ = "Mike Belov"
__email__ = "dedm@nginx.com"
class LogsPerMethodTestCase(NginxCollectorTestCase):
def test_http_method(self):
line = '127.0.0.1 - - [02/Jul/2015:14:49:48 +0000] "GET /basic_status HTTP/1.1" 200 110 "-" ' + \
'"python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic"'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.http_method(NginxAccessLogParser().parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, has_item('counter'))
counters = metrics['counter']
assert_that(counters, has_item('nginx.http.method.get'))
assert_that(counters['nginx.http.method.get'][0][1], equal_to(1))
def test_non_standard_http_method(self):
line = '127.0.0.1 - - [02/Jul/2015:14:49:48 +0000] "PROPFIND /basic_status HTTP/1.1" 200 110 "-" ' + \
'"python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic"'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.http_method(NginxAccessLogParser().parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, has_item('counter'))
counters = metrics['counter']
assert_that(counters, has_item('nginx.http.method.other'))
assert_that(counters['nginx.http.method.other'][0][1], equal_to(1))
def test_http_status(self):
line = '127.0.0.1 - - [02/Jul/2015:14:49:48 +0000] "GET /basic_status HTTP/1.1" 200 110 "-" ' + \
'"python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic"'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.http_status(NginxAccessLogParser().parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, has_item('counter'))
counters = metrics['counter']
assert_that(counters, has_item('nginx.http.status.2xx'))
assert_that(counters['nginx.http.status.2xx'][0][1], equal_to(1))
def test_http_status_discarded(self):
line_template = (
'127.0.0.1 - - [02/Jul/2015:14:49:48 +0000] "GET /basic_status HTTP/1.1" %d 110 "-" '
'"python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic"'
)
# collect requests with $status 400 to 498
lines = [line_template % x for x in range(400, 499)]
NginxAccessLogsCollector(object=self.fake_object, tail=lines).collect()
counter = self.fake_object.statsd.flush()['metrics']['counter']
assert_that(counter, has_entries(
'C|nginx.http.status.4xx', collected_metric(99),
'C|nginx.http.status.discarded', collected_metric(0)
))
# collect single request with $status 499
tail = [line_template % 499]
NginxAccessLogsCollector(object=self.fake_object, tail=tail).collect()
counter = self.fake_object.statsd.flush()['metrics']['counter']
assert_that(counter, has_entries(
'C|nginx.http.status.4xx', collected_metric(1),
'C|nginx.http.status.discarded', collected_metric(1)
))
def test_upstreams(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status'
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="2.001, 0.345" cs=MISS'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.upstreams(NginxAccessLogParser(log_format).parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, has_item('counter'))
assert_that(metrics, has_item('timer'))
# counters
counters = metrics['counter']
assert_that(counters, has_item('nginx.upstream.request.count'))
assert_that(counters, has_item('nginx.upstream.next.count'))
assert_that(counters, has_item('nginx.cache.miss'))
assert_that(counters['nginx.upstream.request.count'][0][1], equal_to(1))
assert_that(counters['nginx.upstream.next.count'][0][1], equal_to(1))
assert_that(counters['nginx.cache.miss'][0][1], equal_to(1))
# histogram
histogram = metrics['timer']
assert_that(histogram, has_item('nginx.upstream.response.time'))
assert_that(histogram['nginx.upstream.response.time'], equal_to([2.001 + 0.345]))
def test_empty_upstreams(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time cs=$upstream_cache_status ut="$upstream_response_time"'
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 cs=- ut="-"'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.upstreams(NginxAccessLogParser(log_format).parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, equal_to(defaultdict()))
# counters
counters = metrics['counter']
assert_that(counters, equal_to({}))
# histogram
histogram = metrics['timer']
assert_that(histogram, equal_to({}))
def test_part_empty_upstreams(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status'
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="-" cs=MISS'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.upstreams(NginxAccessLogParser(log_format).parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, has_item('counter'))
# counters
counters = metrics['counter']
assert_that(counters, has_item('nginx.upstream.request.count'))
assert_that(counters, has_item('nginx.upstream.next.count'))
assert_that(counters, has_item('nginx.cache.miss'))
assert_that(counters['nginx.upstream.request.count'][0][1], equal_to(1))
assert_that(counters['nginx.upstream.next.count'][0][1], equal_to(0))
assert_that(counters['nginx.cache.miss'][0][1], equal_to(1))
# histogram
histogram = metrics['timer']
assert_that(histogram, equal_to({}))
def test_part_empty_upstreams2(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status'
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="2.001, 0.345" cs=-'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.upstreams(NginxAccessLogParser(log_format).parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, has_item('counter'))
assert_that(metrics, has_item('timer'))
# counters
counters = metrics['counter']
assert_that(counters, has_item('nginx.upstream.request.count'))
assert_that(counters, has_item('nginx.upstream.next.count'))
        # Note: `not has_item(...)` collapses to a plain bool and never asserts
        # anything; is_not() is the hamcrest negation matcher.
        assert_that(counters, is_not(has_item('nginx.cache.miss')))
assert_that(counters['nginx.upstream.request.count'][0][1], equal_to(1))
assert_that(counters['nginx.upstream.next.count'][0][1], equal_to(1))
# histogram
histogram = metrics['timer']
assert_that(histogram, has_item('nginx.upstream.response.time'))
assert_that(histogram['nginx.upstream.response.time'], equal_to([2.001 + 0.345]))
def test_upstream_status_and_length(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status ' + \
'us=$upstream_status $upstream_response_length'
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="2.001, 0.345" cs=MISS ' + \
'us=200 20'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.upstreams(NginxAccessLogParser(log_format).parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, has_item('counter'))
assert_that(metrics, has_item('average'))
assert_that(metrics, has_item('timer'))
# counters
counters = metrics['counter']
assert_that(counters, has_item('nginx.upstream.request.count'))
assert_that(counters, has_item('nginx.upstream.next.count'))
assert_that(counters, has_item('nginx.cache.miss'))
assert_that(counters, has_item('nginx.upstream.status.2xx'))
assert_that(counters['nginx.upstream.request.count'][0][1], equal_to(1))
assert_that(counters['nginx.upstream.next.count'][0][1], equal_to(1))
assert_that(counters['nginx.upstream.status.2xx'][0][1], equal_to(1))
# averages
averages = metrics['average']
assert_that(averages, has_item('nginx.upstream.response.length'))
assert_that(averages['nginx.upstream.response.length'][0], equal_to(20))
# histogram
histogram = metrics['timer']
assert_that(histogram, has_item('nginx.upstream.response.time'))
assert_that(histogram['nginx.upstream.response.time'], equal_to([2.001 + 0.345]))
def test_upstream_status_and_length2(self):
"""
Test 3XX status for response length as well.
"""
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status ' + \
'us=$upstream_status $upstream_response_length'
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="2.001, 0.345" cs=MISS ' + \
'us=300 40'
# run single method
collector = NginxAccessLogsCollector(object=self.fake_object, tail=[])
collector.upstreams(NginxAccessLogParser(log_format).parse(line))
# check
metrics = self.fake_object.statsd.current
assert_that(metrics, has_item('counter'))
assert_that(metrics, has_item('timer'))
# counters
counters = metrics['counter']
assert_that(counters, has_item('nginx.upstream.request.count'))
assert_that(counters, has_item('nginx.upstream.next.count'))
assert_that(counters, has_item('nginx.cache.miss'))
assert_that(counters, has_item('nginx.upstream.status.3xx'))
assert_that(counters['nginx.upstream.request.count'][0][1], equal_to(1))
assert_that(counters['nginx.upstream.next.count'][0][1], equal_to(1))
assert_that(counters['nginx.upstream.status.3xx'][0][1], equal_to(1))
# averages
averages = metrics['average']
assert_that(averages, has_item('nginx.upstream.response.length'))
assert_that(averages['nginx.upstream.response.length'][0], equal_to(40))
# histogram
histogram = metrics['timer']
assert_that(histogram, has_item('nginx.upstream.response.time'))
assert_that(histogram['nginx.upstream.response.time'], equal_to([2.001 + 0.345]))
| 45.906897 | 120 | 0.633666 | 1,677 | 13,313 | 4.841383 | 0.100775 | 0.083754 | 0.084247 | 0.049144 | 0.910087 | 0.907008 | 0.894568 | 0.862668 | 0.862668 | 0.856017 | 0 | 0.053807 | 0.219635 | 13,313 | 289 | 121 | 46.065744 | 0.727693 | 0.037257 | 0 | 0.702564 | 0 | 0.102564 | 0.347076 | 0.154413 | 0 | 0 | 0 | 0 | 0.348718 | 1 | 0.051282 | false | 0 | 0.025641 | 0 | 0.082051 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
99a1c985a765bc0b34c32e913761fb9261cc0cd5 | 4,663 | py | Python | test/api/test_associations_to_all_features2.py | xu-hao/ddcr-api | f69c80a84d413078bd36985b6579d2bc32329b8f | [
"MIT"
] | null | null | null | test/api/test_associations_to_all_features2.py | xu-hao/ddcr-api | f69c80a84d413078bd36985b6579d2bc32329b8f | [
"MIT"
] | null | null | null | test/api/test_associations_to_all_features2.py | xu-hao/ddcr-api | f69c80a84d413078bd36985b6579d2bc32329b8f | [
"MIT"
] | null | null | null | """Test API."""
from fastapi.testclient import TestClient
from icees_api.app import APP
from ..util import load_data
testclient = TestClient(APP)
table = "patient"
year = 2010
age_levels = [
'0-2',
'3-17',
'18-34',
'35-50',
'51-69',
'70-89',
]
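# Each test below seeds the test database through the load_data helper with two
# inline CSV blocks: the patient table (a header row, a row of column types,
# then data rows) and a precomputed cohort row covering all 17 patients.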
@load_data(
APP,
"""
PatientId,year,AgeStudyStart,Albuterol,AvgDailyPM2.5Exposure,EstResidentialDensity,AsthmaDx
varchar(255),int,varchar(255),varchar(255),int,int,int
1,2010,0-2,0,1,0,1
2,2010,0-2,1,1,0,1
3,2010,0-2,>1,1,0,1
4,2010,0-2,0,2,0,1
5,2010,0-2,1,2,0,1
6,2010,0-2,>1,2,0,1
7,2010,0-2,0,3,0,1
8,2010,0-2,1,3,0,1
9,2010,0-2,>1,3,0,1
10,2010,0-2,0,4,0,1
11,2010,0-2,1,4,0,1
12,2010,0-2,>1,4,0,1
13,2010,3-17,>1,4,0,1
14,2010,18-34,>1,4,0,1
15,2010,35-50,>1,4,0,1
16,2010,51-69,>1,4,0,1
17,2010,70-89,>1,4,0,1
""",
"""
cohort_id,size,features,table,year
COHORT:1,17,"{}",patient,2010
"""
)
def test_associations_to_all_features2_explicit():
cohort_id = "COHORT:1"
atafdata = {
"feature": {
"feature_name": "AgeStudyStart",
"feature_qualifiers": list(map(lambda x: {
"operator": "=",
"value": x
}, age_levels))
},
"maximum_p_value": 1
}
resp = testclient.post(
f"/{table}/cohort/{cohort_id}/associations_to_all_features2",
json=atafdata,
)
resp_json = resp.json()
assert "return value" in resp_json
assert isinstance(resp_json["return value"], list)
@load_data(
APP,
"""
PatientId,year,AgeStudyStart,Albuterol,AvgDailyPM2.5Exposure,EstResidentialDensity,AsthmaDx
varchar(255),int,varchar(255),varchar(255),int,int,int
1,2010,0-2,0,1,0,1
2,2010,0-2,1,1,0,1
3,2010,0-2,>1,1,0,1
4,2010,0-2,0,2,0,1
5,2010,0-2,1,2,0,1
6,2010,0-2,>1,2,0,1
7,2010,0-2,0,3,0,1
8,2010,0-2,1,3,0,1
9,2010,0-2,>1,3,0,1
10,2010,0-2,0,4,0,1
11,2010,0-2,1,4,0,1
12,2010,0-2,>1,4,0,1
13,2010,3-17,>1,4,0,1
14,2010,18-34,>1,4,0,1
15,2010,35-50,>1,4,0,1
16,2010,51-69,>1,4,0,1
17,2010,70-89,>1,4,0,1
""",
"""
cohort_id,size,features,table,year
COHORT:1,17,"{}",patient,2010
"""
)
def test_associations_to_all_features2():
cohort_id = "COHORT:1"
atafdata = {
"feature": {
"AgeStudyStart": list(map(lambda x: {
"operator": "=",
"value": x
}, age_levels))
},
"maximum_p_value": 1
}
resp = testclient.post(
f"/{table}/cohort/{cohort_id}/associations_to_all_features2",
json=atafdata,
)
resp_json = resp.json()
assert "return value" in resp_json
assert isinstance(resp_json["return value"], list)
@load_data(
APP,
"""
PatientId,year,AgeStudyStart,Albuterol,AvgDailyPM2.5Exposure,EstResidentialDensity,AsthmaDx
varchar(255),int,varchar(255),varchar(255),int,int,int
1,2010,0-2,0,1,0,1
2,2010,0-2,1,1,0,1
3,2010,0-2,>1,1,0,1
4,2010,0-2,0,2,0,1
5,2010,0-2,1,2,0,1
6,2010,0-2,>1,2,0,1
7,2010,0-2,0,3,0,1
8,2010,0-2,1,3,0,1
9,2010,0-2,>1,3,0,1
10,2010,0-2,0,4,0,1
11,2010,0-2,1,4,0,1
12,2010,0-2,>1,4,0,1
13,2010,3-17,>1,4,0,1
14,2010,18-34,>1,4,0,1
15,2010,35-50,>1,4,0,1
16,2010,51-69,>1,4,0,1
17,2010,70-89,>1,4,0,1
""",
"""
cohort_id,size,features,table,year
COHORT:1,17,"{}",patient,2010
"""
)
def test_associations_to_all_features2b():
cohort_id = "COHORT:1"
atafdata = {
"feature": {
"AgeStudyStart": [
{
"operator": "=",
"value": "0-2"
}, {
"operator": "in",
"values": ["3-17", "18-34"]
}, {
"operator": "in",
"values": ["35-50", "51-69"]
}, {
"operator": "=",
"value": "70-89"
}
]
},
"maximum_p_value": 1
}
resp = testclient.post(
f"/{table}/cohort/{cohort_id}/associations_to_all_features2",
json=atafdata,
)
resp_json = resp.json()
assert "return value" in resp_json
assert isinstance(resp_json["return value"], list)
| 26.494318 | 99 | 0.500751 | 734 | 4,663 | 3.104905 | 0.117166 | 0.047389 | 0.094778 | 0.073717 | 0.862659 | 0.862659 | 0.849495 | 0.81176 | 0.81176 | 0.81176 | 0 | 0.227387 | 0.317178 | 4,663 | 175 | 100 | 26.645714 | 0.488379 | 0.00193 | 0 | 0.526882 | 0 | 0 | 0.231221 | 0.071758 | 0 | 0 | 0 | 0 | 0.064516 | 1 | 0.032258 | false | 0 | 0.043011 | 0 | 0.075269 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
41f5c771c2ea0c2ca50d4afb0866dc9b8b1827ae | 10,091 | py | Python | stage/configuration/test_salesforce_origin.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | [
"Apache-2.0"
] | null | null | null | stage/configuration/test_salesforce_origin.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | [
"Apache-2.0"
] | 1 | 2019-04-24T11:06:38.000Z | 2019-04-24T11:06:38.000Z | stage/configuration/test_salesforce_origin.py | anubandhan/datacollector-tests | 301c024c66d68353735256b262b681dd05ba16cc | [
"Apache-2.0"
] | 2 | 2019-05-24T06:34:37.000Z | 2020-03-30T11:48:18.000Z | import pytest
from streamsets.testframework.decorators import stub
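# Per-configuration test scaffolding: each test below is a placeholder, and the
# @stub decorator (from the StreamSets test framework) appears to mark it as
# not yet implemented until a real assertion body is written.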
@stub
def test_api_version(sdc_builder, sdc_executor):
pass
@stub
def test_auth_endpoint(sdc_builder, sdc_executor):
pass
@stub
def test_batch_wait_time_in_ms(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'subscribe_for_notifications': True, 'subscription_type': 'CDC'}])
def test_change_data_capture_object(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True,
'use_bulk_api': True,
'use_pk_chunking': True}])
def test_chunk_size(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'create_salesforce_attributes': False},
{'create_salesforce_attributes': True}])
def test_create_salesforce_attributes(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'disable_query_validation': False}, {'disable_query_validation': True}])
def test_disable_query_validation(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'include_deleted_records': False, 'query_existing_data': True},
{'include_deleted_records': True, 'query_existing_data': True}])
def test_include_deleted_records(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True}])
def test_initial_offset(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_mutual_authentication': True}])
def test_keystore_file(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_mutual_authentication': True}])
def test_keystore_key_algorithm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_mutual_authentication': True}])
def test_keystore_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'keystore_type': 'JKS', 'use_mutual_authentication': True},
{'keystore_type': 'PKCS12', 'use_mutual_authentication': True}])
def test_keystore_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_max_batch_size_in_records(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'mismatched_types_behavior': 'PRESERVE_DATA'},
{'mismatched_types_behavior': 'ROUND_DATA'},
{'mismatched_types_behavior': 'TRUNCATE_DATA'}])
def test_mismatched_types_behavior(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True}])
def test_offset_field(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
{'on_record_error': 'STOP_PIPELINE'},
{'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_password(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'subscribe_for_notifications': True,
'subscription_type': 'PLATFORM_EVENT'}])
def test_platform_event_api_name(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_proxy': True}])
def test_proxy_hostname(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'proxy_requires_credentials': True, 'use_proxy': True}])
def test_proxy_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_proxy': True}])
def test_proxy_port(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'proxy_requires_credentials': True, 'use_proxy': True}])
def test_proxy_realm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'proxy_requires_credentials': False, 'use_proxy': True},
{'proxy_requires_credentials': True, 'use_proxy': True}])
def test_proxy_requires_credentials(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'proxy_requires_credentials': True, 'use_proxy': True}])
def test_proxy_username(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'subscribe_for_notifications': True, 'subscription_type': 'PUSH_TOPIC'}])
def test_push_topic(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': False}, {'query_existing_data': True}])
def test_query_existing_data(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True,
'repeat_query': 'FULL',
'subscribe_for_notifications': False},
{'query_existing_data': True,
'repeat_query': 'INCREMENTAL',
'subscribe_for_notifications': False}])
def test_query_interval(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True,
'repeat_query': 'FULL',
'subscribe_for_notifications': False},
{'query_existing_data': True,
'repeat_query': 'INCREMENTAL',
'subscribe_for_notifications': False},
{'query_existing_data': True,
'repeat_query': 'NO_REPEAT',
'subscribe_for_notifications': False}])
def test_repeat_query(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'replay_option': 'ALL_EVENTS',
'subscribe_for_notifications': True,
'subscription_type': 'PLATFORM_EVENT'},
{'replay_option': 'NEW_EVENTS',
'subscribe_for_notifications': True,
'subscription_type': 'PLATFORM_EVENT'}])
def test_replay_option(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'create_salesforce_attributes': True}])
def test_salesforce_attribute_prefix(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True}])
def test_soql_query(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True,
'use_bulk_api': True,
'use_pk_chunking': True}])
def test_start_id(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'subscribe_for_notifications': False},
{'subscribe_for_notifications': True}])
def test_subscribe_for_notifications(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'subscribe_for_notifications': True, 'subscription_type': 'CDC'},
{'subscribe_for_notifications': True,
'subscription_type': 'PLATFORM_EVENT'},
{'subscribe_for_notifications': True, 'subscription_type': 'PUSH_TOPIC'}])
def test_subscription_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True, 'use_bulk_api': False},
{'query_existing_data': True, 'use_bulk_api': True}])
def test_use_bulk_api(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_mutual_authentication': False}, {'use_mutual_authentication': True}])
def test_use_mutual_authentication(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'query_existing_data': True,
'use_bulk_api': True,
'use_pk_chunking': False},
{'query_existing_data': True,
'use_bulk_api': True,
'use_pk_chunking': True}])
def test_use_pk_chunking(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_proxy': False}, {'use_proxy': True}])
def test_use_proxy(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_username(sdc_builder, sdc_executor):
pass
| 36.039286 | 121 | 0.634526 | 1,031 | 10,091 | 5.798254 | 0.102813 | 0.170626 | 0.086986 | 0.140515 | 0.831884 | 0.812981 | 0.782536 | 0.77551 | 0.753764 | 0.733188 | 0 | 0.000267 | 0.259043 | 10,091 | 279 | 122 | 36.168459 | 0.799251 | 0 | 0 | 0.619289 | 0 | 0 | 0.254609 | 0.095441 | 0 | 0 | 0 | 0 | 0 | 1 | 0.203046 | false | 0.218274 | 0.010152 | 0 | 0.213198 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
5136e441126646e34ba4e59353a81df48f02000a | 4,445 | py | Python | stubs.min/Autodesk/Revit/DB/__init___parts/UnitFormatUtils.py | ricardyn/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | 1 | 2021-02-02T13:39:16.000Z | 2021-02-02T13:39:16.000Z | stubs.min/Autodesk/Revit/DB/__init___parts/UnitFormatUtils.py | hdm-dt-fb/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | null | null | null | stubs.min/Autodesk/Revit/DB/__init___parts/UnitFormatUtils.py | hdm-dt-fb/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | null | null | null | class UnitFormatUtils(object):
""" A utility class for formatting and parsing numbers with units. """
@staticmethod
def Format(units,unitType,value,maxAccuracy,forEditing,formatValueOptions=None):
"""
Format(units: Units,unitType: UnitType,value: float,maxAccuracy: bool,forEditing: bool,formatValueOptions: FormatValueOptions) -> str
Formats a number with units into a string.
units: The units formatting settings,typically obtained from
Autodesk.Revit.DB.Document.GetUnits().
unitType: The unit type of the value to format.
value: The value to format,in Revit's internal units.
maxAccuracy: True if the value should be rounded to an increased accuracy level appropriate
for editing or understanding the precise value stored in the model. False if
the accuracy specified by the FormatOptions should be used,appropriate for
printed drawings.
forEditing: True if the formatting should be modified as necessary so that the formatted
string can be successfully parsed,for example by suppressing digit grouping.
False if unmodified settings should be used,suitable for display only.
formatValueOptions: Additional formatting options.
Returns: The formatted string.
Format(units: Units,unitType: UnitType,value: float,maxAccuracy: bool,forEditing: bool) -> str
Formats a number with units into a string.
units: The units formatting settings,typically obtained from
Autodesk.Revit.DB.Document.GetUnits().
unitType: The unit type of the value to format.
value: The value to format,in Revit's internal units.
maxAccuracy: True if the value should be rounded to an increased accuracy level appropriate
for editing or understanding the precise value stored in the model. False if
the accuracy specified by the FormatOptions should be used,appropriate for
printed drawings.
forEditing: True if the formatting should be modified as necessary so that the formatted
string can be successfully parsed,for example by suppressing digit grouping.
False if unmodified settings should be used,suitable for display only.
Returns: The formatted string.
"""
pass
@staticmethod
def TryParse(units,unitType,stringToParse,*__args):
"""
TryParse(units: Units,unitType: UnitType,stringToParse: str,valueParsingOptions: ValueParsingOptions) -> (bool,float,str)
Parses a formatted string into a number with units if possible.
units: The units formatting settings,typically obtained from
Autodesk.Revit.DB.Document.GetUnits().
unitType: The target unit type for the value.
stringToParse: The string to parse.
valueParsingOptions: Additional parsing options.
Returns: True if the string can be parsed,false otherwise.
TryParse(units: Units,unitType: UnitType,stringToParse: str,valueParsingOptions: ValueParsingOptions) -> (bool,float)
Parses a formatted string into a number with units if possible.
units: The units formatting settings,typically obtained from
Autodesk.Revit.DB.Document.GetUnits().
unitType: The target unit type for the value.
stringToParse: The string to parse.
valueParsingOptions: Additional parsing options.
Returns: True if the string can be parsed,false otherwise.
TryParse(units: Units,unitType: UnitType,stringToParse: str) -> (bool,float,str)
Parses a formatted string into a number with units if possible.
units: The units formatting settings,typically obtained from
Autodesk.Revit.DB.Document.GetUnits().
unitType: The target unit type for the value.
stringToParse: The string to parse.
Returns: True if the string can be parsed,false otherwise.
TryParse(units: Units,unitType: UnitType,stringToParse: str) -> (bool,float)
Parses a formatted string into a number with units if possible.
units: The units formatting settings,typically obtained from
Autodesk.Revit.DB.Document.GetUnits().
unitType: The target unit type for the value.
stringToParse: The string to parse.
Returns: True if the string can be parsed,false otherwise.
"""
pass
__all__=[
'Format',
'TryParse',
]
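# Usage sketch (an illustration, not part of the generated stub; kept as
# comments so the stub module keeps no runtime behavior). Inside Revit's
# IronPython host, with `doc` the active Autodesk.Revit.DB.Document:
#
#   units = doc.GetUnits()
#   text = UnitFormatUtils.Format(units, UnitType.UT_Length, 3.5, False, False)
#   ok, value = UnitFormatUtils.TryParse(units, UnitType.UT_Length, text)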
| 44.89899 | 137 | 0.728234 | 549 | 4,445 | 5.885246 | 0.182149 | 0.02476 | 0.022284 | 0.048282 | 0.885794 | 0.885794 | 0.885794 | 0.885794 | 0.885794 | 0.885794 | 0 | 0 | 0.212148 | 4,445 | 98 | 138 | 45.357143 | 0.922616 | 0.877615 | 0 | 0.363636 | 0 | 0 | 0.054264 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0.181818 | 0 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
850102c7a766124e7e1aaa2c2b4e900d54e26b14 | 10,076 | py | Python | tests/test_filters.py | Acidburn0zzz/dci-downloader | 0540b5414dd1ccd04f4c2c9cb9f8a7c39826156d | [
"Apache-2.0"
] | 1 | 2020-01-12T05:27:08.000Z | 2020-01-12T05:27:08.000Z | tests/test_filters.py | Acidburn0zzz/dci-downloader | 0540b5414dd1ccd04f4c2c9cb9f8a7c39826156d | [
"Apache-2.0"
] | null | null | null | tests/test_filters.py | Acidburn0zzz/dci-downloader | 0540b5414dd1ccd04f4c2c9cb9f8a7c39826156d | [
"Apache-2.0"
] | null | null | null | from dci_downloader.filters import filter_files_list
from dci_downloader.settings import get_settings
def test_default_filter_files_list():
dci_files_list = {
"directories": [],
"files": [
{
"path": "",
"sha256": "954719cab91afac5bc142656afff86e6d8e87570b035cbce65dbbb84892a40d3",
"name": ".composeinfo",
"size": 14496,
},
{
"path": "AppStream/x86_64/debug/tree/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.x86_64.rpm",
"size": 45052,
},
{
"path": "AppStream/x86_64/os/Packages",
"sha256": "8fe293470f677bfc6eb04204c47b5e1a0e5d15431ef7ed9dbb269aaea386ed9f",
"name": "PackageKit-command-not-found-1.1.12-2.el8.x86_64.rpm",
"size": 28616,
},
{
"path": "BaseOS/x86_64/os/Packages",
"sha256": "7949b18b6d359b435686f2f5781928675ec8b2872b96f0abf6ba10747f794694",
"name": "avahi-libs-0.7-19.el8.i686.rpm",
"size": 68920,
},
{
"path": "AppStream/s390x/os/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.s390x.rpm",
"size": 29562,
},
{
"path": "AppStream/x86_64/os",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": ".treeinfo",
"size": 29562,
},
],
"symlinks": [],
}
settings = get_settings(sys_args=["RHEL-8", "/tmp"])["topics"][0]
expected_files_list = {
"directories": [],
"files": [
{
"path": "",
"sha256": "954719cab91afac5bc142656afff86e6d8e87570b035cbce65dbbb84892a40d3",
"name": ".composeinfo",
"size": 14496,
},
{
"path": "AppStream/x86_64/os/Packages",
"sha256": "8fe293470f677bfc6eb04204c47b5e1a0e5d15431ef7ed9dbb269aaea386ed9f",
"name": "PackageKit-command-not-found-1.1.12-2.el8.x86_64.rpm",
"size": 28616,
},
{
"path": "BaseOS/x86_64/os/Packages",
"sha256": "7949b18b6d359b435686f2f5781928675ec8b2872b96f0abf6ba10747f794694",
"name": "avahi-libs-0.7-19.el8.i686.rpm",
"size": 68920,
},
{
"path": "AppStream/x86_64/os",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": ".treeinfo",
"size": 29562,
},
],
"symlinks": [],
}
assert filter_files_list(dci_files_list, settings) == expected_files_list
def test_filter_files_list_with_debug():
dci_files_list = {
"directories": [],
"files": [
{
"path": "AppStream/x86_64/debug/tree/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.x86_64.rpm",
"size": 45052,
},
{
"path": "AppStream/x86_64/os/Packages",
"sha256": "8fe293470f677bfc6eb04204c47b5e1a0e5d15431ef7ed9dbb269aaea386ed9f",
"name": "PackageKit-command-not-found-1.1.12-2.el8.x86_64.rpm",
"size": 28616,
},
],
"symlinks": [],
}
settings = get_settings(
sys_args=["RHEL-8", "/tmp", "--variant", "AppStream", "--debug"]
)["topics"][0]
expected_files_list = {
"directories": [],
"files": [
{
"path": "AppStream/x86_64/debug/tree/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.x86_64.rpm",
"size": 45052,
},
{
"path": "AppStream/x86_64/os/Packages",
"sha256": "8fe293470f677bfc6eb04204c47b5e1a0e5d15431ef7ed9dbb269aaea386ed9f",
"name": "PackageKit-command-not-found-1.1.12-2.el8.x86_64.rpm",
"size": 28616,
},
],
"symlinks": [],
}
assert filter_files_list(dci_files_list, settings) == expected_files_list
def test_non_existing_variants_are_ignored():
dci_files_list = {
"directories": [],
"files": [
{
"path": "",
"sha256": "954719cab91afac5bc142656afff86e6d8e87570b035cbce65dbbb84892a40d3",
"name": ".composeinfo",
"size": 14496,
},
{
"path": "AppStream/x86_64/debug/tree/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.x86_64.rpm",
"size": 45052,
},
{
"path": "AppStream/x86_64/os/Packages",
"sha256": "8fe293470f677bfc6eb04204c47b5e1a0e5d15431ef7ed9dbb269aaea386ed9f",
"name": "PackageKit-command-not-found-1.1.12-2.el8.x86_64.rpm",
"size": 28616,
},
{
"path": "BaseOS/x86_64/os/Packages",
"sha256": "7949b18b6d359b435686f2f5781928675ec8b2872b96f0abf6ba10747f794694",
"name": "avahi-libs-0.7-19.el8.i686.rpm",
"size": 68920,
},
{
"path": "AppStream/s390x/os/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.s390x.rpm",
"size": 29562,
},
],
"symlinks": [],
}
settings = get_settings(sys_args=["RHEL-8", "/tmp", "--variant", "Server"])[
"topics"
][0]
expected_files_list = {
"directories": [],
"files": [
{
"path": "",
"sha256": "954719cab91afac5bc142656afff86e6d8e87570b035cbce65dbbb84892a40d3",
"name": ".composeinfo",
"size": 14496,
}
],
"symlinks": [],
}
assert filter_files_list(dci_files_list, settings) == expected_files_list
def test_filter_files_list_download_everything():
dci_files_list = {
"directories": [],
"files": [
{
"path": "",
"sha256": "954719cab91afac5bc142656afff86e6d8e87570b035cbce65dbbb84892a40d3",
"name": ".composeinfo",
"size": 14496,
},
{
"path": "AppStream/x86_64/debug/tree/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.x86_64.rpm",
"size": 45052,
},
{
"path": "AppStream/x86_64/os/Packages",
"sha256": "8fe293470f677bfc6eb04204c47b5e1a0e5d15431ef7ed9dbb269aaea386ed9f",
"name": "PackageKit-command-not-found-1.1.12-2.el8.x86_64.rpm",
"size": 28616,
},
{
"path": "BaseOS/x86_64/os/Packages",
"sha256": "7949b18b6d359b435686f2f5781928675ec8b2872b96f0abf6ba10747f794694",
"name": "avahi-libs-0.7-19.el8.i686.rpm",
"size": 68920,
},
{
"path": "AppStream/s390x/os/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.s390x.rpm",
"size": 29562,
},
],
"symlinks": [],
}
settings = get_settings(sys_args=["RHEL-8", "/tmp", "--all"])["topics"][0]
expected_files_list = {
"directories": [],
"files": [
{
"path": "",
"sha256": "954719cab91afac5bc142656afff86e6d8e87570b035cbce65dbbb84892a40d3",
"name": ".composeinfo",
"size": 14496,
},
{
"path": "AppStream/x86_64/debug/tree/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.x86_64.rpm",
"size": 45052,
},
{
"path": "AppStream/x86_64/os/Packages",
"sha256": "8fe293470f677bfc6eb04204c47b5e1a0e5d15431ef7ed9dbb269aaea386ed9f",
"name": "PackageKit-command-not-found-1.1.12-2.el8.x86_64.rpm",
"size": 28616,
},
{
"path": "BaseOS/x86_64/os/Packages",
"sha256": "7949b18b6d359b435686f2f5781928675ec8b2872b96f0abf6ba10747f794694",
"name": "avahi-libs-0.7-19.el8.i686.rpm",
"size": 68920,
},
{
"path": "AppStream/s390x/os/Packages",
"sha256": "6f48f0d285918e502035da74decf447c6bb29898206406a4ed6a92ece94d276a",
"name": "PackageKit-command-not-found-debuginfo-1.1.12-2.el8.s390x.rpm",
"size": 29562,
},
],
"symlinks": [],
}
assert filter_files_list(dci_files_list, settings) == expected_files_list
| 39.513725 | 93 | 0.52104 | 738 | 10,076 | 6.971545 | 0.100271 | 0.03207 | 0.069388 | 0.0793 | 0.965015 | 0.965015 | 0.961516 | 0.961516 | 0.958601 | 0.958601 | 0 | 0.266434 | 0.34478 | 10,076 | 254 | 94 | 39.669291 | 0.512875 | 0 | 0 | 0.666667 | 0 | 0.069106 | 0.462882 | 0.366713 | 0 | 0 | 0 | 0 | 0.01626 | 1 | 0.01626 | false | 0 | 0.00813 | 0 | 0.02439 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
51d235ebe06ff599f13347c9dc918148866d496d | 181 | py | Python | lambda-ec2-tagging-monitor/classes/__init__.py | chadbartel/My-Serverless-Sandbox | cd3c3f861ff81777f1cbf33a58fdc02b733d49ec | [
"CC0-1.0"
] | null | null | null | lambda-ec2-tagging-monitor/classes/__init__.py | chadbartel/My-Serverless-Sandbox | cd3c3f861ff81777f1cbf33a58fdc02b733d49ec | [
"CC0-1.0"
] | null | null | null | lambda-ec2-tagging-monitor/classes/__init__.py | chadbartel/My-Serverless-Sandbox | cd3c3f861ff81777f1cbf33a58fdc02b733d49ec | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python
import sys
sys.path.append(".")
sys.path.append("..")
from classes.criteria import Criteria
from classes.ec2 import EC2Client
from classes.hunter import Hunter | 25.857143 | 37 | 0.779006 | 27 | 181 | 5.222222 | 0.518519 | 0.234043 | 0.184397 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012195 | 0.093923 | 181 | 7 | 38 | 25.857143 | 0.847561 | 0.110497 | 0 | 0 | 0 | 0 | 0.018634 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
cfcba43948e9909f132ab85bb9041b1f6696ee34 | 101 | py | Python | tests/units/tournaments/engines.py | happz/settlers | 961a6d2121ab6e89106f17017f026c60c77f16f9 | [
"MIT"
] | 1 | 2018-11-16T09:41:31.000Z | 2018-11-16T09:41:31.000Z | tests/units/tournaments/engines.py | happz/settlers | 961a6d2121ab6e89106f17017f026c60c77f16f9 | [
"MIT"
] | 15 | 2015-01-07T14:17:36.000Z | 2019-04-29T13:26:43.000Z | tests/units/tournaments/engines.py | happz/settlers | 961a6d2121ab6e89106f17017f026c60c77f16f9 | [
"MIT"
] | null | null | null | import tests
import tests.units.tournaments
import tournaments
class Tests(tests.TestCase):
pass
| 12.625 | 30 | 0.811881 | 13 | 101 | 6.307692 | 0.538462 | 0.268293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128713 | 101 | 7 | 31 | 14.428571 | 0.931818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.2 | 0.6 | 0 | 0.8 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
32051a2ff094a5642edb95ebe6de0fd02aef31a3 | 91 | py | Python | dogebuild_c/loader.py | dogebuild/dogebuild-c | 02aa74f2ac112f6c4dd064846f4f78a38c6930bf | [
"MIT"
] | null | null | null | dogebuild_c/loader.py | dogebuild/dogebuild-c | 02aa74f2ac112f6c4dd064846f4f78a38c6930bf | [
"MIT"
] | null | null | null | dogebuild_c/loader.py | dogebuild/dogebuild-c | 02aa74f2ac112f6c4dd064846f4f78a38c6930bf | [
"MIT"
] | null | null | null | from dogebuild_c.c_plugin import CPlugin
def get(**kwargs):
return CPlugin(**kwargs)
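# dogebuild presumably imports this module and calls get() to obtain the plugin
# instance (an assumption based on the module name); kwargs pass through as-is.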
| 15.166667 | 40 | 0.736264 | 13 | 91 | 5 | 0.769231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153846 | 91 | 5 | 41 | 18.2 | 0.844156 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
3208103ce40fdcb0ad5942c354a0129a5118d061 | 7,082 | py | Python | qa327_test/frontend/test_sell.py | awebsters/SeetGeak-Quality-Assurance-Example | 65272068d8fe81266efb0b8528bac339fb063891 | [
"MIT"
] | null | null | null | qa327_test/frontend/test_sell.py | awebsters/SeetGeak-Quality-Assurance-Example | 65272068d8fe81266efb0b8528bac339fb063891 | [
"MIT"
] | null | null | null | qa327_test/frontend/test_sell.py | awebsters/SeetGeak-Quality-Assurance-Example | 65272068d8fe81266efb0b8528bac339fb063891 | [
"MIT"
] | null | null | null | from time import sleep
import pytest
from seleniumbase import BaseCase
from qa327_test.conftest import base_url
from unittest.mock import patch
from qa327.models import db, User
from werkzeug.security import generate_password_hash, check_password_hash
# Mock a sample user
test_user = User(
email='test_frontend@test.com',
name='test_frontend',
password=generate_password_hash('Test1234!', method='sha256'),
balance=5000
)
class SellPageTest(BaseCase):
@patch('qa327.backend.get_user', return_value=test_user)
@patch('qa327.backend.create_ticket', return_value=None)
def test_ticket_name_space(self, *_):
"""
        R4.1.1 - Check that the name contains only letters and numbers, with spaces allowed only when not the first or last character
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open login page
self.open(base_url + '/login')
# Fill in form
self.type("#email", "test_frontend@test.com")
self.type("#password", "Test1234!")
# Submit
self.click('input[type="submit"]')
self.type("#name", "+")
self.type("#quantity", 1)
self.type("#price", 20)
self.execute_script("document.querySelector('#date').setAttribute('value', '{}')".format('2020-09-01'))
self.click('input[value="Sell Ticket"]')
self.assert_text("Name can only contain alphanumeric characters", "#message")
@patch('qa327.backend.get_user', return_value=test_user)
@patch('qa327.backend.create_ticket', return_value=None)
def test_ticket_name_length(self, *_):
"""
R4.2.1 - Check that the name is less than 60 characters long
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open login page
self.open(base_url + '/login')
# Fill in form
self.type("#email", "test_frontend@test.com")
self.type("#password", "Test1234!")
# Submit
self.click('input[type="submit"]')
self.type("#name", 6*"Teeeeeeeeest")
self.type("#quantity", 1)
self.type("#price", 20)
self.execute_script("document.querySelector('#date').setAttribute('value', '{}')".format('2020-09-01'))
self.click('input[value="Sell Ticket"]')
self.assert_text("Name is too long, it must be shorter than 60 characters", "#message")
@patch('qa327.backend.get_user', return_value=test_user)
@patch('qa327.backend.create_ticket', return_value=None)
def test_ticket_quantity_range_lower(self, *_):
"""
R4.3.1 - Check failure for quantity of 0
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open login page
self.open(base_url + '/login')
# Fill in form
self.type("#email", "test_frontend@test.com")
self.type("#password", "Test1234!")
# Submit
self.click('input[type="submit"]')
self.type("#name", "Test")
self.type("#quantity", 0)
self.type("#price", 20)
self.execute_script("document.querySelector('#date').setAttribute('value', '{}')".format('2020-09-01'))
self.click('input[value="Sell Ticket"]')
self.assert_text("Quantity must be greater than 0 and less than or equal to 100", "#message")
@patch('qa327.backend.get_user', return_value=test_user)
@patch('qa327.backend.create_ticket', return_value=None)
def test_ticket_quantity_range_upper(self, *_):
"""
R4.3.2 - Check failure for quantity of 101
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open login page
self.open(base_url + '/login')
# Fill in form
self.type("#email", "test_frontend@test.com")
self.type("#password", "Test1234!")
# Submit
self.click('input[type="submit"]')
self.type("#name", "Test")
self.type("#quantity", 101)
self.type("#price", 20)
self.execute_script("document.querySelector('#date').setAttribute('value', '{}')".format('2020-09-01'))
self.click('input[value="Sell Ticket"]')
self.assert_text("Quantity must be greater than 0 and less than or equal to 100", "#message")
@patch('qa327.backend.get_user', return_value=test_user)
@patch('qa327.backend.create_ticket', return_value=None)
def test_ticket_price_range_lower(self, *_):
"""
R4.4.1 - Check failure for price < 10
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open login page
self.open(base_url + '/login')
# Fill in form
self.type("#email", "test_frontend@test.com")
self.type("#password", "Test1234!")
# Submit
self.click('input[type="submit"]')
self.type("#name", "Test")
self.type("#quantity", 1)
self.type("#price", 9)
self.execute_script("document.querySelector('#date').setAttribute('value', '{}')".format('2020-09-01'))
self.click('input[value="Sell Ticket"]')
self.assert_text("Price must be greater than or equal to 10 and less than or equal to 100", "#message")
@patch('qa327.backend.get_user', return_value=test_user)
@patch('qa327.backend.create_ticket', return_value=None)
def test_ticket_price_range_upper(self, *_):
"""
R4.4.2 - Check failure for price > 100
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open login page
self.open(base_url + '/login')
# Fill in form
self.type("#email", "test_frontend@test.com")
self.type("#password", "Test1234!")
# Submit
self.click('input[type="submit"]')
self.type("#name", "Test")
self.type("#quantity", 1)
self.type("#price", 101)
self.execute_script("document.querySelector('#date').setAttribute('value', '{}')".format('2020-09-01'))
self.click('input[value="Sell Ticket"]')
self.assert_text("Price must be greater than or equal to 10 and less than or equal to 100", "#message")
@patch('qa327.backend.get_user', return_value=test_user)
@patch('qa327.backend.create_ticket', return_value=None)
def test_ticket_user_profile(self, *_):
"""
R4.5.1 - Check success for a date in the correct format
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open login page
self.open(base_url + '/login')
# Fill in form
self.type("#email", "test_frontend@test.com")
self.type("#password", "Test1234!")
# Submit
self.click('input[type="submit"]')
self.type("#name", "Test")
self.type("#quantity", 1)
self.type("#price", 20)
self.execute_script("document.querySelector('#date').setAttribute('value', '{}')".format('2020-09-01'))
self.click('input[value="Sell Ticket"]')
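# A minimal helper sketch (hypothetical, not part of the original suite):
# every test above repeats the same logout/login/fill/submit sequence; a
# helper like this could factor it out, using only the SeleniumBase calls
# already used above.
def _submit_sell_form(case, name, quantity, price, date='2020-09-01'):
    """Log in as the mocked user and submit the sell form on a BaseCase instance."""
    case.open(base_url + '/logout')
    case.open(base_url + '/login')
    case.type("#email", "test_frontend@test.com")
    case.type("#password", "Test1234!")
    case.click('input[type="submit"]')
    case.type("#name", name)
    case.type("#quantity", quantity)
    case.type("#price", price)
    case.execute_script(
        "document.querySelector('#date').setAttribute('value', '{}')".format(date))
    case.click('input[value="Sell Ticket"]')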
| 33.72381 | 128 | 0.609997 | 888 | 7,082 | 4.738739 | 0.149775 | 0.06654 | 0.056559 | 0.049905 | 0.836264 | 0.818679 | 0.809648 | 0.809648 | 0.809648 | 0.809648 | 0 | 0.040488 | 0.236233 | 7,082 | 209 | 129 | 33.885167 | 0.737475 | 0.127365 | 0 | 0.738739 | 0 | 0 | 0.367101 | 0.14878 | 0 | 0 | 0 | 0 | 0.054054 | 1 | 0.063063 | false | 0.081081 | 0.063063 | 0 | 0.135135 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
3217c92c7e6707a34847ce091f64819b3480af4b | 33,575 | py | Python | HYDRA_Step2/MRF_FullNL_ResCNN_T1T2_L1000_Test.py | P-Song/HYDRA | b91b3decc622a5b95742dc477988cf8844d1c5c2 | [
"MIT"
] | 6 | 2020-06-26T11:26:40.000Z | 2022-01-17T11:28:59.000Z | HYDRA_Step2/MRF_FullNL_ResCNN_T1T2_L1000_Test.py | P-Song/HYDRA | b91b3decc622a5b95742dc477988cf8844d1c5c2 | [
"MIT"
] | null | null | null | HYDRA_Step2/MRF_FullNL_ResCNN_T1T2_L1000_Test.py | P-Song/HYDRA | b91b3decc622a5b95742dc477988cf8844d1c5c2 | [
"MIT"
] | 2 | 2020-08-19T15:42:25.000Z | 2020-08-30T09:24:26.000Z |
# coding: utf-8
'''
This software accompanies the paper "HYDRA: Hybrid deep magnetic resonance fingerprinting". The source code is freely available for research and study purposes.
Purpose:
Magnetic resonance fingerprinting (MRF) methods typically rely on dictionary matching to map the temporal MRF signals to quantitative tissue parameters.
Such approaches suffer from inherent discretization errors, as well as high computational complexity as the dictionary size grows.
To alleviate these issues, we propose a HYbrid Deep magnetic ResonAnce fingerprinting approach, referred to as HYDRA.
Methods:
HYDRA involves two stages: a model-based signature restoration phase and a learning-based parameter restoration phase.
Signal restoration is implemented using low-rank based de-aliasing techniques while parameter restoration is performed
using a deep nonlocal residual convolutional neural network. The designed network is trained on synthesized MRF data simulated with
the Bloch equations and fast imaging with steady-state precession (FISP) sequences.
In test mode, it takes a temporal MRF signal as input and produces the corresponding tissue parameters.
Reference:
----------------------------
If you use the source code, please refer to the following papers for details; thank you for citing them.
[1] Pingfan Song, Yonina C. Eldar, Gal Mazor, Miguel R. D. Rodrigues, "HYDRA: Hybrid Deep Magnetic Resonance Fingerprinting", Medical Physics, 2019, doi: 10.1002/mp.13727.
[2] Pingfan Song, Yonina C. Eldar, Gal Mazor, Miguel R. D. Rodrigues, "Multimodal Image Super-Resolution via Joint Sparse Representations ...", IEEE Transactions on Computational Imaging, DOI: 10.1109/TCI.2019.2916502.
[3] Pingfan Song, Miguel Rodrigues, et al., "Magnetic Resonance Fingerprinting Using a Residual Convolutional Neural Network", ICASSP, pp. 1040-1044, IEEE, 2019.
Usage:
----------------------------
- Run the code 'MRF_FullNL_ResCNN_T1T2_L1000_Train' to train the designed nonlocal residual CNN.
- Run the code 'MRF_FullNL_ResCNN_T1T2_L1000_Test' to test the network on following cases:
case 1: Testing on the synthetic dataset for comparing parameter restoration performance, i.e. testing on simulated MRF temporal signals.
case 2: Testing on the anatomical dataset with full k-space sampling for comparing parameter restoration performance.
case 3: Testing on the anatomical dataset with k-space subsampling factor 15% using Gaussian patterns.
case 4: Testing on the anatomical dataset with k-space subsampling factor 9% using Spiral patterns.
Codes written & compiled by:
----------------------------
Pingfan Song
Electronic and Electrical Engineering, Imperial College London, UK.
p.song@imperial.ac.uk, songpingfan@gmail.com
'''
# In[1]:
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.models import Model
from keras.layers import Dense, Dropout, Flatten, BatchNormalization, Activation
from keras.layers import Embedding, Input
from keras.layers.merge import add
from keras.layers import Conv1D, GlobalAveragePooling1D, MaxPooling1D
from keras.constraints import maxnorm
from keras import regularizers
from keras.optimizers import *
from keras.models import model_from_json # load model from .json file
from keras.callbacks import ModelCheckpoint
from keras.callbacks import LearningRateScheduler
import matplotlib.pyplot as plt
import pickle
import numpy as np
from sklearn.preprocessing import normalize
import os
keras.__version__
import scipy.io
import keras.backend as K
import time
import matplotlib.pyplot as plt
# get_ipython().run_line_magic('matplotlib', 'inline')
from non_local import non_local_block
# In[1]: set GPU resource quota
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.7
set_session(tf.Session(config=config))
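# The session API above is TF1-style; under TF2 a rough equivalent (shown for
# reference only, not part of the original script) is per-GPU memory growth:
#     for g in tf.config.list_physical_devices('GPU'):
#         tf.config.experimental.set_memory_growth(g, True)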
# In[34]:
def psnr(target,ref, peak_val=1.):
target_data = np.array(target, dtype=np.float64)
ref_data = np.array(ref,dtype=np.float64)
diff = ref_data - target_data
# print(diff.shape)
diff = diff.flatten('C')
rmse = np.sqrt(np.mean(diff ** 2.))
psnr = 20 * np.log10(peak_val / rmse)
return psnr
def snr(target,ref):
target_data = np.array(target, dtype=np.float64)
ref_data = np.array(ref,dtype=np.float64)
diff = ref_data - target_data
# print(diff.shape)
diff = diff.flatten('C')
rmse = np.sqrt(np.mean(diff ** 2.))
target_data = target_data.flatten('C')
power = np.sqrt(np.mean(target_data ** 2.))
snr = 20*np.log10(power/rmse)
return snr
def rmse(target,ref):
target_data = np.array(target, dtype=np.float64)
ref_data = np.array(ref,dtype=np.float64)
diff = ref_data - target_data
# print(diff.shape)
diff = diff.flatten('C')
rmse = np.sqrt(np.mean(diff ** 2.))
return rmse
def mre(target,ref): # mean relative error
target_data = np.array(target, dtype=np.float64)
ref_data = np.array(ref,dtype=np.float64)
meanRef = np.mean(ref_data.flatten('C'))
if meanRef != 0:
diff = np.abs((ref_data - target_data)/meanRef)
else:
diff = np.abs(ref_data - target_data)
# print(diff.shape)
diff = diff.flatten('C')
mre = np.mean(diff)
return mre
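# A quick sanity check of the metric helpers above (illustrative values only,
# not part of the original experiment). With est = ref + 0.1 everywhere, rmse
# is 0.1, so psnr with peak_val=1.0 is 20*log10(1/0.1) = 20 dB.
_ref_demo = np.ones((8, 8))
_est_demo = _ref_demo + 0.1
print('demo psnr / snr / rmse: {:0.2f} / {:0.2f} / {:0.4f}'.format(
    psnr(_est_demo, _ref_demo, peak_val=1.),
    snr(_est_demo, _ref_demo),
    rmse(_est_demo, _ref_demo)))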
#%%
#%% Case 1
# Testing on the synthetic dataset for comparing parameter restoration performance, i.e. testing on simulated MRF temporal signals.
MRFData = scipy.io.loadmat('D_LUT_L1000_TE10_TestRandom.mat') #
Label = MRFData['LUT']
X = MRFData['D']
X = X[:,0::1] # fully-sampled from 1000 time points;
X = normalize(X, norm = 'l2', axis=1)  # L2 normalization along the time dimension
X = np.expand_dims(X, axis=2)
plt.figure()
Xpart = X[300:16000:2000,:,0]
print(Xpart.shape)
plt.plot(np.real(np.transpose(Xpart)))
plt.show()
# load json and create model
json_file = open('model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
# load weights into new model
#model.load_weights("model.h5") # load saved weights in final epoch.
model.load_weights("weights.best.hdf5") # load saved weights from the checkpoint.
print("Loaded model from disk")
#%%
# calculate predictions
Tstart = time.clock()  # NB: time.clock() was removed in Python 3.8; use time.perf_counter() on newer interpreters
predictions = model.predict(X)
Tend = time.clock()
Tcost = Tend - Tstart
# compute correlation coefficients
coeff_T1 = np.corrcoef(Label[:,0],predictions[:,0])
coeff_T1 = coeff_T1[0,1]
coeff_T2 = np.corrcoef(Label[:,1],predictions[:,1])
coeff_T2 = coeff_T2[0,1]
# compute RMSE
PSNR_T1 = psnr(predictions[:,0],Label[:,0],5000)
PSNR_T2 = psnr(predictions[:,1],Label[:,1],2000)
SNR_T1 = snr(predictions[:,0],Label[:,0])
SNR_T2 = snr(predictions[:,1],Label[:,1])
RMSE_T1 = rmse(predictions[:,0],Label[:,0])
RMSE_T2 = rmse(predictions[:,1],Label[:,1])
print('{:0.2f} / {:0.2f}'.format( PSNR_T1 , PSNR_T2 ))
print('{:0.2f} / {:0.2f}'.format( SNR_T1 , SNR_T2 ))
print('{:0.2f} / {:0.2f}'.format( RMSE_T1 , RMSE_T2 ))
print('{:0.8f} / {:0.8f}'.format( coeff_T1 , coeff_T2 ))
#%%
FileName = 'HYDRA_Test_1D_synthetic.npz'
np.savez(FileName,PSNR_T1 = PSNR_T1,PSNR_T2 = PSNR_T2,SNR_T1 = SNR_T1,SNR_T2 = SNR_T2,
RMSE_T1 = RMSE_T1, RMSE_T2 = RMSE_T2, coeff_T1 = coeff_T1, coeff_T2 = coeff_T2,
Label = Label, predictions = predictions, Tcost = Tcost)
#%% load reconstructed 1D synthetic data
Results=np.load(FileName)
print(Results.keys())
print('{:0.2f} / {:0.2f}'.format(Results['PSNR_T1'], Results['PSNR_T2']))
print('{:0.2f} / {:0.2f}'.format(Results['SNR_T1'], Results['SNR_T2']))
print('{:0.2f} / {:0.2f}'.format(Results['RMSE_T1'], Results['RMSE_T2']))
print('{:0.8f} / {:0.8f}'.format(Results['coeff_T1'], Results['coeff_T2']))
T1 = Results['predictions'][:,0]
T1 = T1.flatten()
T2 = Results['predictions'][:,1]
T2 = T2.flatten()
#T1 = np.squeeze(T1)
#T2 = np.squeeze(T2)
FigNameT1 = "T1_CNN_1Dsimu.png"
FigNameT2 = "T2_CNN_1Dsimu.png"
FigNameT1res = "T1_res_CNN_1Dsimu.png"
FigNameT2res = "T2_res_CNN_1Dsimu.png"
FigNameT1corr = "T1_corr_CNN_1Dsimu.png"
FigNameT2corr = "T2_corr_CNN_1Dsimu.png"
FigNameT1error = "T1_error_CNN_1Dsimu.png"
FigNameT2error = "T2_error_CNN_1Dsimu.png"
#%%
# show reconstruction
mydpi = 200  # figure DPI; must be defined before the savefig calls below
ind_T1 = np.argsort(Label[:,0])
Label_T1 = Label[ind_T1,0]
predictions_T1 = predictions[ind_T1,0]
ind_T2 = np.argsort(Label[:,1])
Label_T2 = Label[ind_T2,1]
predictions_T2 = predictions[ind_T2,1]
plt.figure(figsize = (3,3))
plt.plot(Label_T1, predictions_T1,'r.',label='Estimation')
plt.plot(Label_T1, Label_T1,'b-',label='Reference')
#plt.title('T1_Corr')
plt.grid(True)
plt.xlim((0, 5000))
plt.ylim((0, 5000))
plt.xlabel('Reference T1 (ms)')
plt.ylabel('Estimated T1 (ms)')
plt.legend(loc='best')
plt.savefig(FigNameT1corr,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure(figsize = (3,3))
plt.plot(Label_T2, predictions_T2,'r.',label='Estimation')
plt.plot(Label_T2, Label_T2,'b-',label='Reference')
#plt.title('T2_Corr')
plt.grid(True)
plt.xlim((0, 2000))
plt.ylim((0, 2000))
plt.xlabel('Reference T2 (ms)')
plt.ylabel('Estimated T2 (ms)')
plt.legend(loc='best')
plt.savefig(FigNameT2corr,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
#%%
# show error maps
plt.figure(figsize = (3,3))
plt.plot(Label_T1, predictions_T1-Label_T1,'r.',label='Estimation')
plt.grid(True)
plt.xlim((0, 5000))
plt.ylim((-100, 100))
plt.xlabel('Reference T1 (ms)')
plt.ylabel('Error of estimated T1 (ms)')
#plt.legend(loc='best')
plt.savefig(FigNameT1error,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure(figsize = (3,3))
plt.plot(Label_T2, predictions_T2-Label_T2,'r.',label='Estimation')
plt.grid(True)
plt.xlim((0, 2000))
plt.ylim((-40, 40))
plt.xlabel('Reference T2 (ms)')
plt.ylabel('Error of estimated T2 (ms)')
#plt.legend(loc='best')
plt.savefig(FigNameT2error,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
#%%
# In[22]: Case 2
# Testing on the anatomical dataset with full k-space sampling for comparing parameter restoration performance.
# Specifically, testing on a stack of multi-contrast images. Each pixel position yields an MRF temporal signal.
MRFData = scipy.io.loadmat('MRF_ImageStack_N128_L1000_TE10_Ratio0.15.mat') #MRFData = scipy.io.loadmat('Groundtruth_T1_T2.mat')
print(MRFData.keys())
T1_true = MRFData['T1_128']
T1_true = T1_true[:,:,np.newaxis]
T2_true = MRFData['T2_128']
T2_true = T2_true[:,:,np.newaxis]
print(T1_true.shape, T2_true.shape)
Label = np.concatenate([T1_true, T2_true], axis=2)
Label = Label.reshape((128*128,-1))
print(Label[0:16000:1000].T)
print(Label.shape, Label.dtype)
#X = MRFData['X_estimated_old_mrf']
X = MRFData['X_fullysamp']
print(X.shape, X.dtype)
X = X.reshape((128*128,-1))
X = np.real(X)
#X = X[:,1::5] # sub-sampled from 1000 time points;
# zero out signatures whose norm is too small
NormX = np.zeros(X.shape[0])
NormX_index = np.empty(X.shape[0]) # index of valid values
NormX_index[:] = np.nan
print(NormX.shape, NormX_index.shape)
for i in range(0, X.shape[0]):
NormX[i] = np.sum(X[i,:]**2)
if NormX[i] < 1:  # norm threshold (the value 20 was also tried)
X[i,:] = 0
NormX_index[i] = i
np.set_printoptions(precision=2)
NormX_index = NormX_index[~np.isnan(NormX_index)]
NormX_index = NormX_index.astype('int32') # arrays used as indices must be of integer (or boolean) type
X = normalize(X, norm = 'l2', axis=1)  # L2 normalization along the time dimension
X = np.expand_dims(X, axis=2)
print(X.shape, X.dtype)
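# A behavior-equivalent vectorized form of the thresholding loop above, shown
# for reference only (X_raw stands for the 2-D array as it was before
# normalize() and expand_dims(), so this is not drop-in at this point):
#     norms = (X_raw ** 2).sum(axis=1)
#     mask = norms < 1
#     X_raw[mask] = 0
#     NormX_index = np.where(mask)[0]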
# In[23]:
# show true T1, T2
#MRFData = MRFData = scipy.io.loadmat('Groundtruth_T1_T2.mat')
#T1_true = MRFData['T1_128']
#T2_true = MRFData['T2_128']
#print(T1_true.shape, T2_true.shape)
T1max = 4500
T2max = 2500
mycmap = 'jet' # 'gray'
mydpi = 200
plt.figure()
plt.imshow(T1_true, cmap = mycmap)
plt.colorbar()
plt.clim(0,T1max)
plt.axis('off')
plt.title('T1_true')
plt.grid(True)
plt.savefig("T1_true.png",bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(T2_true, cmap = mycmap)
plt.colorbar()
plt.clim(0,T2max)
plt.axis('off')
plt.title('T2_true')
plt.grid(True)
plt.savefig("T2_true.png",bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
Xpart = X[300:16000:2000,:,0]
print(Xpart.shape)
plt.plot(np.real(np.transpose(Xpart)))
plt.show()
# In[30]:
# load json and create model
json_file = open('model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
# load weights into new model
#model.load_weights("model.h5") # load saved weights in final epoch.
model.load_weights("weights.best.hdf5") # load saved weights from the checkpoint.
print("Loaded model from disk")
# In[31]:
# calculate predictions
Tstart = time.clock()
predictions = model.predict(X)
Tend = time.clock()
Tcost = Tend - Tstart
print(predictions.shape)
predictions[NormX_index,:] = 0
print(predictions[0:200:10,:].T)
print(Label[0:200:10,:].T)
predictions = predictions.reshape((128,128,2))
print(predictions.shape, predictions.dtype)
# In[35]:
T1max = 4500
T2max = 2500
T1_true = np.squeeze(T1_true)
T2_true = np.squeeze(T2_true)
T1 = predictions[:,:,0]
T1[np.where((T1<0))] = 0
T1[np.where((T1>T1max))] = T1max
T2 = predictions[:,:,1]
T2[np.where((T2<0))] = 0
T2[np.where((T2>T2max))] = T2max
## remove invalid elements referring to the label.
#T1 = T1 * (T1_true > 0)
#T2 = T2 * (T2_true > 0)
PSNR_T1 = psnr(T1,T1_true,T1max)
PSNR_T2 = psnr(T2,T2_true,T2max)
SNR_T1 = snr(T1,T1_true)
SNR_T2 = snr(T2,T2_true)
RMSE_T1 = rmse(T1,T1_true)
RMSE_T2 = rmse(T2,T2_true)
# compute correlation coefficients
Label = Label.reshape((128*128,-1))
T1 = T1[:,:,np.newaxis]
T2 = T2[:,:,np.newaxis]
predictions = np.concatenate([T1, T2], axis=2)
predictions = predictions.reshape((128*128,-1))
coeff_T1 = np.corrcoef(Label[:,0],predictions[:,0])
coeff_T1 = coeff_T1[0,1]
coeff_T2 = np.corrcoef(Label[:,1],predictions[:,1])
coeff_T2 = coeff_T2[0,1]
print('{:0.2f} / {:0.2f}'.format( PSNR_T1 , PSNR_T2 ))
print('{:0.2f} / {:0.2f}'.format( SNR_T1 , SNR_T2 ))
print('{:0.2f} / {:0.2f}'.format( RMSE_T1 , RMSE_T2 ))
print('{:0.8f} / {:0.8f}'.format( coeff_T1, coeff_T2))
# save results
FileName = 'HYDRA_Test_2D_Anatomical_FullSample.npz'
np.savez(FileName,PSNR_T1 = PSNR_T1,PSNR_T2 = PSNR_T2,SNR_T1 = SNR_T1,SNR_T2 = SNR_T2,
RMSE_T1 = RMSE_T1, RMSE_T2 = RMSE_T2, coeff_T1 = coeff_T1, coeff_T2 = coeff_T2,
T1 = T1, T2 = T2, T1_true = T1_true,T2_true = T2_true, Tcost = Tcost)
# In[38]:
Results=np.load(FileName)
print('{:0.2f} / {:0.2f}'.format(Results['PSNR_T1'], Results['PSNR_T2']))
print('{:0.2f} / {:0.2f}'.format(Results['SNR_T1'], Results['SNR_T2']))
print('{:0.2f} / {:0.2f}'.format(Results['RMSE_T1'], Results['RMSE_T2']))
print('{:0.8f} / {:0.8f}'.format(Results['coeff_T1'], Results['coeff_T2']))
#print(Results['val_loss'][-10:-1],Results['loss'][-10:-1])
T1 = Results['T1']
T2 = Results['T2']
T1 = np.squeeze(T1)
T2 = np.squeeze(T2)
FigNameT1 = "T1_CNN_FullSample.png"
FigNameT2 = "T2_CNN_FullSample.png"
FigNameT1res = "T1_res_CNN_FullSample.png"
FigNameT2res = "T2_res_CNN_FullSample.png"
FigNameT1corr = "T1_corr_CNN_FullSample.png"
FigNameT2corr = "T2_corr_CNN_FullSample.png"
FigNameT1error = "T1_error_CNN_FullSample.png"
FigNameT2error = "T2_error_CNN_FullSample.png"
#%%
mycmap = 'jet' # 'gray'
mydpi = 200
plt.figure()
plt.imshow(T1, cmap = mycmap)
plt.colorbar()
plt.clim(0,T1max)
plt.axis('off')
#plt.title('T1_Rec')
plt.grid(True)
plt.savefig(FigNameT1,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(T2, cmap = mycmap)
plt.colorbar()
plt.clim(0,T2max)
plt.axis('off')
#plt.title('T2_Rec')
plt.grid(True)
plt.savefig(FigNameT2,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(np.abs(T1_true-T1), cmap = mycmap)
plt.colorbar()
plt.clim(0,20)
plt.axis('off')
#plt.title('T1_residual')
plt.grid(True)
plt.savefig(FigNameT1res,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(np.abs(T2_true-T2), cmap = mycmap)
plt.colorbar()
plt.clim(0,10)
plt.axis('off')
#plt.title('T2_residual')
plt.grid(True)
plt.savefig(FigNameT2res,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
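# A hypothetical helper sketch (not in the original script): the
# imshow/colorbar/clim/savefig pattern above repeats for every map in all
# four test cases; a small helper like this could replace each block:
def show_map(img, fname, vmax, cmap='jet', dpi=200):
    """Display a parameter map with a fixed color range and save it to disk."""
    plt.figure()
    plt.imshow(img, cmap=cmap)
    plt.colorbar()
    plt.clim(0, vmax)
    plt.axis('off')
    plt.grid(True)
    plt.savefig(fname, bbox_inches='tight', transparent=True, pad_inches=0, dpi=dpi)
    plt.show()
# e.g. show_map(T1, FigNameT1, T1max) reproduces the first figure above.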
#%% show correlation coefficients
ind_T1 = np.argsort(T1_true.flatten())
temp = T1_true.flatten()
Label_T1 = temp[ind_T1]
temp = T1.flatten()
predictions_T1 = temp[ind_T1]
ind_T2 = np.argsort(T2_true.flatten())
temp = T2_true.flatten()
Label_T2 = temp[ind_T2]
temp = T2.flatten()
predictions_T2 = temp[ind_T2]
#%%
plt.figure(figsize = (3,3))
plt.plot(Label_T1, predictions_T1,'r.',label='Estimation')
plt.plot(Label_T1, Label_T1,'b-',label='Reference')
#plt.title('T1_Corr')
plt.grid(True)
plt.xlim((0, 5000))
plt.ylim((0, 5000))
plt.xlabel('Reference T1 (ms)')
plt.ylabel('Estimated T1 (ms)')
plt.legend(loc='best')
plt.savefig(FigNameT1corr,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure(figsize = (3,3))
plt.plot(Label_T2, predictions_T2,'r.',label='Estimation')
plt.plot(Label_T2, Label_T2,'b-',label='Reference')
#plt.title('T2_Corr')
plt.grid(True)
plt.xlim((0, 2000))
plt.ylim((0, 2000))
plt.xlabel('Reference T2 (ms)')
plt.ylabel('Estimated T2 (ms)')
plt.legend(loc='best')
plt.savefig(FigNameT2corr,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
#%%
# show error maps
mydpi = 200
plt.figure(figsize = (3,3))
plt.plot(Label_T1, predictions_T1-Label_T1,'r.',label='Estimation')
plt.grid(True)
plt.xlim((0, 5000))
plt.ylim((-100, 100))
plt.xlabel('Reference T1 (ms)')
plt.ylabel('Error of estimated T1 (ms)')
#plt.legend(loc='best')
plt.savefig(FigNameT1error,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure(figsize = (3,3))
plt.plot(Label_T2, predictions_T2-Label_T2,'r.',label='Estimation')
plt.grid(True)
plt.xlim((0, 2000))
plt.ylim((-40, 40))
plt.xlabel('Reference T2 (ms)')
plt.ylabel('Error of estimated T2 (ms)')
#plt.legend(loc='best')
plt.savefig(FigNameT2error,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
#%%
# In[ ]:
# case 3: Testing on the anatomical dataset with k-space subsampling factor 15% using Gaussian patterns.
MRFData = scipy.io.loadmat('Groundtruth_T1_T2.mat')
print(MRFData.keys())
T1_true = MRFData['T1_128']
T1_true = T1_true[:,:,np.newaxis]
T2_true = MRFData['T2_128']
T2_true = T2_true[:,:,np.newaxis]
print(T1_true.shape, T2_true.shape)
Label = np.concatenate([T1_true, T2_true], axis=2)
Label = Label.reshape((128*128,-1))
print(Label[0:16000:1000].T)
print(Label.shape, Label.dtype)
MRFData_Est = scipy.io.loadmat('X_FLOR_Gaussian_Ratio0_15_L1000.mat') # Gaussian pattern
print(MRFData_Est.keys())
X = MRFData_Est['X_estimated_flor']
print(X.shape, X.dtype)
X = X.reshape((128*128,-1))
X = np.real(X)
# zero out signatures whose norm is too small
NormX = np.zeros(X.shape[0])
NormX_index = np.empty(X.shape[0]) # index of valid values
NormX_index[:] = np.nan
print(NormX.shape, NormX_index.shape)
for i in range(0, X.shape[0]):
NormX[i] = np.sum(X[i,:]**2)
if NormX[i] < 10:  # norm threshold (values 1, 8, 20 and 125 were also tried)
X[i,:] = 0
NormX_index[i] = i
np.set_printoptions(precision=2)
NormX_index = NormX_index[~np.isnan(NormX_index)]
NormX_index = NormX_index.astype('int32') # arrays used as indices must be of integer (or boolean) type
X = normalize(X, norm = 'l2', axis=1)  # L2 normalization along the time dimension
X = np.expand_dims(X, axis=2)
print(X.shape, X.dtype)
# In[23]:
plt.figure()
Xpart = X[300:16000:2000,:,0]
print(Xpart.shape)
plt.plot(np.real(np.transpose(Xpart)))
plt.show()
#%%
# load json and create model
json_file = open('model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
# load weights into new model
#model.load_weights("model.h5") # load saved weights in final epoch.
model.load_weights("weights.best.hdf5") # load saved weights from the checkpoint.
print("Loaded model from disk")
# In[31]:
# calculate predictions
Tstart = time.clock()
predictions = model.predict(X)
Tend = time.clock()
Tcost = Tend - Tstart
predictions[NormX_index,:] = 0
print(predictions.shape)
print(predictions[0:200:10,:].T)
print(Label[0:200:10,:].T)
predictions = predictions.reshape((128,128,2))
# In[35]:
T1max = 4500
T2max = 2500
T1_true = np.squeeze(T1_true)
T2_true = np.squeeze(T2_true)
T1 = predictions[:,:,0]
T1[np.where((T1<0))] = 0
T1[np.where((T1>T1max))] = T1max
T2 = predictions[:,:,1]
T2[np.where((T2<0))] = 0
T2[np.where((T2>T2max))] = T2max
# remove invalid elements referring to the label.
T1 = T1 * (T1_true > 0)
T2 = T2 * (T2_true > 0)
print(T1_true.shape, T1.shape)
PSNR_T1 = psnr(T1,T1_true,T1max)
PSNR_T2 = psnr(T2,T2_true,T2max)
SNR_T1 = snr(T1,T1_true)
SNR_T2 = snr(T2,T2_true)
RMSE_T1 = rmse(T1,T1_true)
RMSE_T2 = rmse(T2,T2_true)
MRE_T1 = mre(T1,T1_true)
MRE_T2 = mre(T2,T2_true)
# compute correlation coefficients
Label = Label.reshape((128*128,-1))
T1 = T1[:,:,np.newaxis]
T2 = T2[:,:,np.newaxis]
predictions = np.concatenate([T1, T2], axis=2)
predictions = predictions.reshape((128*128,-1))
coeff_T1 = np.corrcoef(Label[:,0],predictions[:,0])
coeff_T1 = coeff_T1[0,1]
coeff_T2 = np.corrcoef(Label[:,1],predictions[:,1])
coeff_T2 = coeff_T2[0,1]
print('{:0.2f} / {:0.2f}'.format( PSNR_T1 , PSNR_T2 ))
print('{:0.2f} / {:0.2f}'.format( SNR_T1 , SNR_T2 ))
print('{:0.2f} / {:0.2f}'.format( RMSE_T1 , RMSE_T2 ))
print('{:0.8f} / {:0.8f}'.format( coeff_T1, coeff_T2))
print('{:0.2f} / {:0.2f}'.format( MRE_T1 , MRE_T2 ))
# save results
FileName = 'HYDRA_Test_2D_Anatomical_SubSample.npz'
np.savez(FileName,PSNR_T1 = PSNR_T1,PSNR_T2 = PSNR_T2,SNR_T1 = SNR_T1,SNR_T2 = SNR_T2,
RMSE_T1 = RMSE_T1, RMSE_T2 = RMSE_T2, coeff_T1 = coeff_T1, coeff_T2 = coeff_T2,
T1 = T1, T2 = T2, T1_true = T1_true,T2_true = T2_true, Tcost = Tcost)
# In[38]:
Results=np.load(FileName)
print('{:0.2f} / {:0.2f}'.format(Results['PSNR_T1'], Results['PSNR_T2']))
print('{:0.2f} / {:0.2f}'.format(Results['SNR_T1'], Results['SNR_T2']))
print('{:0.2f} / {:0.2f}'.format(Results['RMSE_T1'], Results['RMSE_T2']))
print('{:0.8f} / {:0.8f}'.format(Results['coeff_T1'], Results['coeff_T2']))
#print(Results['val_loss'][-10:-1],Results['loss'][-10:-1])
T1 = Results['T1']
T2 = Results['T2']
T1 = np.squeeze(T1)
T2 = np.squeeze(T2)
FigNameT1 = "T1_CNN_SubSample.png"
FigNameT2 = "T2_CNN_SubSample.png"
FigNameT1res = "T1_res_CNN_SubSample.png"
FigNameT2res = "T2_res_CNN_SubSample.png"
FigNameT1corr = "T1_corr_CNN_SubSample.png"
FigNameT2corr = "T2_corr_CNN_SubSample.png"
FigNameT1error = "T1_error_CNN_SubSample.png"
FigNameT2error = "T2_error_CNN_SubSample.png"
# In[36]:
mycmap = 'jet' # 'gray'
mydpi = 200
plt.figure()
plt.imshow(T1, cmap = mycmap)
plt.colorbar()
plt.clim(0,T1max)
plt.axis('off')
#plt.title('T1_Rec')
plt.grid(True)
plt.savefig(FigNameT1,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(T2, cmap = mycmap)
plt.colorbar()
plt.clim(0,T2max)
plt.axis('off')
#plt.title('T2_Rec')
plt.grid(True)
plt.savefig(FigNameT2,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(np.abs(T1_true-T1), cmap = mycmap)
plt.colorbar()
plt.clim(0,200)
plt.axis('off')
#plt.title('T1_residual')
plt.grid(True)
plt.savefig(FigNameT1res,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(np.abs(T2_true-T2), cmap = mycmap)
plt.colorbar()
plt.clim(0,100)
plt.axis('off')
#plt.title('T2_residual')
plt.grid(True)
plt.savefig(FigNameT2res,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
#%% show correlation coefficients
ind_T1 = np.argsort(T1_true.flatten())
temp = T1_true.flatten()
Label_T1 = temp[ind_T1]
temp = T1.flatten()
predictions_T1 = temp[ind_T1]
ind_T2 = np.argsort(T2_true.flatten())
temp = T2_true.flatten()
Label_T2 = temp[ind_T2]
temp = T2.flatten()
predictions_T2 = temp[ind_T2]
#%%
plt.figure(figsize = (3,3))
plt.plot(Label_T1, predictions_T1,'r.',label='Estimation')
plt.plot(Label_T1, Label_T1,'b-',label='Reference')
#plt.title('T1_Corr')
plt.grid(True)
plt.xlim((0, 5000))
plt.ylim((0, 5000))
plt.xlabel('Reference T1 (ms)')
plt.ylabel('Estimated T1 (ms)')
plt.legend(loc='best')
plt.savefig(FigNameT1corr,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure(figsize = (3,3))
plt.plot(Label_T2, predictions_T2,'r.',label='Estimation')
plt.plot(Label_T2, Label_T2,'b-',label='Reference')
#plt.title('T2_Corr')
plt.grid(True)
plt.xlim((0, 2000))
plt.ylim((0, 2000))
plt.xlabel('Reference T2 (ms)')
plt.ylabel('Estimated T2 (ms)')
plt.legend(loc='best')
plt.savefig(FigNameT2corr,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
#%%
# show error maps
plt.figure(figsize = (3,3))
plt.plot(Label_T1, predictions_T1-Label_T1,'r.',label='Estimation')
plt.grid(True)
plt.xlim((0, 5000))
plt.ylim((-100, 100))
plt.xlabel('Reference T1 (ms)')
plt.ylabel('Error of estimated T1 (ms)')
#plt.legend(loc='best')
plt.savefig(FigNameT1error,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure(figsize = (3,3))
plt.plot(Label_T2, predictions_T2-Label_T2,'r.',label='Estimation')
plt.grid(True)
plt.xlim((0, 2000))
plt.ylim((-40, 40))
plt.xlabel('Reference T2 (ms)')
plt.ylabel('Error of estimated T2 (ms)')
#plt.legend(loc='best')
plt.savefig(FigNameT2error,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
# In[37]:
# In[ ]:
# case 4: Testing on the anatomical dataset with k-space subsampling factor 9% using Spiral patterns.
MRFData = scipy.io.loadmat('Groundtruth_T1_T2.mat')
print(MRFData.keys())
T1_true = MRFData['T1_128']
T1_true = T1_true[:,:,np.newaxis]
T2_true = MRFData['T2_128']
T2_true = T2_true[:,:,np.newaxis]
print(T1_true.shape, T2_true.shape)
Label = np.concatenate([T1_true, T2_true], axis=2)
Label = Label.reshape((128*128,-1))
print(Label[0:16000:1000].T)
print(Label.shape, Label.dtype)
MRFData_Est = scipy.io.loadmat('X_FLOR_Spiral_Ratio0_09_L1000.mat') # Spiral pattern
print(MRFData_Est.keys())
X = MRFData_Est['X_estimated_flor']
print(X.shape, X.dtype)
X = X.reshape((128*128,-1))
X = np.real(X)
# zero out signatures whose norm is too small
NormX = np.zeros(X.shape[0])
NormX_index = np.empty(X.shape[0]) # index of valid values
NormX_index[:] = np.nan
print(NormX.shape, NormX_index.shape)
for i in range(0, X.shape[0]):
NormX[i] = np.sum(X[i,:]**2)
if NormX[i] < 150:  # norm threshold (values 1, 8, 10, 20 and 125 were also tried)
X[i,:] = 0
NormX_index[i] = i
np.set_printoptions(precision=2)
NormX_index = NormX_index[~np.isnan(NormX_index)]
NormX_index = NormX_index.astype('int32') # arrays used as indices must be of integer (or boolean) type
X = normalize(X, norm = 'l2', axis=1)  # L2 normalization along the time dimension
X = np.expand_dims(X, axis=2)
print(X.shape, X.dtype)
# In[23]:
plt.figure()
Xpart = X[300:16000:2000,:,0]
print(Xpart.shape)
plt.plot(np.real(np.transpose(Xpart)))
plt.show()
#%%
# load json and create model
json_file = open('model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
# load weights into new model
#model.load_weights("model.h5") # load saved weights in final epoch.
model.load_weights("weights.best.hdf5") # load saved weights from the checkpoint.
print("Loaded model from disk")
# In[31]:
# calculate predictions
Tstart = time.clock()
predictions = model.predict(X)
Tend = time.clock()
Tcost = Tend - Tstart
predictions[NormX_index,:] = 0
print(predictions.shape)
print(predictions[0:200:10,:].T)
print(Label[0:200:10,:].T)
predictions = predictions.reshape((128,128,2))
# In[35]:
T1max = 4500
T2max = 2500
T1_true = np.squeeze(T1_true)
T2_true = np.squeeze(T2_true)
T1 = predictions[:,:,0]
T1[np.where((T1<0))] = 0
T1[np.where((T1>T1max))] = T1max
T2 = predictions[:,:,1]
T2[np.where((T2<0))] = 0
T2[np.where((T2>T2max))] = T2max
# remove invalid elements referring to the label.
T1 = T1 * (T1_true > 0)
T2 = T2 * (T2_true > 0)
print(T1_true.shape, T1.shape)
PSNR_T1 = psnr(T1,T1_true,T1max)
PSNR_T2 = psnr(T2,T2_true,T2max)
SNR_T1 = snr(T1,T1_true)
SNR_T2 = snr(T2,T2_true)
RMSE_T1 = rmse(T1,T1_true)
RMSE_T2 = rmse(T2,T2_true)
MRE_T1 = mre(T1,T1_true)
MRE_T2 = mre(T2,T2_true)
# compute correlation coefficients
Label = Label.reshape((128*128,-1))
T1 = T1[:,:,np.newaxis]
T2 = T2[:,:,np.newaxis]
predictions = np.concatenate([T1, T2], axis=2)
predictions = predictions.reshape((128*128,-1))
coeff_T1 = np.corrcoef(Label[:,0],predictions[:,0])
coeff_T1 = coeff_T1[0,1]
coeff_T2 = np.corrcoef(Label[:,1],predictions[:,1])
coeff_T2 = coeff_T2[0,1]
print('{:0.2f} / {:0.2f}'.format( PSNR_T1 , PSNR_T2 ))
print('{:0.2f} / {:0.2f}'.format( SNR_T1 , SNR_T2 ))
print('{:0.2f} / {:0.2f}'.format( RMSE_T1 , RMSE_T2 ))
print('{:0.8f} / {:0.8f}'.format( coeff_T1, coeff_T2))
print('{:0.2f} / {:0.2f}'.format( MRE_T1 , MRE_T2 ))
# save results
FileName = 'HYDRA_Test_2D_Anatomical_SpiralSubSample.npz'
np.savez(FileName,PSNR_T1 = PSNR_T1,PSNR_T2 = PSNR_T2,SNR_T1 = SNR_T1,SNR_T2 = SNR_T2,
RMSE_T1 = RMSE_T1, RMSE_T2 = RMSE_T2, coeff_T1 = coeff_T1, coeff_T2 = coeff_T2,
T1 = T1, T2 = T2, T1_true = T1_true,T2_true = T2_true, Tcost = Tcost)
# In[38]:
Results=np.load(FileName)
print('{:0.2f} / {:0.2f}'.format(Results['PSNR_T1'], Results['PSNR_T2']))
print('{:0.2f} / {:0.2f}'.format(Results['SNR_T1'], Results['SNR_T2']))
print('{:0.2f} / {:0.2f}'.format(Results['RMSE_T1'], Results['RMSE_T2']))
print('{:0.8f} / {:0.8f}'.format(Results['coeff_T1'], Results['coeff_T2']))
#print(Results['val_loss'][-10:-1],Results['loss'][-10:-1])
T1 = Results['T1']
T2 = Results['T2']
T1 = np.squeeze(T1)
T2 = np.squeeze(T2)
FigNameT1 = "T1_CNN_SpiralSubSample.png"
FigNameT2 = "T2_CNN_SpiralSubSample.png"
FigNameT1res = "T1_res_CNN_SpiralSubSample.png"
FigNameT2res = "T2_res_CNN_SpiralSubSample.png"
FigNameT1corr = "T1_corr_CNN_SpiralSubSample.png"
FigNameT2corr = "T2_corr_CNN_SpiralSubSample.png"
FigNameT1error = "T1_error_CNN_SpiralSubSample.png"
FigNameT2error = "T2_error_CNN_SpiralSubSample.png"
# In[36]:
mycmap = 'jet' # 'gray'
mydpi = 200
plt.figure()
plt.imshow(T1, cmap = mycmap)
plt.colorbar()
plt.clim(0,T1max)
plt.axis('off')
#plt.title('T1_Rec')
plt.grid(True)
plt.savefig(FigNameT1,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(T2, cmap = mycmap)
plt.colorbar()
plt.clim(0,T2max)
plt.axis('off')
#plt.title('T2_Rec')
plt.grid(True)
plt.savefig(FigNameT2,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(np.abs(T1_true-T1), cmap = mycmap)
plt.colorbar()
plt.clim(0,200)
plt.axis('off')
#plt.title('T1_residual')
plt.grid(True)
plt.savefig(FigNameT1res,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure()
plt.imshow(np.abs(T2_true-T2), cmap = mycmap)
plt.colorbar()
plt.clim(0,100)
plt.axis('off')
#plt.title('T2_residual')
plt.grid(True)
plt.savefig(FigNameT2res,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
#%% show correlation coefficients
ind_T1 = np.argsort(T1_true.flatten())
temp = T1_true.flatten()
Label_T1 = temp[ind_T1]
temp = T1.flatten()
predictions_T1 = temp[ind_T1]
ind_T2 = np.argsort(T2_true.flatten())
temp = T2_true.flatten()
Label_T2 = temp[ind_T2]
temp = T2.flatten()
predictions_T2 = temp[ind_T2]
#%%
plt.figure(figsize = (3,3))
plt.plot(Label_T1, predictions_T1,'r.',label='Estimation')
plt.plot(Label_T1, Label_T1,'b-',label='Reference')
#plt.title('T1_Corr')
plt.grid(True)
plt.xlim((0, 5000))
plt.ylim((0, 5000))
plt.xlabel('Reference T1 (ms)')
plt.ylabel('Estimated T1 (ms)')
plt.legend(loc='best')
plt.savefig(FigNameT1corr,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure(figsize = (3,3))
plt.plot(Label_T2, predictions_T2,'r.',label='Estimation')
plt.plot(Label_T2, Label_T2,'b-',label='Reference')
#plt.title('T2_Corr')
plt.grid(True)
plt.xlim((0, 2000))
plt.ylim((0, 2000))
plt.xlabel('Reference T2 (ms)')
plt.ylabel('Estimated T2 (ms)')
plt.legend(loc='best')
plt.savefig(FigNameT2corr,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
#%%
# show error maps
plt.figure(figsize = (3,3))
plt.plot(Label_T1, predictions_T1-Label_T1,'r.',label='Estimation')
plt.grid(True)
plt.xlim((0, 5000))
plt.ylim((-100, 100))
plt.xlabel('Reference T1 (ms)')
plt.ylabel('Error of estimated T1 (ms)')
#plt.legend(loc='best')
plt.savefig(FigNameT1error,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
plt.figure(figsize = (3,3))
plt.plot(Label_T2, predictions_T2-Label_T2,'r.',label='Estimation')
plt.grid(True)
plt.xlim((0, 2000))
plt.ylim((-40, 40))
plt.xlabel('Reference T2 (ms)')
plt.ylabel('Error of estimated T2 (ms)')
#plt.legend(loc='best')
plt.savefig(FigNameT2error,bbox_inches='tight',transparent = True,pad_inches = 0,dpi=mydpi)
plt.show()
# In[37]:
| 27.520492 | 375 | 0.707163 | 5,409 | 33,575 | 4.24755 | 0.093733 | 0.014886 | 0.014363 | 0.018281 | 0.828205 | 0.793079 | 0.779978 | 0.77728 | 0.770533 | 0.770533 | 0 | 0.062079 | 0.119136 | 33,575 | 1,219 | 376 | 27.543068 | 0.714725 | 0.207297 | 0 | 0.841623 | 0 | 0 | 0.130938 | 0.03975 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005236 | false | 0 | 0.032723 | 0 | 0.043194 | 0.106021 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5c8618036859301f5786d09432cdc342589cd11a | 21,242 | py | Python | lib/extensions/pacnet/test_pac.py | shampooma/openseg.pytorch | d1da408a1e870d52c058c359583bc098f7f3d9e2 | [
"MIT"
] | 1,069 | 2019-01-21T04:32:05.000Z | 2022-03-30T12:07:36.000Z | lib/extensions/pacnet/test_pac.py | shampooma/openseg.pytorch | d1da408a1e870d52c058c359583bc098f7f3d9e2 | [
"MIT"
] | 88 | 2019-02-13T03:43:09.000Z | 2022-03-27T08:23:29.000Z | lib/extensions/pacnet/test_pac.py | shampooma/openseg.pytorch | d1da408a1e870d52c058c359583bc098f7f3d9e2 | [
"MIT"
] | 124 | 2019-01-23T01:46:00.000Z | 2022-03-26T14:07:23.000Z | """
Copyright (C) 2019 NVIDIA Corporation. All rights reserved.
Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode).
"""
import unittest
from functools import wraps
import numpy as np
import torch as th
from torch import nn
from torch.autograd import gradcheck
import pac
def _allclose(x1, x2, rtol=1e-5, atol=1e-10):
return np.allclose(x1.cpu(), x2.cpu(), rtol=rtol, atol=atol)
def _gradcheck(f, x0, rtol=1e-3, atol=1e-8):
return gradcheck(f, x0, rtol=rtol, atol=atol)
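# Note: torch.autograd.gradcheck compares analytical gradients against
# finite-difference estimates and is reliable only in double precision, which
# is why the tests below call .double() on inputs and modules.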
# test both native autograd version and Function version
def repeat_impl_types(f):
@wraps(f)
def call_wrapped(self, *args):
f(self, *args, native_impl=True)
f(self, *args, native_impl=False)
return call_wrapped
# some features are not yet implemented using custom Function
def use_only_native_impl(f):
@wraps(f)
def call_wrapped(self, *args):
f(self, *args, native_impl=True)
return call_wrapped
# test only the version with custom Function
def use_only_custom_impl(f):
@wraps(f)
def call_wrapped(self, *args):
f(self, *args, native_impl=False)
return call_wrapped
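# Illustration (hypothetical example, not part of the original suite): the
# decorators above fix the `native_impl` argument of a test body:
#
#     @repeat_impl_types
#     def test_example(self, native_impl):
#         ...  # runs once with native_impl=True, then with native_impl=False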
class PacConvTest(unittest.TestCase):
def setUp(self):
self.device = th.device('cuda:0')
th.cuda.set_device(0)
@repeat_impl_types
def test_conv_forward_const_kernel(self, native_impl):
bs, sz, k_ch = 2, 111, 5
args = dict(in_channels=4, out_channels=3, kernel_size=5, stride=2, padding=4, dilation=2)
im = th.rand(bs, args['in_channels'], sz, sz).to(self.device)
im_th = im.clone()
im_k = th.ones(bs, k_ch, sz, sz).to(self.device)
conv_w = th.rand(args['out_channels'], args['in_channels'],
args['kernel_size'], args['kernel_size']).to(self.device)
conv_b = th.rand(args['out_channels']).to(self.device)
conv = pac.PacConv2d(native_impl=native_impl, **args).to(self.device)
conv_th = nn.Conv2d(**args).to(self.device)
conv.weight.data[:] = conv_th.weight.data[:] = conv_w
conv.bias.data[:] = conv_th.bias.data[:] = conv_b
_allclose(conv(im, im_k).detach(), conv_th(im_th).detach())
@repeat_impl_types
def test_conv_transpose_forward_const_kernel(self, native_impl):
bs, sz, k_ch = 4, 128, 5
args = dict(in_channels=4, out_channels=3, kernel_size=5, stride=2, padding=2, output_padding=1, dilation=1)
k_with_d = (args['kernel_size'] - 1) * args['dilation'] + 1
sz_out = (sz - 1) * args['stride'] - 2 * args['padding'] + k_with_d + args['output_padding']
im = th.rand(bs, args['in_channels'], sz, sz).to(self.device)
im_th = im.clone()
im_k = th.ones(bs, k_ch, sz_out, sz_out).to(self.device)
conv_w = th.rand(args['in_channels'], args['out_channels'],
args['kernel_size'], args['kernel_size']).to(self.device)
conv_b = th.rand(args['out_channels']).to(self.device)
conv = pac.PacConvTranspose2d(native_impl=native_impl, **args).to(self.device)
conv_th = nn.ConvTranspose2d(**args).to(self.device)
conv.weight.data[:] = conv_th.weight.data[:] = conv_w
conv.bias.data[:] = conv_th.bias.data[:] = conv_b
_allclose(conv(im, im_k).detach(), conv_th(im_th).detach())
@repeat_impl_types
def test_pool_forward_const_kernel(self, native_impl):
bs, sz, in_ch, k_ch = 2, 9, 4, 5
dilation = 1
args = dict(kernel_size=5, stride=2, padding=2)
im = th.rand(bs, in_ch, sz, sz).to(self.device)
im_th = im.clone()
im_k = th.ones(bs, k_ch, sz, sz).to(self.device)
pool = pac.PacPool2d(dilation=dilation, native_impl=native_impl, **args).to(self.device)
pool_th = nn.AvgPool2d(**args).to(self.device)
_allclose(pool(im, im_k).detach(), pool_th(im_th).detach())
@repeat_impl_types
def test_conv_input_grad(self, native_impl):
bs, sz, k_ch = 2, 8, 3
args = dict(in_channels=4, out_channels=2, kernel_size=3, stride=2, padding=1, dilation=1)
im = th.rand(bs, args['in_channels'], sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz, sz).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv = pac.PacConv2d(native_impl=native_impl, **args).double().to(self.device)
self.assertTrue(_gradcheck(conv, (im, im_k)))
@use_only_native_impl
def test_conv_inv_kernel_input_grad(self, native_impl):
bs, sz, k_ch = 2, 8, 3
args = dict(in_channels=4, out_channels=2, kernel_size=3, stride=2, padding=1, dilation=1,
kernel_type='inv_0.2_0.2_asym', smooth_kernel_type='average_5', normalize_kernel=True)
im = th.rand(bs, args['in_channels'], sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz, sz).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv = pac.PacConv2d(native_impl=native_impl, **args).double().to(self.device)
self.assertTrue(_gradcheck(conv, (im, im_k)))
@repeat_impl_types
def test_conv_all_grad(self, native_impl):
bs, sz, k_ch, f_sz, in_ch, out_ch = 2, 10, 3, 5, 2, 4
conv_args = dict(stride=1, padding=2, dilation=2)
kernel_args = dict(kernel_size=f_sz, smooth_kernel=None, inv_alpha=None, inv_lambda=None,
kernel_type='gaussian', smooth_kernel_type='none',
channel_wise=False, normalize_kernel=False, transposed=False,
**conv_args)
im = th.rand(bs, in_ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz, sz).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv_w = th.rand(out_ch, in_ch, f_sz, f_sz).double().to(self.device)
conv_b = th.rand(out_ch).double().to(self.device)
self.assertTrue(_gradcheck(
lambda in0, in1, w, b: pac.pacconv2d(in0,
pac.packernel2d(in1, **kernel_args)[0],
w, b, native_impl=native_impl, **conv_args),
(im, im_k, conv_w, conv_b)))
@repeat_impl_types
def test_conv_transpose_input_grad(self, native_impl):
bs, sz, k_ch = 1, 4, 2
args = dict(in_channels=2, out_channels=3, kernel_size=3, stride=2, padding=1, output_padding=1, dilation=1)
k_with_d = (args['kernel_size'] - 1) * args['dilation'] + 1
sz_out = (sz - 1) * args['stride'] - 2 * args['padding'] + k_with_d + args['output_padding']
im = th.rand(bs, args['in_channels'], sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz_out, sz_out).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv = pac.PacConvTranspose2d(native_impl=native_impl, **args).double().to(self.device)
self.assertTrue(_gradcheck(conv, (im, im_k)))
@repeat_impl_types
def test_conv_transpose_all_grad(self, native_impl):
bs, sz, k_ch, f_sz, in_ch, out_ch = 2, 3, 3, 3, 2, 3
conv_args = dict(stride=2, padding=1, output_padding=1, dilation=1)
kernel_args = dict(kernel_size=f_sz, smooth_kernel=None, inv_alpha=None, inv_lambda=None,
kernel_type='gaussian', smooth_kernel_type='none',
channel_wise=False, normalize_kernel=False, transposed=True,
**conv_args)
k_with_d = (f_sz - 1) * conv_args['dilation'] + 1
sz_out = (sz - 1) * conv_args['stride'] - 2 * conv_args['padding'] + k_with_d + conv_args['output_padding']
im = th.rand(bs, in_ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz_out, sz_out).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv_w = th.rand(in_ch, out_ch, f_sz, f_sz).double().to(self.device)
conv_b = th.rand(out_ch).double().to(self.device)
self.assertTrue(_gradcheck(
lambda in0, in1, w, b: pac.pacconv_transpose2d(in0,
pac.packernel2d(in1, **kernel_args)[0],
w, b, native_impl=native_impl, **conv_args),
(im, im_k, conv_w, conv_b)))
@repeat_impl_types
def test_pool_grad(self, native_impl):
bs, sz, ch, k_ch = 2, 8, 2, 3
args = dict(kernel_size=5, stride=2, padding=4, dilation=2)
im = th.rand(bs, ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz, sz).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
pool = pac.PacPool2d(native_impl=native_impl, **args).double().to(self.device)
self.assertTrue(_gradcheck(pool, (im, im_k)))
def test_conv_two_impl_match(self):
bs, sz, k_ch = 24, 128, 3
args = dict(in_channels=4, out_channels=2, kernel_size=3, stride=2, padding=2, dilation=2)
im = th.rand(bs, args['in_channels'], sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz, sz).double().to(self.device)
im0 = im.clone()
im0_k = im_k.clone()
im.requires_grad = im_k.requires_grad = True
im0.requires_grad = im0_k.requires_grad = True
conv = pac.PacConv2d(native_impl=False, **args).double().to(self.device)
conv0 = pac.PacConv2d(native_impl=True, **args).double().to(self.device)
conv_w = th.rand(args['out_channels'], args['in_channels'],
args['kernel_size'], args['kernel_size']).double().to(self.device)
conv_b = th.rand(args['out_channels']).double().to(self.device)
conv.weight.data[:] = conv0.weight.data[:] = conv_w
conv.bias.data[:] = conv0.bias.data[:] = conv_b
out = conv(im, im_k)
out0 = conv0(im0, im0_k)
out.sum().backward()
out0.sum().backward()
self.assertTrue(_allclose(out.detach(), out0.detach()))
self.assertTrue(_allclose(im.grad, im0.grad))
self.assertTrue(_allclose(im_k.grad, im0_k.grad))
self.assertTrue(_allclose(conv.weight.grad, conv0.weight.grad))
self.assertTrue(_allclose(conv.bias.grad, conv0.bias.grad))
def test_conv_with_kernel_input_two_impl_match(self):
bs, sz, k_ch = 24, 128, 3
args = dict(in_channels=4, out_channels=2, kernel_size=3, stride=2, padding=2, dilation=2)
im = th.rand(bs, args['in_channels'], sz, sz).double().to(self.device)
out_sz = int(np.floor(
(sz + 2 * args['padding'] - (args['kernel_size'] - 1) * args['dilation'] - 1) / args['stride'])) + 1
im_k = th.rand(bs, 1, args['kernel_size'], args['kernel_size'], out_sz, out_sz).double().to(self.device)
im0 = im.clone()
im0_k = im_k.clone()
im.requires_grad = im_k.requires_grad = True
im0.requires_grad = im0_k.requires_grad = True
conv = pac.PacConv2d(native_impl=False, **args).double().to(self.device)
conv0 = pac.PacConv2d(native_impl=True, **args).double().to(self.device)
conv_w = th.rand(args['out_channels'], args['in_channels'],
args['kernel_size'], args['kernel_size']).double().to(self.device)
conv_b = th.rand(args['out_channels']).double().to(self.device)
conv.weight.data[:] = conv0.weight.data[:] = conv_w
conv.bias.data[:] = conv0.bias.data[:] = conv_b
out = conv(im, None, im_k)
out0 = conv0(im0, None, im0_k)
out.sum().backward()
out0.sum().backward()
self.assertTrue(_allclose(out.detach(), out0.detach()))
self.assertTrue(_allclose(im.grad, im0.grad))
self.assertTrue(_allclose(im_k.grad, im0_k.grad))
self.assertTrue(_allclose(conv.weight.grad, conv0.weight.grad))
self.assertTrue(_allclose(conv.bias.grad, conv0.bias.grad))
def test_conv_transpose_two_impl_match(self):
bs, sz, k_ch = 3, 128, 3
args = dict(in_channels=2, out_channels=3, kernel_size=3, stride=2, padding=1, output_padding=1, dilation=1)
k_with_d = (args['kernel_size'] - 1) * args['dilation'] + 1
sz_out = (sz - 1) * args['stride'] - 2 * args['padding'] + k_with_d + args['output_padding']
im = th.rand(bs, args['in_channels'], sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz_out, sz_out).double().to(self.device)
im0 = im.clone()
im0_k = im_k.clone()
im.requires_grad = im_k.requires_grad = True
im0.requires_grad = im0_k.requires_grad = True
conv = pac.PacConvTranspose2d(native_impl=False, **args).double().to(self.device)
conv0 = pac.PacConvTranspose2d(native_impl=True, **args).double().to(self.device)
conv_w = th.rand(args['in_channels'], args['out_channels'],
args['kernel_size'], args['kernel_size']).double().to(self.device)
conv_b = th.rand(args['out_channels']).double().to(self.device)
conv.weight.data[:] = conv0.weight.data[:] = conv_w
conv.bias.data[:] = conv0.bias.data[:] = conv_b
out = conv(im, im_k)
out0 = conv0(im0, im0_k)
out.sum().backward()
out0.sum().backward()
self.assertTrue(_allclose(out.detach(), out0.detach()))
self.assertTrue(_allclose(im.grad, im0.grad))
self.assertTrue(_allclose(im_k.grad, im0_k.grad))
self.assertTrue(_allclose(conv.weight.grad, conv0.weight.grad))
self.assertTrue(_allclose(conv.bias.grad, conv0.bias.grad))
def test_pool_two_impl_match(self):
bs, sz, ch, k_ch = 2, 128, 4, 3
args = dict(kernel_size=3, stride=2, padding=2, dilation=2)
im = th.rand(bs, ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz, sz).double().to(self.device)
im0 = im.clone()
im0_k = im_k.clone()
im.requires_grad = im_k.requires_grad = True
im0.requires_grad = im0_k.requires_grad = True
pool = pac.PacPool2d(native_impl=False, **args).to(self.device)
pool0 = pac.PacPool2d(native_impl=True, **args).to(self.device)
out = pool(im, im_k)
out0 = pool0(im0, im0_k)
out.sum().backward()
out0.sum().backward()
self.assertTrue(_allclose(out.detach(), out0.detach()))
self.assertTrue(_allclose(im.grad, im0.grad))
self.assertTrue(_allclose(im_k.grad, im0_k.grad))
def test_kernel_two_impl_match(self):
bs, sz, ch = 16, 256, 8
args = dict(kernel_size=3, stride=1, padding=1, dilation=1)
im = th.rand(bs, ch, sz, sz).double().to(self.device)
im0 = im.clone()
im.requires_grad = im0.requires_grad = True
out = pac.packernel2d(im, native_impl=False, **args)[0]
out0 = pac.packernel2d(im0, native_impl=True, **args)[0]
out.sum().backward()
out0.sum().backward()
self.assertTrue(_allclose(out.detach(), out0.detach()))
self.assertTrue(_allclose(im.grad, im0.grad))
# Tests below pass on small input sizes, but may fail on larger ones
@repeat_impl_types
def test_conv_sum_all_grad(self, native_impl):
bs, sz, k_ch, f_sz, in_ch, out_ch = 2, 10, 3, 5, 2, 4
conv_args = dict(stride=1, padding=2, dilation=2)
kernel_args = dict(kernel_size=f_sz, smooth_kernel=None, inv_alpha=None, inv_lambda=None,
kernel_type='gaussian', smooth_kernel_type='none',
channel_wise=False, normalize_kernel=False, transposed=False,
**conv_args)
im = th.rand(bs, in_ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz, sz).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv_w = th.rand(out_ch, in_ch, f_sz, f_sz).double().to(self.device)
conv_b = th.rand(out_ch).double().to(self.device)
self.assertTrue(_gradcheck(
lambda in0, in1, w, b: pac.pacconv2d(in0,
pac.packernel2d(in1, **kernel_args)[0],
w, b, native_impl=native_impl, **conv_args).sum(),
(im, im_k, conv_w, conv_b), rtol=0.01))
@repeat_impl_types
def test_conv_transpose_sum_all_grad(self, native_impl):
bs, sz, k_ch, f_sz, in_ch, out_ch = 2, 3, 3, 3, 2, 3
conv_args = dict(stride=2, padding=1, output_padding=1, dilation=1)
kernel_args = dict(kernel_size=f_sz, smooth_kernel=None, inv_alpha=None, inv_lambda=None,
kernel_type='gaussian', smooth_kernel_type='none',
channel_wise=False, normalize_kernel=False, transposed=True,
**conv_args)
k_with_d = (f_sz - 1) * conv_args['dilation'] + 1
sz_out = (sz - 1) * conv_args['stride'] - 2 * conv_args['padding'] + k_with_d + conv_args['output_padding']
im = th.rand(bs, in_ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz_out, sz_out).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv_w = th.rand(in_ch, out_ch, f_sz, f_sz).double().to(self.device)
conv_b = th.rand(out_ch).double().to(self.device)
self.assertTrue(_gradcheck(
lambda in0, in1, w, b: pac.pacconv_transpose2d(in0,
pac.packernel2d(in1, **kernel_args)[0],
w, b, native_impl=native_impl, **conv_args).sum(),
(im, im_k, conv_w, conv_b), rtol=0.01))
@repeat_impl_types
def test_pool_sum_grad(self, native_impl):
bs, sz, ch, k_ch = 2, 8, 2, 3
args = dict(kernel_size=5, stride=2, padding=4, dilation=2)
im = th.rand(bs, ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, k_ch, sz, sz).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
pool = pac.PacPool2d(native_impl=native_impl, **args).double().to(self.device)
self.assertTrue(_gradcheck(lambda x, y: pool(x, y).sum(), (im, im_k), rtol=0.01))
@repeat_impl_types
def test_kernel_sum_grad(self, native_impl):
bs, sz, ch = 2, 4, 4
args = dict(kernel_size=3, stride=2, padding=1, dilation=1)
im = th.rand(bs, ch, sz, sz).double().to(self.device)
im.requires_grad = True
self.assertTrue(_gradcheck(lambda x: pac.packernel2d(x, native_impl=native_impl, **args)[0].sum(),
(im,), rtol=0.01))
@repeat_impl_types
def test_conv_with_kernel_input_sum_all_grad(self, native_impl):
bs, sz, k_ch, f_sz, in_ch, out_ch = 2, 10, 3, 5, 2, 4
args = dict(stride=1, padding=2, dilation=2)
out_sz = int(np.floor((sz + 2 * args['padding'] - (f_sz - 1) * args['dilation'] - 1) / args['stride'])) + 1
im = th.rand(bs, in_ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, 1, f_sz, f_sz, out_sz, out_sz).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv_w = th.rand(out_ch, in_ch, f_sz, f_sz).double().to(self.device)
conv_b = th.rand(out_ch).double().to(self.device)
self.assertTrue(_gradcheck(
lambda in0, in1, w, b: pac.pacconv2d(in0, in1, w, b, native_impl=native_impl, **args).sum(),
(im, im_k, conv_w, conv_b), rtol=0.01))
@repeat_impl_types
def test_conv_transpose_with_kernel_input_sum_all_grad(self, native_impl):
bs, sz, k_ch, f_sz, in_ch, out_ch = 2, 3, 3, 3, 2, 3
args = dict(stride=2, padding=1, output_padding=1, dilation=1)
k_with_d = (f_sz - 1) * args['dilation'] + 1
sz_out = (sz - 1) * args['stride'] - 2 * args['padding'] + k_with_d + args['output_padding']
im = th.rand(bs, in_ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, 1, f_sz, f_sz, sz_out, sz_out).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
conv_w = th.rand(in_ch, out_ch, f_sz, f_sz).double().to(self.device)
conv_b = th.rand(out_ch).double().to(self.device)
self.assertTrue(_gradcheck(
lambda in0, in1, w, b: pac.pacconv_transpose2d(in0, in1, w, b, native_impl=native_impl, **args).sum(),
(im, im_k, conv_w, conv_b), rtol=0.01))
@repeat_impl_types
def test_pool_with_kernel_input_sum_grad(self, native_impl):
bs, sz, ch = 2, 8, 2
args = dict(kernel_size=3, stride=2, padding=2, dilation=2)
out_sz = int(np.floor(
(sz + 2 * args['padding'] - (args['kernel_size'] - 1) * args['dilation'] - 1) / args['stride'])) + 1
im = th.rand(bs, ch, sz, sz).double().to(self.device)
im_k = th.rand(bs, 1, args['kernel_size'], args['kernel_size'], out_sz, out_sz).double().to(self.device)
im.requires_grad = im_k.requires_grad = True
pool = pac.PacPool2d(native_impl=native_impl, **args).double().to(self.device)
self.assertTrue(_gradcheck(lambda x, y: pool(x, None, y).sum(),
(im, im_k), rtol=0.01))
if __name__ == '__main__':
unittest.main()
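For reference, a minimal standalone sketch (plain PyTorch only, no pac; variable names mirror the tests above) that double-checks the two output-size formulas these tests rely on, for forward and for transposed convolution:

import numpy as np
import torch as th
import torch.nn.functional as F

# Forward conv: out = floor((sz + 2*padding - (f_sz - 1)*dilation - 1) / stride) + 1
sz, f_sz, stride, padding, dilation = 10, 5, 1, 2, 2
out_sz = int(np.floor((sz + 2 * padding - (f_sz - 1) * dilation - 1) / stride)) + 1
y = F.conv2d(th.rand(1, 2, sz, sz), th.rand(4, 2, f_sz, f_sz),
             stride=stride, padding=padding, dilation=dilation)
assert y.shape[-1] == out_sz  # 10 -> 6 with these arguments

# Transposed conv: out = (sz - 1)*stride - 2*padding + (f_sz - 1)*dilation + 1 + output_padding
sz, f_sz, stride, padding, output_padding, dilation = 3, 3, 2, 1, 1, 1
sz_out = (sz - 1) * stride - 2 * padding + (f_sz - 1) * dilation + 1 + output_padding
yt = F.conv_transpose2d(th.rand(1, 2, sz, sz), th.rand(2, 3, f_sz, f_sz),
                        stride=stride, padding=padding,
                        output_padding=output_padding, dilation=dilation)
assert yt.shape[-1] == sz_out  # 3 -> 6 with these arguments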
| 50.099057 | 116 | 0.609688 | 3,227 | 21,242 | 3.78308 | 0.055469 | 0.067988 | 0.080603 | 0.094364 | 0.896625 | 0.885157 | 0.875819 | 0.868856 | 0.848378 | 0.828391 | 0 | 0.026915 | 0.239149 | 21,242 | 423 | 117 | 50.217494 | 0.728437 | 0.018454 | 0 | 0.730337 | 0 | 0 | 0.039925 | 0 | 0 | 0 | 0 | 0 | 0.092697 | 1 | 0.08427 | false | 0 | 0.019663 | 0.005618 | 0.120787 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5c8a92be0f7b69d130798b1e6c60aa063be3200e | 137 | py | Python | tests/utils.py | makingspace/quade | b02f2a5cdf47dc560f0bc2825a4fd7e989846086 | [
"BSD-3-Clause"
] | 4 | 2017-12-22T00:34:10.000Z | 2019-12-07T08:57:29.000Z | tests/utils.py | makingspace/quade | b02f2a5cdf47dc560f0bc2825a4fd7e989846086 | [
"BSD-3-Clause"
] | 22 | 2017-12-24T03:59:20.000Z | 2018-02-01T19:55:48.000Z | tests/utils.py | makingspace/quade | b02f2a5cdf47dc560f0bc2825a4fd7e989846086 | [
"BSD-3-Clause"
] | null | null | null | import os
import unittest
def requires_celery(func):
return unittest.skipUnless(os.getenv("TEST_CELERY"), "Requires Celery")(func)
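A hypothetical usage sketch for the decorator above (the import path follows this file's tests/utils.py location; the test name and body are illustrative): the decorated test is skipped unless the TEST_CELERY environment variable is set.

import unittest
from tests.utils import requires_celery

class CeleryTaskTest(unittest.TestCase):
    @requires_celery
    def test_runs_only_with_celery(self):
        self.assertTrue(True)  # placeholder body; skipped without TEST_CELERY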
| 19.571429 | 81 | 0.766423 | 18 | 137 | 5.722222 | 0.611111 | 0.271845 | 0.349515 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116788 | 137 | 6 | 82 | 22.833333 | 0.85124 | 0 | 0 | 0 | 0 | 0 | 0.189781 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 7 |
5c963e5572fea04ce79646a10d8a38c9e461de40 | 54 | py | Python | tests/test_logger.py | nielse63/PiPlanter | 94ed5265fd4d9b4183edd4a67047d976ee5cdd72 | [
"MIT"
] | null | null | null | tests/test_logger.py | nielse63/PiPlanter | 94ed5265fd4d9b4183edd4a67047d976ee5cdd72 | [
"MIT"
] | 118 | 2021-03-08T11:04:41.000Z | 2022-03-31T11:07:05.000Z | tests/test_logger.py | nielse63/PiPlanter | 94ed5265fd4d9b4183edd4a67047d976ee5cdd72 | [
"MIT"
] | null | null | null | import pyplanter.logger
def test_logger():
pass
| 9 | 23 | 0.722222 | 7 | 54 | 5.428571 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.203704 | 54 | 5 | 24 | 10.8 | 0.883721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
7a4fe6463f0008ea40bfa3afe468c8474eb14b0c | 118,501 | py | Python | openstack_dashboard/dashboards/admin/avos/static/txt/ceilometercommands.py | fossabot/avos | 4aa112a50972b6d29d1abb6fe1b3ec46950ec3d0 | [
"Apache-2.0"
] | 60 | 2015-03-09T14:31:46.000Z | 2021-12-12T19:22:31.000Z | openstack_dashboard/dashboards/admin/avos/static/txt/ceilometercommands.py | 2733284198/avos | becf7dd313fb8569581f985118c8367921c731ab | [
"Apache-2.0"
] | 7 | 2015-04-13T13:21:10.000Z | 2016-02-24T18:38:28.000Z | openstack_dashboard/dashboards/admin/avos/static/txt/ceilometercommands.py | 2733284198/avos | becf7dd313fb8569581f985118c8367921c731ab | [
"Apache-2.0"
] | 13 | 2015-03-09T17:26:26.000Z | 2020-02-22T19:19:14.000Z |
ceilometerclient.Client('1', endpoint='http://172.29.86.41:35357/v2.0', username="admin", api_key="ADMIN_PASS")
carbohydrate-9662312c-a784-4c4d-b959-8ced233f8430:
from novaclient import client as novaclient
from ceilometerclient import client as ceilometerclient
from keystoneclient import client as keystoneclient
def get_token():
keystone = keystoneclient.Client(username="admin", password="ADMIN_PASS", tenant_name="admin", auth_url="http://172.29.86.41:35357/v2.0")
token = keystone.service_catalog.catalog['token']['id']
return token
ceilometer = ceilometerclient.Client('2', endpoint='http://172.29.86.41:8777', token=get_token())
ceilometer.
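A hedged sketch of where the dangling "ceilometer." line above was likely headed, reusing the token-based client built just above and only Meter attributes (name, unit, resource_id) that appear in the dumps further down:

for m in ceilometer.meters.list():  # assumes the client constructed above
    print(m.name, m.unit, m.resource_id)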
nova = novaclient.Client("1.1", username="admin", api_key="ADMIN_PASS", auth_url="http://10.0.120.143:35357/v2.0", project_id="admin")
nova = novaclient.Client("1.1", username=OS_USERNAME, api_key=OS_PASSWORD, auth_url=OS_ENDPOINT, project_id=OS_TENANT)
servers = nova.servers.list(detailed=True)
nova = novaclient.Client("2", auth_url="http://10.0.120.143:35357/v2.0", username="admin", api_key="ADMIN_PASS", project_id="admin" )
________
from glanceclient import client as glanceclient
____________
from ceilometerclient import client as ceilometerclient
ceilometer = ceilometerclient.get_client("2", os_auth_url="http://10.0.120.143:35357/v2.0", os_username="admin", os_password="ADMIN_PASS", os_tenant_name="admin" )
meters = ceilometer.meters.list()
ceilometer.meters.list(q=[{"field":"resource_id","op":"eq","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}])
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"28630164-5ef1-4a96-8b6e-96d0d7878cfa"}], groupby='metadata.flavor')
{"field":"duration_start","op":"gt","value":"2014-03-20T19:39:22"}],
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}])
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"duration_start","op":"gt","value":"2014-03-20T19:39:22"}])
{"field":"period_start","op":"gt","value":"2014-03-20T19:39:22"}
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"project_id","value":"admin"}], )
groupby=metadata.flavor&
{field=this,op=le,value=34}
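The fragments above all share the {field, op, value} filter shape the v2 query API expects; a hypothetical helper (not part of ceilometerclient) makes that explicit:

def q(field, value, op="eq"):
    # Hypothetical convenience helper: builds one filter triplet.
    # "eq" is the v2 API's default operator when op is omitted.
    return {"field": field, "op": op, "value": value}

filters = [q("resource_id", "28630164-5ef1-4a96-8b6e-96d0d7878cfa"),
           q("duration_start", "2014-03-20T19:39:22", op="gt")]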
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"28630164-5ef1-4a96-8b6e-96d0d7878cfa"}], period=600, groupby='instance_id')
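Putting the pieces together, a minimal end-to-end sketch; the endpoint and credentials are this deployment's placeholders, and the avg/period_start/groupby fields on Statistics are assumptions based on the v2 schema:

from ceilometerclient import client as ceilometerclient

cc = ceilometerclient.get_client(
    "2", os_auth_url="http://10.0.120.143:35357/v2.0",
    os_username="admin", os_password="ADMIN_PASS", os_tenant_name="admin")

stats = cc.statistics.list(
    meter_name="cpu_util",
    q=[{"field": "duration_start", "op": "gt", "value": "2014-03-20T19:39:22"}],
    period=600, groupby='resource_id')
for s in stats:
    # groupby is only present when grouping was requested (assumption)
    print(s.groupby, s.period_start, s.avg)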
>>> from ceilometerclient import client as ceilometerclient
>>> ceilometer = ceilometerclient.get_client("2", os_auth_url="http://10.0.120.143:35357/v2.0", os_username="admin", os_password="ADMIN_PASS", os_tenant_name="admin" )
>>> ceilometer.resource.get("3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'Client' object has no attribute 'resource'
>>> ceilometer.resources.get("3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0")
<Resource {u'project_id': u'10bed47042c548958046bd1f7b944039', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'links': [{u'href': u'http://controller:8777/v2/resources/3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'self'}, {u'href': u'http://controller:8777/v2/meters/instance?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'instance'}, {u'href': u'http://controller:8777/v2/meters/instance:m1.hadoop?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'instance:m1.hadoop'}, {u'href': u'http://controller:8777/v2/meters/disk.write.requests?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.write.requests'}, {u'href': u'http://controller:8777/v2/meters/disk.read.bytes?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.read.bytes'}, {u'href': u'http://controller:8777/v2/meters/cpu?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'cpu'}, {u'href': u'http://controller:8777/v2/meters/disk.write.bytes?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.write.bytes'}, {u'href': u'http://controller:8777/v2/meters/disk.read.requests?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.read.requests'}, {u'href': u'http://controller:8777/v2/meters/disk.write.requests.rate?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.write.requests.rate'}, {u'href': u'http://controller:8777/v2/meters/disk.read.bytes.rate?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.read.bytes.rate'}, {u'href': u'http://controller:8777/v2/meters/disk.write.bytes.rate?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.write.bytes.rate'}, {u'href': u'http://controller:8777/v2/meters/cpu_util?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'cpu_util'}, {u'href': u'http://controller:8777/v2/meters/disk.read.requests.rate?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.read.requests.rate'}], u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0'}>
>>> ceilometer.resources.get("3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0")
<Resource {u'project_id': u'10bed47042c548958046bd1f7b944039', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'links': [{u'href': u'http://controller:8777/v2/resources/3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'self'}, {u'href': u'http://controller:8777/v2/meters/instance?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'instance'}, {u'href': u'http://controller:8777/v2/meters/instance:m1.hadoop?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'instance:m1.hadoop'}, {u'href': u'http://controller:8777/v2/meters/cpu_util?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'cpu_util'}, {u'href': u'http://controller:8777/v2/meters/cpu?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'cpu'}], u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0'}>
>>> ceilometer.meters.list(q=[{"field":"resource_id","op":"eq","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}])
[<Meter {u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'name': u'instance', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'source': u'openstack', u'meter_id': u'M2FjM2FiNGMtZTZmMC00NTJkLWJmZDMtOWNiMWUxYTNjZmUwK2luc3RhbmNl\n', u'project_id': u'10bed47042c548958046bd1f7b944039', u'type': u'gauge', u'unit': u'instance'}>, <Meter {u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'name': u'instance:m1.hadoop', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'source': u'openstack', u'meter_id': u'M2FjM2FiNGMtZTZmMC00NTJkLWJmZDMtOWNiMWUxYTNjZmUwK2luc3RhbmNlOm0xLmhhZG9vcA==\n', u'project_id': u'10bed47042c548958046bd1f7b944039', u'type': u'gauge', u'unit': u'instance'}>, <Meter {u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'name': u'cpu_util', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'source': u'openstack', u'meter_id': u'M2FjM2FiNGMtZTZmMC00NTJkLWJmZDMtOWNiMWUxYTNjZmUwK2NwdV91dGls\n', u'project_id': u'10bed47042c548958046bd1f7b944039', u'type': u'gauge', u'unit': u'%'}>, <Meter {u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'name': u'cpu', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'source': u'openstack', u'meter_id': u'M2FjM2FiNGMtZTZmMC00NTJkLWJmZDMtOWNiMWUxYTNjZmUwK2NwdQ==\n', u'project_id': u'10bed47042c548958046bd1f7b944039', u'type': u'cumulative', u'unit': u'ns'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}])
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:45', u'message_id': u'25bbd7fc-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:40', u'message_id': u'22c15932-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:35', u'message_id': u'1fc85802-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', 
u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:30', u'message_id': u'1ceeef6a-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:25', u'message_id': u'19cddcec-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:20', u'message_id': u'16d601ae-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': 
u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:15', u'message_id': u'13d83e68-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:10', u'message_id': u'10de6624-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:05', u'message_id': u'0de16d2c-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': 
u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:00', u'message_id': u'0ae5e274-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:55', u'message_id': u'07ebc2b4-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', 
u'timestamp': u'2014-03-24T19:53:50', u'message_id': u'04ee4d98-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:45', u'message_id': u'01f4c806-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:40', u'message_id': u'fef70c5e-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': 
u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:35', u'message_id': u'fbff9b6a-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:30', u'message_id': u'f903d930-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:25', u'message_id': u'f602a2ac-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': 
u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:20', u'message_id': u'f30b1084-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:15', u'message_id': u'f00c3b38-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:10', u'message_id': u'ed10d862-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', 
u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:05', u'message_id': u'ea216fd6-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:59', u'message_id': u'e70e93be-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:55', u'message_id': u'e41f588c-b38d-11e3-b621-0025b520019f', u'source': u'openstack', 
u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:50', u'message_id': u'e1261f3a-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:45', u'message_id': u'de2885b6-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': 
u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:40', u'message_id': u'db3097fe-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:35', u'message_id': u'd832dc6a-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:30', u'message_id': u'd5398572-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': 
u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:25', u'message_id': u'd23d868e-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:20', u'message_id': u'cf43a9cc-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:15', u'message_id': u'cc46da1e-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': 
u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>,
... (remaining <Sample> entries elided: same user_id, project_id, resource_id and resource_metadata as above, counter_volume 0.0 throughout, timestamps stepping back in roughly 5-second polling intervals from 2014-03-24T19:52:10 to 2014-03-24T19:48:07) ...]
>>> ceilometer.samples.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}], limit=3)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:55:30', u'message_id': u'40924a84-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:55:25', u'message_id': u'3da138d0-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:55:20', u'message_id': u'3a9b369a-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', 
u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}], limit=1)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:55:40', u'message_id': u'468ac984-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}], limit=1)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:57:30', u'message_id': u'882e91fe-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 2.625, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", limit=1)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3a4356d7-b877-4065-b25e-fca8e3651f30', u'timestamp': u'2014-03-24T20:02:51', u'message_id': u'47703fae-b38f-11e3-b7fa-0025b52001bf', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.049999999999999996, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Seph-002', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'348f40c5ef2be79e9aa9e1a69a48f4e9b3809661aff4c090126dd6ae', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053c', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", limit=2)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3a4356d7-b877-4065-b25e-fca8e3651f30', u'timestamp': u'2014-03-24T20:03:06', u'message_id': u'5060c822-b38f-11e3-b7fa-0025b52001bf', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.049999999999999996, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Seph-002', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'348f40c5ef2be79e9aa9e1a69a48f4e9b3809661aff4c090126dd6ae', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053c', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'bdd20e18-3d41-4ddd-ba65-2c0a967cb678', u'timestamp': u'2014-03-24T20:03:06', u'message_id': u'5079ccc8-b38f-11e3-b7fa-0025b52001bf', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'0', u'flavor.vcpus': u'2', u'flavor.ephemeral': u'0', u'display_name': u'HiBench_DO_NOT_DELETE', u'flavor.id': u'3', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.medium', u'disk_gb': u'40', u'kernel_id': u'None', u'image.id': u'b09a0b3b-5ef0-4752-9f05-68e7043e7504', u'flavor.ram': u'4096', u'host': u'348f40c5ef2be79e9aa9e1a69a48f4e9b3809661aff4c090126dd6ae', u'image.name': u'Ubuntu Precise', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/b09a0b3b-5ef0-4752-9f05-68e7043e7504', u'cpu_number': u'2', u'flavor.disk': u'40', u'root_gb': u'40', u'name': u'instance-00000539', u'memory_mb': u'4096', u'instance_type': u'3', u'vcpus': u'2', u'image_ref': u'b09a0b3b-5ef0-4752-9f05-68e7043e7504'}, u'counter_type': u'gauge'}>]
# novaclient v1.1 client: positional args are (username, api_key, project_id, auth_url);
# the password travels as api_key
from novaclient.v1_1 import client
nova = client.Client("admin", "ubuntu", "admin", "http://controller:35357/v2.0")
nova.flavors.list()
# generic entry point: the API version string comes first
from novaclient.client import Client
nova = Client("1.1", "admin", "ubuntu", "admin", "http://controller:35357/v2.0")
nova.servers.list()
from novaclient.client import Client
nova = Client("1.1", "admin", "ADMIN_PASS", "admin", "http://controller:35357/v2.0")
nova.servers.list()
# positional order: (VERSION, USERNAME, PASSWORD, PROJECT_ID, AUTH_URL)
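The same instantiation with keywords; in the 1.1 API the password is passed as api_key and the tenant as project_id, which is why the tenant_name attempt in the dump below raises TypeError. A minimal sketch (ADMIN_PASS is a placeholder, as above):
from novaclient import client as novaclient
# api_key carries the password, project_id the tenant name (1.1 naming)
nova = novaclient.Client("1.1", username="admin", api_key="ADMIN_PASS",
                         project_id="admin",
                         auth_url="http://controller:35357/v2.0")
print(nova.servers.list())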
DUMP:
>>> import keystoneclient.v2_0.client as ksclient
>>> auth_url = "http://192.168.255.191:35357/v2.0"
>>> username = "admin"
>>> password = "ubuntu"
>>> tenant_name = "admin"
>>> keystone = ksclient.Client(auth_url=auth_url, username=username, password=password, tenant_name=tenant_name)
>>> keystone.auth_token
u'MIINsAYJKoZIhvcNAQcCoIINoTCCDZ0CAQExCTAHBgUrDgMCGjCCDAYGCSqGSIb3DQEHAaCCC-cEggvzeyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNC0wMy0xN1QxNjo0MDo1Ni42NTY0NTAiLCAiZXhwaXJlcyI6ICIyMDE0LTAzLTE4VDE2OjQwOjU2WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogbnVsbCwgImVuYWJsZWQiOiB0cnVlLCAiaWQiOiAiNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAibmFtZSI6ICJhZG1pbiJ9fSwgInNlcnZpY2VDYXRhbG9nIjogW3siZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo4Nzc0L3YyLzQ5YjI0YTA4NzlmZjQ3ODY5ZjBkOWNmMTQ3NTU2ZjgzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo4Nzc0L3YyLzQ5YjI0YTA4NzlmZjQ3ODY5ZjBkOWNmMTQ3NTU2ZjgzIiwgImlkIjogIjA5ZGIwYTUwOGFhYjRlMWViOGRhMTY0NzVjOGJiZWViIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzQvdjIvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZSIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzYvdjIvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzYvdjIvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAiaWQiOiAiYmVkYzkwNGQ5MGZmNDNlY2I5MDlkNjAxODFmM2VmYTciLCAicHVibGljVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3Ni92Mi80OWIyNGEwODc5ZmY0Nzg2OWYwZDljZjE0NzU1NmY4MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWV2MiIsICJuYW1lIjogImNpbmRlciJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3NC92MyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3NC92MyIsICJpZCI6ICI1MGQ1MTA2Nzc3MjY0MWNmOWRjMjExYzNkNzJlNDUxNCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo4Nzc0L3YzIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImNvbXB1dGV2MyIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjMzMzMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjMzMzMiLCAiaWQiOiAiMmVhYTAzNDRjNzMzNDlkZTljYmFjMWU5NTIzNTQ0Y2QiLCAicHVibGljVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6MzMzMyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJzMyIsICJuYW1lIjogInMzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo5MjkyIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo5MjkyIiwgImlkIjogIjBiMjhiNWVlNTg3YzQ2N2Q4ODMwNjc1YTNkNjBlODc5IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjkyOTIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaW1hZ2UiLCAibmFtZSI6ICJnbGFuY2UifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzYvdjEvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzYvdjEvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAiaWQiOiAiODViODRjMTJlYjQyNDVkZDk2MmZhZWEyNDg2ODM1ZTciLCAicHVibGljVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3Ni92MS80OWIyNGEwODc5ZmY0Nzg2OWYwZDljZjE0NzU1NmY4MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWUiLCAibmFtZSI6ICJjaW5kZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzMvc2VydmljZXMvQWRtaW4iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzMvc2VydmljZXMvQ2xvdWQiLCAiaWQiOiAiNWY5NDg1ZjlhYTk5NGQ4M2I4MTg2MWM0N2EyZDE5NTciLCAicHVibGljVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3My9zZXJ2aWNlcy9DbG91ZCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJlYzIiLCAibmFtZSI6ICJlYzIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjM1MzU3L
3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjUwMDAvdjIuMCIsICJpZCI6ICIzM2QyZGE0ZTliOWU0MGExOTY1YThkMGQ4NmY1MTQyOCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo1MDAwL3YyLjAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaWRlbnRpdHkiLCAibmFtZSI6ICJrZXlzdG9uZSJ9XSwgInVzZXIiOiB7InVzZXJuYW1lIjogImFkbWluIiwgInJvbGVzX2xpbmtzIjogW10sICJpZCI6ICJkMDEwYjc2NTQ3Mjg0M2E1OTY5MmU1MDY0MmVmZmM2YiIsICJyb2xlcyI6IFt7Im5hbWUiOiAiYWRtaW4ifV0sICJuYW1lIjogImFkbWluIn0sICJtZXRhZGF0YSI6IHsiaXNfYWRtaW4iOiAwLCAicm9sZXMiOiBbIjYyNzEzYzk1ZGEzMDQwNjBhYjRkYWNhZGI3MmE1N2ZkIl19fX0xggGBMIIBfQIBATBcMFcxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVVbnNldDEOMAwGA1UEBwwFVW5zZXQxDjAMBgNVBAoMBVVuc2V0MRgwFgYDVQQDDA93d3cuZXhhbXBsZS5jb20CAQEwBwYFKw4DAhowDQYJKoZIhvcNAQEBBQAEggEAlU5wvh7RKqiBtweHRf5WL2Mdd3FpKH3mzyYjmQNSnq3T1qdLjw9OZqTXoPD34guTrfT+9wyZUI83gEd0jVB8jW754iAP5sFXeEZfY2zl7R20duBdNwYtYecE-VpAjLHguNL5vSNNffrqDwX-g--OVdGzDfCItRthCrR1e4Xlsc1AIlVHfL3GkGllp6s+d06PkLrT72hCcqq7+8uA97eCa32aLDnrHTp-ZZbWAWk2m5jjb-iMp7IiM3lSjKSrx-bzuK4lkrWzXYgpbDMExeU669hLv39OlqaPp+TkumH0f6wBjuPCufvIkoT7OJynWAWNeliHoWRKtAgOA2PUeh6zeg=='
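The token above embeds the full service catalog, so service endpoints need not be hard-coded. A sketch using keystoneclient's url_for helper; the service_type/endpoint_type values here are assumptions matching the catalog entries visible in the token:
token = keystone.auth_token
# url_for picks an endpoint out of the catalog returned at auth time
nova_endpoint = keystone.service_catalog.url_for(service_type="compute",
                                                 endpoint_type="publicURL")
print(nova_endpoint)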
>>> from novaclient import client as novaclient
>>> nova = novaclient.Client("1.1", auth_url=auth_url, username=username, password=password, tenant_name=tenant_name)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/opt/stack/python-novaclient/novaclient/client.py", line 506, in Client
return client_class(*args, **kwargs)
TypeError: __init__() got an unexpected keyword argument 'tenant_name'
>>> nova = novaclient.Client("1.1", username=username, api_key=password, auth_url=auth_url, project_id=tenant_name)
>>> nova.servers.list()
[]
>>> nova.servers.list()
[<Server: blarg-9a77ff67-53e8-4abe-a3fe-9a77405d03c8>, <Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> server = nova.servers.find(name="blarg-9a77ff67-53e8-4abe-a3fe-9a77405d03c8")
>>> server.delete()
>>> nova.servers.list.details()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute 'details'
>>> server = nova.servers.find(name="blarg-54ba2260-dde5-4953-a135-01b81b80f96a")
>>> server.diagnostics()
(<Response [200]>, None)
>>> server.networks()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'dict' object is not callable
>>> server.list_security_group()
[<SecurityGroup description=default, id=1, name=default, rules=[], tenant_id=49b24a0879ff47869f0d9cf147556f83>]
>>> nova.servers.list(detailed=True)
[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> nova.servers.list(detailed="True")
[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> server.list(detailed="True")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/opt/stack/python-novaclient/novaclient/openstack/common/apiclient/base.py", line 464, in __getattr__
raise AttributeError(k)
AttributeError: list
>>> str(nova.servers.list())
'[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]'
>>> nova.servers.list(detailed=True)
[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> server = nova.servers.list(detailed=True)
>>> print(server)
[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> print(server[1])
<Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>
>>> print(vars(server[1]))
{'OS-EXT-STS:task_state': None, 'addresses': {u'private': [{u'OS-EXT-IPS-MAC:mac_addr': u'fa:16:3e:59:90:a5', u'version': 4, u'addr': u'10.0.0.4', u'OS-EXT-IPS:type': u'fixed'}]}, 'links': [{u'href': u'http://192.168.255.191:8774/v2/49b24a0879ff47869f0d9cf147556f83/servers/54ba2260-dde5-4953-a135-01b81b80f96a', u'rel': u'self'}, {u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/servers/54ba2260-dde5-4953-a135-01b81b80f96a', u'rel': u'bookmark'}], 'image': {u'id': u'c1faa392-1a44-4ae1-aac1-cec18184d011', u'links': [{u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/images/c1faa392-1a44-4ae1-aac1-cec18184d011', u'rel': u'bookmark'}]}, 'manager': <novaclient.v1_1.servers.ServerManager object at 0x2c6b290>, 'OS-EXT-STS:vm_state': u'stopped', 'OS-EXT-SRV-ATTR:instance_name': u'instance-00000001', 'OS-SRV-USG:launched_at': u'2014-03-17T17:16:07.000000', 'flavor': {u'id': u'84', u'links': [{u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/flavors/84', u'rel': u'bookmark'}]}, 'id': u'54ba2260-dde5-4953-a135-01b81b80f96a', 'security_groups': [{u'name': u'default'}], 'user_id': u'd010b765472843a59692e50642effc6b', 'OS-DCF:diskConfig': u'MANUAL', 'accessIPv4': u'', 'accessIPv6': u'', 'OS-EXT-STS:power_state': 4, 'OS-EXT-AZ:availability_zone': u'nova', 'config_drive': u'', 'status': u'SHUTOFF', 'updated': u'2014-03-17T17:17:19Z', 'hostId': u'38648e03ba0f2467f3f31f6397289dd219c364264d8b9c905fe63fb5', 'OS-EXT-SRV-ATTR:host': u'ubuntu', 'OS-SRV-USG:terminated_at': None, 'key_name': None, 'OS-EXT-SRV-ATTR:hypervisor_hostname': u'ubuntu', 'name': u'blarg-54ba2260-dde5-4953-a135-01b81b80f96a', 'created': u'2014-03-17T17:15:24Z', 'tenant_id': u'49b24a0879ff47869f0d9cf147556f83', 'os-extended-volumes:volumes_attached': [], '_info': {u'OS-EXT-STS:task_state': None, u'addresses': {u'private': [{u'OS-EXT-IPS-MAC:mac_addr': u'fa:16:3e:59:90:a5', u'version': 4, u'addr': u'10.0.0.4', u'OS-EXT-IPS:type': u'fixed'}]}, u'links': [{u'href': u'http://192.168.255.191:8774/v2/49b24a0879ff47869f0d9cf147556f83/servers/54ba2260-dde5-4953-a135-01b81b80f96a', u'rel': u'self'}, {u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/servers/54ba2260-dde5-4953-a135-01b81b80f96a', u'rel': u'bookmark'}], u'image': {u'id': u'c1faa392-1a44-4ae1-aac1-cec18184d011', u'links': [{u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/images/c1faa392-1a44-4ae1-aac1-cec18184d011', u'rel': u'bookmark'}]}, u'OS-EXT-STS:vm_state': u'stopped', u'OS-EXT-SRV-ATTR:instance_name': u'instance-00000001', u'OS-SRV-USG:launched_at': u'2014-03-17T17:16:07.000000', u'flavor': {u'id': u'84', u'links': [{u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/flavors/84', u'rel': u'bookmark'}]}, u'id': u'54ba2260-dde5-4953-a135-01b81b80f96a', u'security_groups': [{u'name': u'default'}], u'user_id': u'd010b765472843a59692e50642effc6b', u'OS-DCF:diskConfig': u'MANUAL', u'accessIPv4': u'', u'accessIPv6': u'', u'OS-EXT-STS:power_state': 4, u'OS-EXT-AZ:availability_zone': u'nova', u'config_drive': u'', u'status': u'SHUTOFF', u'updated': u'2014-03-17T17:17:19Z', u'hostId': u'38648e03ba0f2467f3f31f6397289dd219c364264d8b9c905fe63fb5', u'OS-EXT-SRV-ATTR:host': u'ubuntu', u'OS-SRV-USG:terminated_at': None, u'key_name': None, u'OS-EXT-SRV-ATTR:hypervisor_hostname': u'ubuntu', u'name': u'blarg-54ba2260-dde5-4953-a135-01b81b80f96a', u'created': u'2014-03-17T17:15:24Z', u'tenant_id': u'49b24a0879ff47869f0d9cf147556f83', 
u'os-extended-volumes:volumes_attached': [], u'metadata': {}}, 'metadata': {}, '_loaded': True} | 697.064706 | 86,836 | 0.751715 | 18,478 | 118,501 | 4.713659 | 0.025706 | 0.022319 | 0.017681 | 0.053043 | 0.91063 | 0.897106 | 0.891227 | 0.885303 | 0.881652 | 0.877324 | 0 | 0.228606 | 0.05216 | 118,501 | 170 | 86,837 | 697.064706 | 0.546848 | 0 | 0 | 0.229508 | 0 | 0.196721 | 0.713283 | 0.298093 | 0 | 1 | 0.000076 | 0 | 0 | 0 | null | null | 0.106557 | 0.081967 | null | null | 0.02459 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 13 |
8ff3bdda674796924b0e68c0b221221c50c71a48 | 79,654 | py | Python | src/BAMS_Thesaurus_All_Encompassing.py | rsoscia/BAMS-to-NeuroLex | e6c3b23725e63c0c9a70a7ef8c7a9ca0789ae153 | [
"MIT"
] | 1 | 2015-11-10T05:20:20.000Z | 2015-11-10T05:20:20.000Z | src/BAMS_Thesaurus_All_Encompassing.py | rsoscia/BAMS-to-NeuroLex | e6c3b23725e63c0c9a70a7ef8c7a9ca0789ae153 | [
"MIT"
] | null | null | null | src/BAMS_Thesaurus_All_Encompassing.py | rsoscia/BAMS-to-NeuroLex | e6c3b23725e63c0c9a70a7ef8c7a9ca0789ae153 | [
"MIT"
] | null | null | null | #This is an all encompassing program that does everything at once, hopefully placing all
#of the BAMS query results into a single CSV file
#doesn't run properly unless the path is accessed first, interactive python is activated,
#and the code is pasted into terminal..
#Only run the below persist section once:
#Persist Begin
#For Parsing
import rdflib
from rdflib import plugin
#for getting the length of the files
import os
#for working with tempfiles
import os.path as op
import tempfile
#For Unzipping
import zipfile
from StringIO import StringIO
plugin.register(
'sparql', rdflib.query.Processor,
'rdfextras.sparql.processor', 'Processor')
plugin.register(
'sparql', rdflib.query.Result,
'rdfextras.sparql.query', 'SPARQLQueryResult')
zipdata = StringIO()
# open the file using a relative path
#r = open("../Data/BAMS1.zip")
# adding the BAMS Thesaurus instead of the more limited set of data:
#r = open("../Data/bams_thesaurus_2013-09-24_17-12-40.xml.zip")
# Fixed RDF
r = open("../Data/bams_thesaurus_2013-10-06_14-58-56.xml.zip")
#ADDITIONAL CONTENT
#r = open("../Data/bams_ontology_2013-10-16_20-34-52.xml.zip")
# zipdata is a buffer holding the contents of the zip file in memory
zipdata.write(r.read())
print("~40 seconds for zip to open...")
#myzipfile opens the contents of the zip file as an object that knows how to unzip
myzipfile = zipfile.ZipFile(zipdata)
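#Sketch (assumption, not in the original session): the XML member name could be
#discovered from the archive listing instead of being hard-coded below, using the
#standard zipfile.ZipFile.namelist() call:
xml_members = [n for n in myzipfile.namelist() if n.endswith('.xml')]
print("zip members ending in .xml: " + ", ".join(xml_members))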
#grab the contents out of myzipfile by name
#foofile = myzipfile.open('bams_ontology_2013-07-10_03-20-00.xml')
#changing the foofile to be the file we open above^^^^^ in r = open()....etc.
#foofile = myzipfile.open('bams_thesaurus_2013-09-24_17-12-40.xml')
# Fixed RDF
foofile = myzipfile.open('bams_thesaurus_2013-10-06_14-58-56.xml')
#ADDITIONAL CONTENT
#foofile = myzipfile.open('bams_ontology_2013-10-16_20-34-52.xml')
print("loading up the BAMS file in memory...")
#Get a Graph object using a Sleepycat persistent store
g = rdflib.Graph('Sleepycat',identifier='BAMS')
# first time create the store
# put the store in a temp directory so it doesn't get confused with stuff we should commit
tempStore = op.join(tempfile.gettempdir(), 'myRDF_BAMS_Store')
g.open(tempStore, create=True)
#pull in the BAMS RDF document, parse, and store.
#result = g.parse(file=myzipfile.open('bams_ontology_2013-07-10_03-20-00.xml'), format="application/rdf+xml")
#do the same thing but with the BAMS thesaurus file
#result = g.parse(file=myzipfile.open('bams_thesaurus_2013-09-24_17-12-40.xml'), format="application/rdf+xml")
# Fixed RDF
result = g.parse(file=myzipfile.open('bams_thesaurus_2013-10-06_14-58-56.xml'), format="application/rdf+xml")
#ADDITIONAL CONTENT
#result = g.parse(file=myzipfile.open('bams_ontology_2013-10-16_20-34-52.xml'), format="application/rdf+xml")
foofile.close()
# when done!
g.close()
print("Graph stored to disk")
#WORKS PERFECTLY
#Persist End
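#A minimal guard for the "only run once" note at the top (sketch; assumes the
#presence of the Sleepycat store directory is enough to detect an earlier run):
if op.exists(op.join(tempfile.gettempdir(), 'myRDF_BAMS_Store')):
    print("store already created; the persist section above can be skipped")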
#########################################################################################
#For Parsing
import rdflib
from rdflib import plugin
#for getting the length of the files
import os
#for working with tempfiles
import os.path as op
import tempfile
#for csv output
import csv
plugin.register(
'sparql', rdflib.query.Processor,
'rdfextras.sparql.processor', 'Processor')
plugin.register(
'sparql', rdflib.query.Result,
'rdfextras.sparql.query', 'SPARQLQueryResult')
#Get a Graph object
g = rdflib.Graph('Sleepycat',identifier='BAMS')
print("loading up the BAMS file in memory...")
# assumes myRDF_BAMS_Store has been created
tempStore = op.join( tempfile.gettempdir(), 'myRDF_BAMS_Store')
g.open(tempStore)
print("going to get results...")
print("The graph has " + str(len(g)) + " items in it")
#BAMS Thesaurus content has 3797 items in it
#additional BAMS content (graph) has 167178 items in it
#BASAL GANGLIA OF TELENCEPHALON QUERY:
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:entry "Basal ganglia of telencephalon" .
?subject ?predicate ?object
}""")
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-of-telencephalon/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-of-telencephalon/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-of-telencephalon/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/ranson-sw-1920/">Ranson, 1920</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-of-telencephalon/ http://brancusi1.usc.edu/RDF/definition For macrodissected adult humans it includes the caudate and lentiform (putamen and globus pallidus) nuclei, amygdala, and claustrum (p. 252) and is thus not synonymous with <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>. More recently it was used in Ranson's sense by for example Clark (1951, p. 968).
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-of-telencephalon/ http://brancusi1.usc.edu/RDF/entry Basal ganglia of telencephalon
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-of-telencephalon/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-of-telencephalon
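#The entry-lookup pattern above recurs below with different labels; a small helper
#(sketch; plain %-interpolation of the label is an assumption, acceptable for
#these fixed strings) keeps the SPARQL in one place:
def query_by_entry(graph, label):
    return graph.query(
        """PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
        SELECT ?subject ?predicate ?object
        WHERE {
        ?subject bamsProp:entry "%s" .
        ?subject ?predicate ?object
        }""" % label)
#e.g. qres = query_by_entry(g, "Basal ganglia of telencephalon")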
qres = g.query(
"""SELECT ?subject ?predicate
WHERE {
?subject ?predicate ?text .
FILTER regex(?text, "^basal", "i")
}""")
for r in qres.result:
    print str(r[0]), str(r[1])
#Results:
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/carpenter-mb-1976/">Carpenter, 1976</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/definition For macrodissected adult humans it includes the caudate and lenticular nuclei and the amygdala, and is thus not synonymous with <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; p. 496.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-4
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/ferrier-d-1876/">Ferrier, 1876</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/definition In modern terms includes for macrodissected adult monkeys and humans the <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a> and <a href="/thesaurus/definition/interbrain/"><span class="synonim_bold">interbrain (Baer, 1837)</span></a> considered together; pp. 8, 236.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/slug basal-ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/strong-os-elwyn-a-1943/">Strong & Elwyn, 1943</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/definition Synonym for basal ganglia of telencephalon (Ranson, 1920) in macrodissected adult humans, and thus not synonymous with <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; p. 319.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-2
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/warwick-r-williams-pl-eds-1973/">Warwick & Williams, 1973</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/definition Synonym for <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; see Warwick & Williams (1973, p. 805; and Williams & Warwick, 1980, p. 864). Its use is discouraged because reference to <a href="/thesaurus/definition/ganglia/"><span class="synonim_bold">ganglia (Galen, c173)</span></a> in the <a href="/thesaurus/definition/cerebrospinal-axis/"><span class="synonim_bold">cerebrospinal axis (Meckel, 1817)</span></a> is archaic; and because "basal ganglia" today usually refers to a functional system that includes components in the <a href="/thesaurus/definition/forebrain-2/"><span class="synonim_bold">forebrain (Goette, 1873)</span></a> and <a href="/thesaurus/definition/midbrain/"><span class="synonim_bold">midbrain (Baer, 1837)</span></a>, rather than to a <a href="/thesaurus/definition/topographic-division/"><span class="synonim_bold">topographic division</span></a> of the <a href="/thesaurus/definition/endbrain/"><span class="synonim_bold">endbrain (Kuhlenbeck, 1927)</span></a>; see Anthoney (1994, pp. 106-109), DeLong & Wichmann (2007), and Federative Committee on Anatomical Terminology (1998, *A14.1.09.501).
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-3
qres = g.query(
"""SELECT ?predicate ?object
WHERE {
<http://brancusi1.usc.edu/RDF/thesaurus> ?predicate ?object .
} LIMIT 5""")
for r in qres.result:
    print str(r[0]), str(r[1])
#Results:
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/carpenter-mb-1976/">Carpenter, 1976</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/definition For macrodissected adult humans it includes the caudate and lenticular nuclei and the amygdala, and is thus not synonymous with <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; p. 496.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-4
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/ferrier-d-1876/">Ferrier, 1876</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/definition In modern terms includes for macrodissected adult monkeys and humans the <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a> and <a href="/thesaurus/definition/interbrain/"><span class="synonim_bold">interbrain (Baer, 1837)</span></a> considered together; pp. 8, 236.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/slug basal-ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/strong-os-elwyn-a-1943/">Strong & Elwyn, 1943</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/definition Synonym for basal ganglia of telencephalon (Ranson, 1920) in macrodissected adult humans, and thus not synonymous with <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; p. 319.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-2
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/warwick-r-williams-pl-eds-1973/">Warwick & Williams, 1973</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/definition Synonym for <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; see Warwick & Williams (1973, p. 805; and Williams & Warwick, 1980, p. 864). Its use is discouraged because reference to <a href="/thesaurus/definition/ganglia/"><span class="synonim_bold">ganglia (Galen, c173)</span></a> in the <a href="/thesaurus/definition/cerebrospinal-axis/"><span class="synonim_bold">cerebrospinal axis (Meckel, 1817)</span></a> is archaic; and because "basal ganglia" today usually refers to a functional system that includes components in the <a href="/thesaurus/definition/forebrain-2/"><span class="synonim_bold">forebrain (Goette, 1873)</span></a> and <a href="/thesaurus/definition/midbrain/"><span class="synonim_bold">midbrain (Baer, 1837)</span></a>, rather than to a <a href="/thesaurus/definition/topographic-division/"><span class="synonim_bold">topographic division</span></a> of the <a href="/thesaurus/definition/endbrain/"><span class="synonim_bold">endbrain (Kuhlenbeck, 1927)</span></a>; see Anthoney (1994, pp. 106-109), DeLong & Wichmann (2007), and Federative Committee on Anatomical Terminology (1998, *A14.1.09.501).
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-3
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:entry "Basal ganglia" .
?subject ?predicate ?object
}""")
for r in qres.result:
    print str(r[0]), str(r[1]), str(r[2])
#Results:
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/carpenter-mb-1976/">Carpenter, 1976</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/definition For macrodissected adult humans it includes the caudate and lenticular nuclei and the amygdala, and is thus not synonymous with <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; p. 496.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-4/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-4
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/ferrier-d-1876/">Ferrier, 1876</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/definition In modern terms includes for macrodissected adult monkeys and humans the <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a> and <a href="/thesaurus/definition/interbrain/"><span class="synonim_bold">interbrain (Baer, 1837)</span></a> considered together; pp. 8, 236.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia/ http://brancusi1.usc.edu/RDF/slug basal-ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/strong-os-elwyn-a-1943/">Strong & Elwyn, 1943</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/definition Synonym for basal ganglia of telencephalon (Ranson, 1920) in macrodissected adult humans, and thus not synonymous with <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; p. 319.
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-2/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-2
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/thesaurus
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/reference <a target="_blank" href="/thesaurus/reference/warwick-r-williams-pl-eds-1973/">Warwick & Williams, 1973</a>
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/definition Synonym for <a href="/thesaurus/definition/cerebral-nuclei/"><span class="synonim_bold">cerebral nuclei (Swanson, 2000)</span></a>; see Warwick & Williams (1973, p. 805; and Williams & Warwick, 1980, p. 864). Its use is discouraged because reference to <a href="/thesaurus/definition/ganglia/"><span class="synonim_bold">ganglia (Galen, c173)</span></a> in the <a href="/thesaurus/definition/cerebrospinal-axis/"><span class="synonim_bold">cerebrospinal axis (Meckel, 1817)</span></a> is archaic; and because "basal ganglia" today usually refers to a functional system that includes components in the <a href="/thesaurus/definition/forebrain-2/"><span class="synonim_bold">forebrain (Goette, 1873)</span></a> and <a href="/thesaurus/definition/midbrain/"><span class="synonim_bold">midbrain (Baer, 1837)</span></a>, rather than to a <a href="/thesaurus/definition/topographic-division/"><span class="synonim_bold">topographic division</span></a> of the <a href="/thesaurus/definition/endbrain/"><span class="synonim_bold">endbrain (Kuhlenbeck, 1927)</span></a>; see Anthoney (1994, pp. 106-109), DeLong & Wichmann (2007), and Federative Committee on Anatomical Terminology (1998, *A14.1.09.501).
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/entry Basal ganglia
http://brancusi1.usc.edu/thesaurus/definition/basal-ganglia-3/ http://brancusi1.usc.edu/RDF/slug basal-ganglia-3
import csv
for r in qres.result:
    print str(r[1][0]) #first character of each predicate URI
c = csv.writer(open("BAMS_Thesaurus_Data4Upload.csv","wb"))
c.writerows(qres.result)
#The data from the above query is stored in BAMS_Thesaurus_Data4Upload.csv
# for results that are objects -- store some place
# for results that are subjects -- store some place
# for results that are predicates -- store some place
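#Sketch of one way to store them: a header row plus one string-converted row per
#triple (the "_labeled" output file name is an assumption, not from the session):
labeled = csv.writer(open("BAMS_Thesaurus_Data4Upload_labeled.csv", "wb"))
labeled.writerow(["subject", "predicate", "object"])
for s, p, o in qres.result:
    labeled.writerow([str(s), str(p), str(o)])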
#subject of first triple
print qres.result[0][0]
#predicate of first triple
print qres.result[0][1]
#object of first triple
print qres.result[0][2]
#all subjects of query
for r in qres.result:
    print str(r[0])
#First Triple
print qres.result[0][0]
print qres.result[0][1]
print qres.result[0][2]
#Second Triple
print qres.result[1][0]
print qres.result[1][1]
print qres.result[1][2]
with open('mycsvfileV1.csv', 'wb') as f: # Just use 'w' mode in 3.x
    #First Entire Triple, Second Entire Triple, Third Entire Triple.....
    #BAMS_Dict = {"Subject": qres.result[0], "Predicate": qres.result[1], "Object": qres.result[2]}
    #Subject Of First Triple, Predicate Of First Triple, Object Of First Triple.....
    BAMS_Dict = {"Subject": qres.result[0][0], "Predicate": qres.result[0][1], "Object": qres.result[0][2]}
    w = csv.DictWriter(f, BAMS_Dict.keys())
    w.writeheader()
    w.writerow(BAMS_Dict)
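#Sketch: writing every triple rather than just the first one; the
#'mycsvfileV2.csv' output name is an assumption, not from the session:
with open('mycsvfileV2.csv', 'wb') as f2:
    w2 = csv.DictWriter(f2, ["Subject", "Predicate", "Object"])
    w2.writeheader()
    for s, p, o in qres.result:
        w2.writerow({"Subject": str(s), "Predicate": str(p), "Object": str(o)})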
#Check To See If A DictWriter Library Of Some Sort Is Required For Access To These Methods
#for row in BAMS_DICT:
#out_f.write("%s%s" %(delimiter.join([row[name] for name in f]), lineterminator))
#DictWriter.writerows(...) #unbound class call; would be w.writerows(...) on the instance
#w.writerows(qres.result)
#Work Up To Here
#Continue Adding The Additional RDF Content Under the BAMS Dict.. Read documentation before implementing...
for r in qres.result:
    b = iter(r).next()
    w.writerow(b)
for r in qres.result:
    b = iter(r).next()
    w.writerow(b)
for r in qres.result:
    b = iter(r).next()
    print b
b = iter(qres.result).next()
print b
for r in qres.result:
    b = iter(r).next()
    print b
#Below is strictly experimental stuff.. and it gets messy
###############################################
emptyList = []
for r in qres.result:
    #print str(r[0]), str(r[1]), str(r[2])
    print str(r[0][0]) #gives the first position in the first triple, "h" for the url
c = csv.writer(open("BAMS_Thesaurus_Data4Upload.csv","wb"))
c.writerows(qres.result) #writes all of the data in a triple format (S, P, O)
#z = 48
#xx = 0
#for z in str(r[0]):
str(r[0])
counter = 0
for r in qres.result:
    #z = 48
    #if r >= z:
    #print r[0][z]
    #z = z + 1
    print r[0]
    counter = counter + 1
#the while loop below indexed with an undefined i and never terminated;
#kept commented out as scratch work:
#while counter > 46:
#    print str(r[i][counter])
####
#### left off here trying to parse new lists created to remove the http:// prefix on a lot of the subjects, etc. in the triple
####
for pos, z in enumerate(str(r[0])):
    #str(z)
    #print z
    #x=x+1
    #print x
    if pos > 48: #index past the URL prefix, not a character comparison
        #declare an array and put items into it as needed.
        #list(str(z))
        emptyList.append(str(z))
print emptyList
#now the data is already written -- all of it.
#next we open the file again, read it, and rewrite it.
i = -1
while 0 > r+1: #experimental condition left from the session
    with open('BAMS_Thesaurus_Data4Upload.csv', 'rb') as csvfile:
        dialect = csv.Sniffer().sniff(csvfile.read(100))
        csvfile.seek(46)
        reader = csv.reader(csvfile, dialect)
        #register 'dialect' as a new dialect:
        csv.register_dialect('dialect', delimiter=':', quoting=csv.QUOTE_NONE)
        c.writerows(qres.result)
    i += 1
    #i = i+r
with open('BAMS_Thesaurus_Data4Upload.csv', 'wb') as csvfile:
    orgWriter = csv.writer(csvfile, delimiter=' ',
                           quotechar='|', quoting=csv.QUOTE_MINIMAL)
    orgWriter.writerow(['Spam'] * 5 + ['Baked Beans'])
    orgWriter.writerow(['Spam', 'Lovely Spam', 'Wonderful Spam'])
import csv
csv.register_dialect('unixpwd', delimiter=':', quoting=csv.QUOTE_NONE)
with open('passwd', 'rb') as f:
    reader = csv.reader(f, 'unixpwd')
csv.list_dialects()
# Use this as the parsing prefix:
# http://brancusi1.usc.edu/RDF/
# make the dialects refer to the strings after the above prefix
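#Sketch: plain string slicing handles that more directly than a csv dialect
#(assumes URIs start with the RDF prefix; anything else passes through):
prefix = "http://brancusi1.usc.edu/RDF/"
def strip_prefix(uri):
    s = str(uri)
    return s[len(prefix):] if s.startswith(prefix) else s
print strip_prefix("http://brancusi1.usc.edu/RDF/name") #prints "name"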
import csv
for r in qres.result:
    c = csv.writer(open("BAMS_Thesaurus_Data4Upload.csv","wb"))
    c.writerows(qres.result) #note: reopening in "wb" each pass just rewrites the same file
csv.list_dialects() #works
####c.list_dialects() #does not work
print str(csv.get_dialect('excel-tab'))
print str(csv.get_dialect('excel'))
#develop a sniffer that can read the prefix http://brancusi1.usc.edu/RDF/....etc.
#start at the string after the prefix http://brancusi1.usc.edu/RDF/
#csv.Sniffer.sniff(csv.read(1024))
with open('BAMS_Thesaurus_Data4Upload.csv', 'rb') as csvfile:
    dialect = csv.Sniffer().sniff(csvfile.read(1024))
    csvfile.seek(0)
    reader = csv.reader(csvfile, dialect)
#print str(r[0]), str(r[1]), str(r[2])
c = csv.writer(open("BAMS_Thesaurus_Data4Upload.csv","wb"))
#c.read()
# gives us the triple info in each cell (notice it's not in string format); it's pretty ugly
#c.writerow(qres.result)
# regardless of the format, I'm going to index this first
# figure out how to place at the next
# need to access each individual part of the triple
# making row plural allows for this type of functionality
#################################################################
#csv.DictWriter.writeheader('subject', 'predicate', 'object')
#################################################################
c.writerows(qres.result)
#c is a csv writer, so sniffing has to read the file itself; reopen it:
with open("BAMS_Thesaurus_Data4Upload.csv", 'rb') as csvfile:
    dialect = csv.Sniffer().sniff(csvfile.read(1024))
    csvfile.seek(0)
    reader = csv.reader(csvfile, dialect)
    print str(reader)
##############################################################################################
#BASAL GANGLIA QUERY:
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:entry "Basal ganglia" .
?subject ?predicate ?object
}""")
for r in qres.result:
    print str(r[0]), str(r[1]), str(r[2])
#BASAL NUCLEI QUERY:
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:entry "Basal nuclei" .
?subject ?predicate ?object
}""")
for r in qres.result:
    print str(r[0]), str(r[1]), str(r[2])
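#Sketch: the two entry labels above could be fetched in one pass with a FILTER
#alternation (the || operator is standard SPARQL; assuming the rdfextras
#processor accepts it):
qres_both = g.query(
    """PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
    SELECT ?subject ?predicate ?object
    WHERE {
    ?subject bamsProp:entry ?entry .
    ?subject ?predicate ?object .
    FILTER (?entry = "Basal ganglia" || ?entry = "Basal nuclei")
    }""")
for r in qres_both.result:
    print str(r[0]), str(r[1]), str(r[2])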
#Test Query:
#qres = g.query(
# """SELECT ?subject ?predicate
# WHERE {
# ?subject ?predicate ?text .
# FILTER regex(?text, "^basal", "i")
# }""")
#
#for r in qres.result:
# print str(r[0]), str(r[1])
qres = g.query(
"""SELECT ?subject ?predicate
WHERE {
?subject ?predicate ?text .
FILTER regex(?text, "^basal", "i")
}""")
for r in qres.result:
    print str(r[0]), str(r[1])
#WORKS
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:term "Basal ganglia" .
?subject ?predicate ?object
}""")
for r in qres.result:
    print str(r[0]), str(r[1]), str(r[2])
#DOESN'T WORK
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:entry "Basal ganglia" .
?subject ?predicate ?object
}""")
for r in qres.result:
    print str(r[0]), str(r[1]), str(r[2])
#DOESN'T WORK
#Query:
qres = g.query(
"""SELECT ?subject ?predicate ?text
WHERE {
?subject ?predicate ?text .
FILTER regex(?text, "^basal", "i")
} LIMIT 10""")
for r in qres.result:
    print str(r[0]), str(r[1])
#works but is not very useful
#Results:
http://brancusi1.usc.edu/brain_parts/BASAL-AMYGDALOID-NUCLEUS/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-dorsal-horn-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/BASAL-PART-OF-PONS-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-nucleus/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/BASAL-PART-OF-PONS/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/BASAL-GANGLIA/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-dorsal-horn/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-spinal-cord/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-spinal-cord-general/ http://brancusi1.usc.edu/RDF/name
##SAME QUERY WITHOUT LIMIT:
qres = g.query(
"""SELECT ?subject ?predicate ?text
WHERE {
?subject ?predicate ?text .
FILTER regex(?text, "^basal", "i")
}""")
for r in qres.result:
    print str(r[0]), str(r[1])
#RESULTS:
http://brancusi1.usc.edu/brain_parts/BASAL-AMYGDALOID-NUCLEUS/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-dorsal-horn-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/BASAL-PART-OF-PONS-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-nucleus/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/BASAL-PART-OF-PONS/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/BASAL-GANGLIA/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-dorsal-horn/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-spinal-cord/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-spinal-cord-general/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-Nuclei/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/BASAL-AMYGDALOID-NUCLEUS-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/BASAL-GANGLIA-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-nucleus-3/ http://brancusi1.usc.edu/RDF/name
N35e0f2dd73d84d0c8dcb3334b284c38d http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-forebrain/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-forebrain/ http://brancusi1.usc.edu/RDF/abbreviation
N302065ddff7141549b427bb769c3022b http://brancusi1.usc.edu/RDF/name
Nf506bc08a91e4818962a826969fe8172 http://brancusi1.usc.edu/RDF/name
N9289aa5a064c48f5a242d602414a10f1 http://brancusi1.usc.edu/RDF/name
Nb6a2af56bb0d4cda9edccaeecc8376e4 http://brancusi1.usc.edu/RDF/name
Na6a6dc5b86b542e4929d47114f6eac5f http://brancusi1.usc.edu/RDF/name
N0da72ac281934c8d88838953987fff76 http://brancusi1.usc.edu/RDF/name
N0c534989ca564785973c6dc0739e78b6 http://brancusi1.usc.edu/RDF/name
Nffbada52fe87489bbe844e4a93b4716c http://brancusi1.usc.edu/RDF/name
Nf54dc08a3e44402aa8e4f50131f1685f http://brancusi1.usc.edu/RDF/name
N32121f94720547a4962c80f6147deda1 http://brancusi1.usc.edu/RDF/name
N0d3a6c07653c429a9b8e633396876ac1 http://brancusi1.usc.edu/RDF/name
Nbbff2a068df84b6e87ddef97dd14ef3d http://brancusi1.usc.edu/RDF/name
N98da41a65c8344a3ac29d46531894bc5 http://brancusi1.usc.edu/RDF/name
N34a62f7ac79840f3bee6fd1f7dfbf865 http://brancusi1.usc.edu/RDF/name
N6ff4c6d44a704fe6b308c65afc175c61 http://brancusi1.usc.edu/RDF/name
N27e76961005b42b4bbe766a1456bf174 http://brancusi1.usc.edu/RDF/name
N87a432ccb78a483d9f4686aa43a66f10 http://brancusi1.usc.edu/RDF/name
N007d87b14b864e99a811539b1c92cc2f http://brancusi1.usc.edu/RDF/name
Naddb806600d6460ebadd9dba870b113d http://brancusi1.usc.edu/RDF/name
N5d7380b925bb4678a3f74ae5fb0fd5fa http://brancusi1.usc.edu/RDF/name
N71f3313140fc4e06a4b40120a274b6cd http://brancusi1.usc.edu/RDF/name
N11650af875d2415ebbca2ecf3097871b http://brancusi1.usc.edu/RDF/name
Na82968e2e81d4873909f63e11d617c84 http://brancusi1.usc.edu/RDF/name
N7f725ee50861492e91f737e24f1ec626 http://brancusi1.usc.edu/RDF/name
N7aa05fa20a7246778b555b8514061f59 http://brancusi1.usc.edu/RDF/name
Nb0b1017346df4fb796d199f04170888d http://brancusi1.usc.edu/RDF/name
Nafcf51fd67674d65a0bd90214bc0e27c http://brancusi1.usc.edu/RDF/name
N35370bb394f3435b87ab1eb1f6a52050 http://brancusi1.usc.edu/RDF/name
N4cb32a6a50db402d92d944d29c08fe81 http://brancusi1.usc.edu/RDF/name
N7980bd42e52d47a092243ec22188b7fe http://brancusi1.usc.edu/RDF/name
Nc5e884b0dfd74e5694046ce392a138b0 http://brancusi1.usc.edu/RDF/name
N9fecdb3feac84266a5f5ea9d6104136c http://brancusi1.usc.edu/RDF/name
Nb94e7434c4794252a2ba1b3e46606c93 http://brancusi1.usc.edu/RDF/name
Nbd36616daecf47b5884755817fab6cb8 http://brancusi1.usc.edu/RDF/name
Nec66ee43b7b549b383243c1682658f4c http://brancusi1.usc.edu/RDF/name
N15d39c7921e84b4c81619f8f769b19bd http://brancusi1.usc.edu/RDF/name
N8cf046f332ad46ffb005875214f7b1d5 http://brancusi1.usc.edu/RDF/name
N1fc1b317e4f04a86b72e6e82f01c9f44 http://brancusi1.usc.edu/RDF/name
N4981e26c63404632883bf10762405725 http://brancusi1.usc.edu/RDF/name
N16e9fdb3042141c28f751784e67e98cb http://brancusi1.usc.edu/RDF/name
N0e5d1a8f6e7f46b39f4c8072bba4078f http://brancusi1.usc.edu/RDF/name
N99ea586e35b345baa6b69ccbdd15c80e http://brancusi1.usc.edu/RDF/name
Nf9a4c5c8840545529a0e4c2b16bd3b11 http://brancusi1.usc.edu/RDF/name
Nd7938acdfea14d759c8dfbf31f321e93 http://brancusi1.usc.edu/RDF/name
N8a4fc40eadd4404d9f68c4a64ce63e39 http://brancusi1.usc.edu/RDF/name
N0e1bd453f25e4625ae55f2133dd4a004 http://brancusi1.usc.edu/RDF/name
N4bff831afea543c9aaca9ec0eb28161b http://brancusi1.usc.edu/RDF/name
Ne48b3498521f4c23b780f6a8a38994dd http://brancusi1.usc.edu/RDF/name
N32f85bfa33ed470d836c04450414ae56 http://brancusi1.usc.edu/RDF/name
Nc3890065610a414a87b5024340b50e9d http://brancusi1.usc.edu/RDF/name
Na26618dda70e4123a877ddf951cb8824 http://brancusi1.usc.edu/RDF/name
Ne4d6872c24744e8ba21e2b9ef4a38c9e http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-nucleus-2/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-nucleus-diffuse-part/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-nuclear-complex/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-nucleus-compact-part/ http://brancusi1.usc.edu/RDF/name
N3e06a8740c4743bcbeb4a2dcca8ce499 http://brancusi1.usc.edu/RDF/name
N2f71200453f34b47a7728b65d82955de http://brancusi1.usc.edu/RDF/chapter
http://brancusi1.usc.edu/brain_parts/basal-operculum/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-nucleus-of-Meynert/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-ventromedial-nucleus-of-the-thalamus/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/basal-nucleus-Meynert/ http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/name
#Query including the actual name(s):
qres = g.query(
"""SELECT ?subject ?predicate ?text
WHERE {
?subject ?predicate ?text .
FILTER regex(?text, "^basal", "i") .
?subject ?predicate ?text
}""")
for r in qres.result:
    print str(r[0]), str(r[1]), str(r[2])
#Results:
http://brancusi1.usc.edu/brain_parts/BASAL-AMYGDALOID-NUCLEUS/ http://brancusi1.usc.edu/RDF/name BASAL AMYGDALOID NUCLEUS
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-dorsal-horn-2/ http://brancusi1.usc.edu/RDF/name Basal nucleus of the dorsal horn
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/name Basal ganglia
http://brancusi1.usc.edu/brain_parts/BASAL-PART-OF-PONS-2/ http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS
http://brancusi1.usc.edu/brain_parts/basal-nucleus/ http://brancusi1.usc.edu/RDF/name basal nucleus
http://brancusi1.usc.edu/brain_parts/BASAL-PART-OF-PONS/ http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS
http://brancusi1.usc.edu/brain_parts/BASAL-GANGLIA/ http://brancusi1.usc.edu/RDF/name BASAL GANGLIA
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-dorsal-horn/ http://brancusi1.usc.edu/RDF/name Basal nucleus of the dorsal horn
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-spinal-cord/ http://brancusi1.usc.edu/RDF/name Basal nucleus of the spinal cord
http://brancusi1.usc.edu/brain_parts/Basal-nucleus-of-the-spinal-cord-general/ http://brancusi1.usc.edu/RDF/name Basal nucleus of the spinal cord, general
http://brancusi1.usc.edu/brain_parts/Basal-Nuclei/ http://brancusi1.usc.edu/RDF/name Basal Nuclei
http://brancusi1.usc.edu/brain_parts/BASAL-AMYGDALOID-NUCLEUS-2/ http://brancusi1.usc.edu/RDF/name BASAL AMYGDALOID NUCLEUS
http://brancusi1.usc.edu/brain_parts/BASAL-GANGLIA-2/ http://brancusi1.usc.edu/RDF/name BASAL GANGLIA
http://brancusi1.usc.edu/brain_parts/basal-nucleus-3/ http://brancusi1.usc.edu/RDF/name basal nucleus
N35e0f2dd73d84d0c8dcb3334b284c38d http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS : pontine nuclei
http://brancusi1.usc.edu/brain_parts/Basal-forebrain/ http://brancusi1.usc.edu/RDF/name Basal forebrain
http://brancusi1.usc.edu/brain_parts/Basal-forebrain/ http://brancusi1.usc.edu/RDF/abbreviation Basal forebrain
N302065ddff7141549b427bb769c3022b http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : claustral amygdaloid area
Nf506bc08a91e4818962a826969fe8172 http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS : longitudinal pontine fibers
N9289aa5a064c48f5a242d602414a10f1 http://brancusi1.usc.edu/RDF/name Basal ganglia : Striatum
Nb6a2af56bb0d4cda9edccaeecc8376e4 http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : STRIATUM
Na6a6dc5b86b542e4929d47114f6eac5f http://brancusi1.usc.edu/RDF/name Basal ganglia : Fundus striati
N0da72ac281934c8d88838953987fff76 http://brancusi1.usc.edu/RDF/name Basal ganglia : Lateral striatal stripe
N0c534989ca564785973c6dc0739e78b6 http://brancusi1.usc.edu/RDF/name basal nucleus, diffuse part : nucleus of the ansa peduncularis
Nffbada52fe87489bbe844e4a93b4716c http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS : longitudinal pontine fibers
Nf54dc08a3e44402aa8e4f50131f1685f http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : STRIATUM
N32121f94720547a4962c80f6147deda1 http://brancusi1.usc.edu/RDF/name Basal forebrain : Nucleus accumbens
N0d3a6c07653c429a9b8e633396876ac1 http://brancusi1.usc.edu/RDF/name Basal nucleus of the dorsal horn : Lateral spinal nucleus
Nbbff2a068df84b6e87ddef97dd14ef3d http://brancusi1.usc.edu/RDF/name BASAL AMYGDALOID NUCLEUS : lateral part of basal amygdaloid nucleus
N98da41a65c8344a3ac29d46531894bc5 http://brancusi1.usc.edu/RDF/name basal nucleus of Meynert : basal nucleus, compact part
N34a62f7ac79840f3bee6fd1f7dfbf865 http://brancusi1.usc.edu/RDF/name Basal nuclear complex : basal nucleus of Meynert
N6ff4c6d44a704fe6b308c65afc175c61 http://brancusi1.usc.edu/RDF/name Basal Nuclei : Striatum
N27e76961005b42b4bbe766a1456bf174 http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS : pontine nuclei
N87a432ccb78a483d9f4686aa43a66f10 http://brancusi1.usc.edu/RDF/name Basal nucleus of the dorsal horn : Lateral cervical nucleus
N007d87b14b864e99a811539b1c92cc2f http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS : transverse pontine fibers
Naddb806600d6460ebadd9dba870b113d http://brancusi1.usc.edu/RDF/name Basal nucleus of the spinal cord, general : Basal nucleus of the spinal cord
N5d7380b925bb4678a3f74ae5fb0fd5fa http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : external capsule
N71f3313140fc4e06a4b40120a274b6cd http://brancusi1.usc.edu/RDF/name BASAL AMYGDALOID NUCLEUS : lateral part of basal amygdaloid nucleus
N11650af875d2415ebbca2ecf3097871b http://brancusi1.usc.edu/RDF/name Basal ganglia : Pallidum
Na82968e2e81d4873909f63e11d617c84 http://brancusi1.usc.edu/RDF/name Basal nucleus of the dorsal horn : Lateral spinal nucleus
N7f725ee50861492e91f737e24f1ec626 http://brancusi1.usc.edu/RDF/name Basal ganglia : basal nucleus
N7aa05fa20a7246778b555b8514061f59 http://brancusi1.usc.edu/RDF/name Basal forebrain : Substantia innominata
Nb0b1017346df4fb796d199f04170888d http://brancusi1.usc.edu/RDF/name Basal Nuclei : Pallidum
Nafcf51fd67674d65a0bd90214bc0e27c http://brancusi1.usc.edu/RDF/name Basal ganglia : Pallidum
N35370bb394f3435b87ab1eb1f6a52050 http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS : transverse pontine fibers
N4cb32a6a50db402d92d944d29c08fe81 http://brancusi1.usc.edu/RDF/name BASAL AMYGDALOID NUCLEUS : medial part of basal amygdaloid nucleus
N7980bd42e52d47a092243ec22188b7fe http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : extreme capsule
Nc5e884b0dfd74e5694046ce392a138b0 http://brancusi1.usc.edu/RDF/name Basal ganglia : Interstitial nucleus of the posterior limb of the anterior commissure
N9fecdb3feac84266a5f5ea9d6104136c http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : GLOBUS PALLIDUS
Nb94e7434c4794252a2ba1b3e46606c93 http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : claustral amygdaloid area
Nbd36616daecf47b5884755817fab6cb8 http://brancusi1.usc.edu/RDF/name Basal forebrain : Bed nuclei of the stria terminalis
Nec66ee43b7b549b383243c1682658f4c http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : AMYGDALA
N15d39c7921e84b4c81619f8f769b19bd http://brancusi1.usc.edu/RDF/name basal nucleus, diffuse part : Nucleus ansae lenticularis
N8cf046f332ad46ffb005875214f7b1d5 http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : external capsule
N1fc1b317e4f04a86b72e6e82f01c9f44 http://brancusi1.usc.edu/RDF/name Basal nucleus of the spinal cord, general : Lateral spinal nucleus
N4981e26c63404632883bf10762405725 http://brancusi1.usc.edu/RDF/name basal nucleus of Meynert : basal nucleus, diffuse part
N16e9fdb3042141c28f751784e67e98cb http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS : middle cerebellar peduncle
N0e5d1a8f6e7f46b39f4c8072bba4078f http://brancusi1.usc.edu/RDF/name Basal forebrain : Putamen
N99ea586e35b345baa6b69ccbdd15c80e http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : claustrum
Nf9a4c5c8840545529a0e4c2b16bd3b11 http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : claustrum
Nd7938acdfea14d759c8dfbf31f321e93 http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : GLOBUS PALLIDUS
N8a4fc40eadd4404d9f68c4a64ce63e39 http://brancusi1.usc.edu/RDF/name BASAL PART OF PONS : middle cerebellar peduncle
N0e1bd453f25e4625ae55f2133dd4a004 http://brancusi1.usc.edu/RDF/name BASAL AMYGDALOID NUCLEUS : medial part of basal amygdaloid nucleus
N4bff831afea543c9aaca9ec0eb28161b http://brancusi1.usc.edu/RDF/name Basal nucleus of the spinal cord, general : Lateral cervical nucleus
Ne48b3498521f4c23b780f6a8a38994dd http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : AMYGDALA
N32f85bfa33ed470d836c04450414ae56 http://brancusi1.usc.edu/RDF/name Basal ganglia : bed nucleus of the accessory olfactory tract
Nc3890065610a414a87b5024340b50e9d http://brancusi1.usc.edu/RDF/name BASAL GANGLIA : extreme capsule
Na26618dda70e4123a877ddf951cb8824 http://brancusi1.usc.edu/RDF/name Basal nucleus of the dorsal horn : Lateral cervical nucleus
Ne4d6872c24744e8ba21e2b9ef4a38c9e http://brancusi1.usc.edu/RDF/name Basal ganglia : Striatum
http://brancusi1.usc.edu/brain_parts/basal-nucleus-2/ http://brancusi1.usc.edu/RDF/name basal nucleus
http://brancusi1.usc.edu/brain_parts/basal-nucleus-diffuse-part/ http://brancusi1.usc.edu/RDF/name basal nucleus, diffuse part
http://brancusi1.usc.edu/brain_parts/Basal-nuclear-complex/ http://brancusi1.usc.edu/RDF/name Basal nuclear complex
http://brancusi1.usc.edu/brain_parts/basal-nucleus-compact-part/ http://brancusi1.usc.edu/RDF/name basal nucleus, compact part
N3e06a8740c4743bcbeb4a2dcca8ce499 http://brancusi1.usc.edu/RDF/name Basal nucleus of the dorsal horn - equivalent class - Basal nucleus of the spinal cord
N2f71200453f34b47a7728b65d82955de http://brancusi1.usc.edu/RDF/chapter Basal ganglia
http://brancusi1.usc.edu/brain_parts/basal-operculum/ http://brancusi1.usc.edu/RDF/name basal operculum
http://brancusi1.usc.edu/brain_parts/basal-nucleus-of-Meynert/ http://brancusi1.usc.edu/RDF/name basal nucleus of Meynert
http://brancusi1.usc.edu/brain_parts/basal-ventromedial-nucleus-of-the-thalamus/ http://brancusi1.usc.edu/RDF/name basal ventromedial nucleus of the thalamus
http://brancusi1.usc.edu/brain_parts/basal-nucleus-Meynert/ http://brancusi1.usc.edu/RDF/name basal nucleus (Meynert)
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/name Basal ganglia
#Query:
qres = g.query(
"""SELECT ?subject ?predicate ?object
WHERE {
?subject ?predicate ?object .
} LIMIT 100""")
for r in qres.result:
print str(r[0]), str(r[1]), str(r[2])
#RESULTS:
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/description No description provided. The nomenclature was adapted from the atlas.
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/name pineal gland
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy of this region was constructed
using the parcellation scheme in this atlas.
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/collatorInvolvement http://brancusi1.usc.edu/RDF/expertiseAndCollationNomenclatureCitedReferences
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/abbreviation Pi
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/collationDate 2003-02-27
http://brancusi1.usc.edu/brain_parts/pineal-gland-3/ http://brancusi1.usc.edu/RDF/collator 510
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/description No description provided. The nomenclature was adapted from the atlas.
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/name ventromedial hypothalamic nucleus central part
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy of this region was constructed using the rat atlas Paxinos and Watson 1998 and Simerly 1995.
See also Swanson 1992.
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/collatorInvolvement http://brancusi1.usc.edu/RDF/expertiseAndCollationNomenclatureCitedReferences
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/abbreviation VMHC
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/collationDate 2003-02-27
http://brancusi1.usc.edu/brain_parts/ventromedial-hypothalamic-nucleus-central-part-3/ http://brancusi1.usc.edu/RDF/collator 510
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/description No description provided. The nomenclature was adapted from the atlas.
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/name optic nerve layer of the superior colliculus
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy of this region was constructed
using the parcellation scheme in this atlas and the information collated from Bowden 2002.
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/collatorInvolvement http://brancusi1.usc.edu/RDF/expertiseAndCollationNomenclatureCitedReferences
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/abbreviation Op
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/collationDate 2003-02-27
http://brancusi1.usc.edu/brain_parts/optic-nerve-layer-of-the-superior-colliculus-2/ http://brancusi1.usc.edu/RDF/collator 510
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/description No description provided. The nomenclature was adapted from the atlas.
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/name spinal trigeminal nucleus oral part
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy of this region was constructed
using the parcellation scheme in this atlas.
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/collatorInvolvement http://brancusi1.usc.edu/RDF/expertiseAndCollationNomenclatureCitedReferences
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/abbreviation Sp5O
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/collationDate 2003-02-27
http://brancusi1.usc.edu/brain_parts/spinal-trigeminal-nucleus-oral-part-2/ http://brancusi1.usc.edu/RDF/collator 510
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/description No description provided. The nomenclature was adapted from the atlas.
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/name medial amygdaloid nucleus anterior dorsal
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy of this region was constructed using the rat brain atlas Paxinos and Watson 1986,
and Alheid et al. 1995. See also Swanson 1992
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/collatorInvolvement http://brancusi1.usc.edu/RDF/expertiseAndCollationNomenclatureCitedReferences
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/abbreviation MeAD
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/collationDate 2003-02-27
http://brancusi1.usc.edu/brain_parts/medial-amygdaloid-nucleus-anterior-dorsal/ http://brancusi1.usc.edu/RDF/collator 510
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/description No description provided. The nomenclature was adapted from the atlas.
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/name central amygdaloid nucleus medial division anteroventral part
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy of this region was constructed using the rat brain atlas Paxinos and Watson 1986,
and Alheid et al. 1995. See also Swanson 1992
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/collatorInvolvement http://brancusi1.usc.edu/RDF/expertiseAndCollationNomenclatureCitedReferences
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/abbreviation CeMAV
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/collationDate 2003-02-27
http://brancusi1.usc.edu/brain_parts/central-amygdaloid-nucleus-medial-division-anteroventral-part/ http://brancusi1.usc.edu/RDF/collator 510
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/description No description provided. The nomenclature was adapted from the atlas.
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/name ventromedial preoptic nucleus
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy of this region was constructed using the rat atlas Paxinos and Watson 1998 and Simerly 1995.
See also Swanson 1992.
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/collatorInvolvement http://brancusi1.usc.edu/RDF/expertiseAndCollationNomenclatureCitedReferences
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/abbreviation VMPO
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/collationDate 2003-02-27
http://brancusi1.usc.edu/brain_parts/ventromedial-preoptic-nucleus-2/ http://brancusi1.usc.edu/RDF/collator 510
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://brancusi1.usc.edu/RDF/description No description provided. The nomenclature was adapted from the atlas.
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://brancusi1.usc.edu/RDF/name lateral septal nucleus
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy of this region was constructed
using Jakab and Leranth 1995. See also Swanson 1992
http://brancusi1.usc.edu/brain_parts/lateral-septal-nucleus-5/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
#This actually works
##CONSTRUCTING THE QUERIES TO GET THE RIGHT INFO:
######################################################################################################
######################################################################################################
#BASAL GANGLIA QUERY (note:everything is tabbed right one):
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?subject bamsProp:name "Basal ganglia" .
?subject ?predicate ?object
}""")
for r in qres.result:
print str(r[0]), str(r[1]), str(r[2])
#RESULTS:
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/description No description provided. Collator note: Abbreviation of this brain part was inserted by the collator.See the human brain nomenclature Bowden 2000.
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/name Basal ganglia
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/Hof-et-al-2000/
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/reference N539fbbc6f7ee43bea86cfe4614cd1ce5
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/abbreviation BG
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/collationDate 2003-11-28
http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/ http://brancusi1.usc.edu/RDF/collator 516
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/description Collator note: this region does not appear in the list of structures, nor in the the list of abbreviations, but is used as a superstructure in the section of delineation criteria of the mouse regions.
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/grossConstituent http://brancusi1.usc.edu/RDF/grayMatter
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/name Basal ganglia
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/nomenclature http://brancusi1.usc.edu/rdf/nomenclature/PaxinosFranklin-2001/
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/reference Nccb8a6aab6fb4eedaa16704b7cc865d1
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/species http://brancusi1.usc.edu/RDF/mouse
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/workspace 0
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/collatorArgument The hierarchy was constructed from the associated atlas.
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://brancusi1.usc.edu/RDF/brainPart
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/collatorInvolvement http://brancusi1.usc.edu/RDF/expertiseAndCollationNomenclatureCitedReferences
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/abbreviation BG
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/collationDate 2003-04-16
http://brancusi1.usc.edu/brain_parts/Basal-ganglia/ http://brancusi1.usc.edu/RDF/collator 510
######################################################################################################
######################################################################################################
######################################################################################################
######################################################################################################
#(MODIFIED) BASAL GANGLIA QUERY (note:everything is tabbed right one):
qres = g.query(
"""PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
SELECT ?subject ?predicate ?object
WHERE {
?object bamsProp:name "Basal ganglia" .
?subject ?predicate ?object
}""")
for r in qres.result:
print str(r[0]), str(r[1]), str(r[2])
#RESULTS:
N9289aa5a064c48f5a242d602414a10f1 http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/
Na6a6dc5b86b542e4929d47114f6eac5f http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/
N0da72ac281934c8d88838953987fff76 http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/
N11650af875d2415ebbca2ecf3097871b http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/
N81e4e6e8891e42338caa6e5742060071 http://brancusi1.usc.edu/RDF/class2 http://brancusi1.usc.edu/brain_parts/Basal-ganglia-2/
Nf868f21d20604feba0c04303c92849b6 http://brancusi1.usc.edu/RDF/class2 http://brancusi1.usc.edu/brain_parts/Basal-ganglia/
N7f725ee50861492e91f737e24f1ec626 http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia/
Nafcf51fd67674d65a0bd90214bc0e27c http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia/
Nc5e884b0dfd74e5694046ce392a138b0 http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia/
N32f85bfa33ed470d836c04450414ae56 http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia/
Ne4d6872c24744e8ba21e2b9ef4a38c9e http://brancusi1.usc.edu/RDF/class1 http://brancusi1.usc.edu/brain_parts/Basal-ganglia/
######################################################################################################
######################################################################################################
##THE CURRENT RESULTS THAT ARE PUBLISHED IN THE tempVTest.csv document belong to
##"BASAL GANGLIA QUERY" -- aka the second to last query
#need to isolate the names of the identifiers (terms, names, etc.):
http://brancusi1.usc.edu/RDF/description
http://brancusi1.usc.edu/RDF/grossConstituent
http://brancusi1.usc.edu/RDF/name
http://brancusi1.usc.edu/RDF/nomenclature
http://brancusi1.usc.edu/RDF/reference
http://brancusi1.usc.edu/RDF/species
http://brancusi1.usc.edu/RDF/workspace
http://brancusi1.usc.edu/RDF/collatorArgument
http://www.w3.org/1999/02/22-rdf-syntax-ns#type
http://brancusi1.usc.edu/RDF/collatorInvolvement
http://brancusi1.usc.edu/RDF/abbreviation
http://brancusi1.usc.edu/RDF/collationDate
http://brancusi1.usc.edu/RDF/collator
BAMS_Dict = {"description": , "grossConstituent": , "name": , "nomenclature": , "reference": , "species": , "workspace": , "collatorArgument": , "http://www.w3.org/1999/02/22-rdf-syntax-ns#type": , "collatorInvolvement": , "abbreviation": , "collationDate": , "collator": }
import csv
#create the writer once, outside the loop -- re-opening "tempVTest.csv" on
#every iteration truncates the file each time
c = csv.writer(open("tempVTest.csv","wb"))
#for r in qres.result:
#    print str(r[0]), str(r[1]), str(r[2])
# gives us the triple info in each cell (notice it's not in string format) it's pretty ugly
#c.writerow(qres.result)
# regardless of the format, i'm going to index this first
# figure out how to place at the next
# need to access each individual part of the triple
# making row plural allows for this type of functionality
#################################################################
#csv.DictWriter.writeheader('subject', 'predicate', 'object')
#################################################################
c.writerows(qres.result)
#lists all of the dialects
#csv.list_dialects()
#>>>['excel-tab', 'excel']
#maximum dialect allowed by parser
#csv.field_size_limit()
#>>>131072
#########################################################################
for r in qres.result:
sub = str(r[0])
pred = str(r[1])
obj = str(r[2])
#need to parse qres.result based on the "," 's .... then we can display them in a graph
#my_dict = {"Subject": qres.result[0], "Predicate": qres.result[1], "Object": qres.result[2]}
BAMS_Dict = {"description": qres.result[0][2], "grossConstituent": qres.result[1][2], "name": qres.result[2][2] , "nomenclature": qres.result[3][2], "reference": qres.result[4][2], "species": qres.result[5][2], "workspace": qres.result[6][2], "collatorArgument": qres.result[7][2], "http://www.w3.org/1999/02/22-rdf-syntax-ns#type": qres.result[8][2], "collatorInvolvement": qres.result[9][2], "abbreviation": qres.result[10][2], "collationDate": qres.result[11][2], "collator": qres.result[12][2]}
with open('mycsvfile.csv', 'wb') as f: # Just use 'w' mode in 3.x
w = csv.DictWriter(f, BAMS_Dict.keys())
w.writeheader()
w.writerow(BAMS_Dict)
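#side note (my addition): for more than one subject, build a list of dicts and
#use writerows; writeheader() still only needs to be called once, e.g.
#    w.writeheader()
#    w.writerows(list_of_row_dicts)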
with open('tempVTest.csv', 'rb') as csvfile:
dialect = csv.Sniffer().sniff(csvfile.read(1024))
csvfile.seek(0)
reader = csv.reader(csvfile, dialect)
print dialect
print str(reader)
#Sniffer.has_header expects a sample of the file's text, not a filename --
#passing the literal string 'tempVTest.csv' just analyzes that string itself,
#which is why it appeared to return True regardless of input
with open('tempVTest.csv', 'rb') as csvfile:
    print csv.Sniffer().has_header(csvfile.read(1024))
#DictWriter.writeheader()
#command is used to write the header row
##########
###########
#For BAMS Thesaurus RDF:
import csv
#again: create the writer once rather than once per loop iteration
c = csv.writer(open("BAMS_Thesaurus_Data4Upload.csv","wb"))
#for r in qres.result:
#    print str(r[0]), str(r[1]), str(r[2])
#c.read()
# gives us the triple info in each cell (notice it's not in string format) it's pretty ugly
#c.writerow(qres.result)
# regardless of the format, i'm going to index this first
# figure out how to place at the next
# need to access each individual part of the triple
# making row plural allows for this type of functionality
#################################################################
#csv.DictWriter.writeheader('subject', 'predicate', 'object')
#################################################################
c.writerows(qres.result)
#a csv.writer has no Sniffer/read/seek -- sniff the dialect from the file itself
with open("BAMS_Thesaurus_Data4Upload.csv", 'rb') as csvfile:
    dialect = csv.Sniffer().sniff(csvfile.read(1024))
    csvfile.seek(0)
    reader = csv.reader(csvfile, dialect)
    print str(reader)
#Using sniffer to figure out the current dialect:
#with open('BAMS_Thesaurus_Data4Upload.csv', 'rb') as csvfile:
#dialect = csv.Sniffer().sniff(c.read(1024))
# trying revised statement for debugging purposes
with open('BAMS_Thesaurus_Data4Upload.csv', 'rb') as csvfile:
    dialect = csv.Sniffer().sniff(csvfile.read(1024))
    csvfile.seek(0)
    reader = csv.reader(csvfile, dialect)
    print str(reader)
| 65.558848 | 1,298 | 0.747144 | 11,154 | 79,654 | 5.304913 | 0.065447 | 0.168512 | 0.207399 | 0.246286 | 0.908148 | 0.901962 | 0.88687 | 0.791689 | 0.781024 | 0.7672 | 0 | 0.068455 | 0.079908 | 79,654 | 1,214 | 1,299 | 65.61285 | 0.73891 | 0.099844 | 0 | 0.417933 | 0 | 0.478723 | 0.066327 | 0.042304 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.00152 | 0.027356 | null | null | 0.069909 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
8ffc9465be40c71584535f9b2d530eeba31ef72c | 9,533 | py | Python | neural-style/vgg.py | DavidLi-Minxiao/mchacks-2017 | 0e660c6f3cd381600b1c36aafed5cd3ab1401f0e | [
"MIT"
] | 1 | 2017-04-18T08:42:46.000Z | 2017-04-18T08:42:46.000Z | neural-style/vgg.py | DavidLi-Minxiao/mchacks-2017 | 0e660c6f3cd381600b1c36aafed5cd3ab1401f0e | [
"MIT"
] | null | null | null | neural-style/vgg.py | DavidLi-Minxiao/mchacks-2017 | 0e660c6f3cd381600b1c36aafed5cd3ab1401f0e | [
"MIT"
] | null | null | null |
import tensorflow as tf
import numpy as np
import os
import urllib
from scipy.misc import imread, imresize
from tf_util import kernel_variable, bias_variable
def download_weights_maybe(weight_file):
if not os.path.exists(weight_file):
print "Downloading weights from https://www.cs.toronto.edu/~frossard/vgg16/vgg16_weights.npz"
urllib.urlretrieve("https://www.cs.toronto.edu/~frossard/vgg16/vgg16_weights.npz", weight_file)
class vgg16:
def __init__(self, imgs, reuse=False):
self.imgs = imgs
self.convlayers(reuse)
def convlayers(self, reuse=False):
self.parameters = []
# conv1_1
with tf.variable_scope('conv1_1', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 3, 64], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.imgs, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[64], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv1_1 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# conv1_2
with tf.variable_scope('conv1_2', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 64, 64], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.conv1_1, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[64], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv1_2 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# pool1
self.pool1 = tf.nn.avg_pool(self.conv1_2,
ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1],
padding='SAME',
name='pool1')
# conv2_1
with tf.variable_scope('conv2_1', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 64, 128], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.pool1, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[128], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv2_1 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# conv2_2
with tf.variable_scope('conv2_2', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 128, 128], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.conv2_1, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[128], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv2_2 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# pool2
self.pool2 = tf.nn.avg_pool(self.conv2_2,
ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1],
padding='SAME',
name='pool2')
# conv3_1
with tf.variable_scope('conv3_1', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 128, 256], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.pool2, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[256], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv3_1 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# conv3_2
with tf.variable_scope('conv3_2', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 256, 256], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.conv3_1, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[256], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv3_2 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# conv3_3
with tf.variable_scope('conv3_3', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 256, 256], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.conv3_2, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[256], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv3_3 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# pool3
self.pool3 = tf.nn.avg_pool(self.conv3_3,
ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1],
padding='SAME',
name='pool3')
# conv4_1
with tf.variable_scope('conv4_1', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 256, 512], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.pool3, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[512], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv4_1 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# conv4_2
with tf.variable_scope('conv4_2', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 512, 512], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.conv4_1, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[512], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv4_2 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# conv4_3
with tf.variable_scope('conv4_3', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 512, 512], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.conv4_2, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[512], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv4_3 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# pool4
self.pool4 = tf.nn.avg_pool(self.conv4_3,
ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1],
padding='SAME',
name='pool4')
# conv5_1
with tf.variable_scope('conv5_1', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 512, 512], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.pool4, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[512], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv5_1 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# conv5_2
with tf.variable_scope('conv5_2', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 512, 512], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.conv5_1, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[512], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv5_2 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# conv5_3
with tf.variable_scope('conv5_3', reuse=reuse) as scope:
kernel = kernel_variable('weights', shape=[3, 3, 512, 512], trainable=False, collection='VGG_weights')
conv = tf.nn.conv2d(self.conv5_2, kernel, [1, 1, 1, 1], padding='SAME')
biases = bias_variable('biases', shape=[512], trainable=False, collection='VGG_weights')
out = tf.nn.bias_add(conv, biases)
self.conv5_3 = tf.nn.relu(out, name=scope.name)
self.parameters += [kernel, biases]
# pool5
self.pool5 = tf.nn.avg_pool(self.conv5_3,
ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1],
padding='SAME',
name='pool5')
def get_layer(self, layer_name):
return getattr(self, layer_name)
def load_weights(self, weight_file, sess):
weights = np.load(weight_file)
keys = sorted(weights.keys())
for i, k in enumerate(keys):
if i < len(self.parameters):
print i, k, np.shape(weights[k])
sess.run(self.parameters[i].assign(weights[k]))
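# A possible refactor (my sketch, not part of the original file): the thirteen
# conv blocks in convlayers differ only in scope name, input tensor and kernel
# shape, so a small helper could build each one. Uses the same kernel_variable /
# bias_variable utilities imported above.
def make_conv(bottom, name, shape, reuse=False):
    # shape is [kh, kw, in_channels, out_channels]; returns (relu_out, [kernel, biases])
    with tf.variable_scope(name, reuse=reuse) as scope:
        kernel = kernel_variable('weights', shape=shape, trainable=False, collection='VGG_weights')
        conv = tf.nn.conv2d(bottom, kernel, [1, 1, 1, 1], padding='SAME')
        biases = bias_variable('biases', shape=[shape[-1]], trainable=False, collection='VGG_weights')
        out = tf.nn.bias_add(conv, biases)
        return tf.nn.relu(out, name=scope.name), [kernel, biases]
# e.g. self.conv1_1, params = make_conv(self.imgs, 'conv1_1', [3, 3, 3, 64], reuse)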
if __name__ == '__main__':
sess = tf.Session()
imgs = tf.placeholder(tf.float32, [None, 224, 224, 3])
vgg = vgg16(imgs)
vgg.load_weights('weights/vgg16_weights.npz', sess)
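# hypothetical follow-up (added comment): pull an intermediate activation for
# style/content losses, e.g.
# feats = sess.run(vgg.get_layer('conv3_3'), feed_dict={imgs: batch_of_images})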
| 49.139175 | 115 | 0.5696 | 1,218 | 9,533 | 4.32266 | 0.086207 | 0.033428 | 0.118519 | 0.133333 | 0.835328 | 0.759354 | 0.759354 | 0.759354 | 0.759354 | 0.759354 | 0 | 0.057084 | 0.290675 | 9,533 | 193 | 116 | 49.393782 | 0.721532 | 0.013952 | 0 | 0.431507 | 0 | 0 | 0.087536 | 0.002666 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.041096 | null | null | 0.013699 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8909af4b6b00fc4e455cdaf82053c90267449f0c | 2,586 | py | Python | tests/schema/types/test_array.py | manoadamro/flapi | e5ed4ebbb49ac88ce842c04ce73d0a97ce3fe00d | [
"MIT"
] | 3 | 2019-01-07T20:20:30.000Z | 2019-01-11T11:15:19.000Z | tests/schema/types/test_array.py | manoadamro/flapi | e5ed4ebbb49ac88ce842c04ce73d0a97ce3fe00d | [
"MIT"
] | null | null | null | tests/schema/types/test_array.py | manoadamro/flapi | e5ed4ebbb49ac88ce842c04ce73d0a97ce3fe00d | [
"MIT"
] | 1 | 2019-01-11T11:15:27.000Z | 2019-01-11T11:15:27.000Z | import unittest
import flapi.schema.errors
import flapi.schema.types
class BasicSchema(flapi.schema.types.Schema):
thing = flapi.schema.types.Bool()
class ArrayTest(unittest.TestCase):
def test_min_only(self):
prop = flapi.schema.types.Array(flapi.schema.types.Bool, min_length=0)
self.assertEqual(prop([True, True]), [True, True])
def test_min_only_out_of_range(self):
prop = flapi.schema.types.Array(flapi.schema.types.Bool, min_length=1)
self.assertRaises(flapi.schema.errors.SchemaValidationError, prop, [])
def test_max_only(self):
prop = flapi.schema.types.Array(flapi.schema.types.Bool, max_length=3)
self.assertEqual(prop([True, True]), [True, True])
def test_max_only_out_of_range(self):
prop = flapi.schema.types.Array(flapi.schema.types.Bool, max_length=3)
self.assertRaises(
flapi.schema.errors.SchemaValidationError, prop, [True, True, True, True]
)
def test_min_and_max(self):
prop = flapi.schema.types.Array(
flapi.schema.types.Bool, min_length=0, max_length=3
)
self.assertEqual(prop([True, True]), [True, True])
def test_min_and_max_out_of_range(self):
prop = flapi.schema.types.Array(
flapi.schema.types.Bool, min_length=0, max_length=3
)
self.assertRaises(
flapi.schema.errors.SchemaValidationError, prop, [True, True, True, True]
)
def test_no_range(self):
prop = flapi.schema.types.Array(flapi.schema.types.Bool)
self.assertEqual(prop([True, True, True, True]), [True, True, True, True])
def test_array_of_property(self):
prop = flapi.schema.types.Array(flapi.schema.types.Bool)
self.assertEqual(prop([True, True]), [True, True])
def test_array_of_property_fails(self):
prop = flapi.schema.types.Array(flapi.schema.types.Bool)
self.assertRaises(flapi.schema.errors.SchemaValidationError, prop, [True, ""])
def test_wrong_type(self):
prop = flapi.schema.types.Array(BasicSchema, callback=None)
self.assertRaises(flapi.schema.errors.SchemaValidationError, prop, 12)
def test_callback(self):
prop = flapi.schema.types.Array(
BasicSchema, callback=lambda v: [{"thing": True}]
)
self.assertEqual(prop([{"thing": False}, {"thing": False}]), [{"thing": True}])
def test_no_callback(self):
prop = flapi.schema.types.Array(BasicSchema, callback=None)
self.assertEqual(prop([{"thing": False}]), [{"thing": False}])
| 37.478261 | 87 | 0.665507 | 330 | 2,586 | 5.078788 | 0.130303 | 0.196897 | 0.229117 | 0.136038 | 0.855609 | 0.855609 | 0.855609 | 0.74463 | 0.710024 | 0.636038 | 0 | 0.004824 | 0.198376 | 2,586 | 68 | 88 | 38.029412 | 0.803666 | 0 | 0 | 0.384615 | 0 | 0 | 0.011601 | 0 | 0 | 0 | 0 | 0 | 0.230769 | 1 | 0.230769 | false | 0 | 0.057692 | 0 | 0.346154 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
64ec114614b3b28c64ea312ca89cdc3e997ad1fe | 21,377 | py | Python | DQM/Physics/python/singleTopDQM_cfi.py | pasmuss/cmssw | 566f40c323beef46134485a45ea53349f59ae534 | [
"Apache-2.0"
] | null | null | null | DQM/Physics/python/singleTopDQM_cfi.py | pasmuss/cmssw | 566f40c323beef46134485a45ea53349f59ae534 | [
"Apache-2.0"
] | null | null | null | DQM/Physics/python/singleTopDQM_cfi.py | pasmuss/cmssw | 566f40c323beef46134485a45ea53349f59ae534 | [
"Apache-2.0"
] | null | null | null | import FWCore.ParameterSet.Config as cms
EletightIsoCut = "(gsfElectronRef.pfIsolationVariables.sumChargedHadronPt + max(0., gsfElectronRef.pfIsolationVariables.sumNeutralHadronEt + gsfElectronRef.pfIsolationVariables.sumPhotonEt - 0.5 * gsfElectronRef.pfIsolationVariables.sumPUPt) ) / gsfElectronRef.pt < 0.1"
ElelooseIsoCut = "(gsfElectronRef.pfIsolationVariables.sumChargedHadronPt + max(0., gsfElectronRef.pfIsolationVariables.sumNeutralHadronEt + gsfElectronRef.pfIsolationVariables.sumPhotonEt - 0.5 * gsfElectronRef.pfIsolationVariables.sumPUPt) ) / gsfElectronRef.pt < 0.15"
singleTopTChannelLeptonDQM = cms.EDAnalyzer("SingleTopTChannelLeptonDQM",
## ------------------------------------------------------
## SETUP
##
## configuration of the MonitoringEnsemble(s)
## [mandatory] : optional PSets may be omitted
##
setup = cms.PSet(
## sub-directory to write the monitor histograms to
## [mandatory] : should not be changed w/o explicit
## communication to TopCom!
directory = cms.string("Physics/Top/SingleTopDQM/"),
## [mandatory]
sources = cms.PSet(
muons = cms.InputTag("pfIsolatedMuonsEI"),
elecs = cms.InputTag("pfIsolatedElectronsEI"),
jets = cms.InputTag("ak4PFJetsCHS"),
mets = cms.VInputTag("met", "tcMet", "pfMetEI"),
pvs = cms.InputTag("offlinePrimaryVertices")
),
## [optional] : when omitted the verbosity level is set to STANDARD
monitoring = cms.PSet(
verbosity = cms.string("DEBUG")
),
## [optional] : when omitted all monitoring plots for primary vertices
## will be filled w/o extras
# pvExtras = cms.PSet(
## when omitted electron plots will be filled w/o additional pre-
## selection of the primary vertex candidates
# select = cms.string("abs(x)<1. & abs(y)<1. & abs(z)<20. & tracksSize>3 & !isFake")
# ),
## [optional] : when omitted all monitoring plots for electrons
## will be filled w/o extras
elecExtras = cms.PSet(
## when omitted electron plots will be filled w/o cut on electronId
##electronId = cms.PSet( src = cms.InputTag("mvaTrigV0"), cutValue = cms.double(0.5) ),
## when omitted electron plots will be filled w/o additional pre-
## selection of the electron candidates
select = cms.string("pt>15 & abs(eta)<2.5 & abs(gsfElectronRef.gsfTrack.d0)<1 & abs(gsfElectronRef.gsfTrack.dz)<20"),
## when omitted isolated electron multiplicity plot will be equi-
## valent to inclusive electron multiplicity plot
isolation = cms.string(ElelooseIsoCut),
),
## [optional] : when omitted all monitoring plots for muons
## will be filled w/o extras
muonExtras = cms.PSet(
## when omitted muon plots will be filled w/o additional pre-
## selection of the muon candidates
select = cms.string("pt>10 & abs(eta)<2.1 & isGlobalMuon & abs(globalTrack.d0)<1 & abs(globalTrack.dz)<20"),
## when omitted isolated muon multiplicity plot will be equi-
## valent to inclusive muon multiplicity plot
# isolation = cms.string("(isolationR03.sumPt+isolationR03.emEt+isolationR03.hadEt)/pt<0.1"),
),
## [optional] : when omitted all monitoring plots for jets will
## be filled from uncorrected jets
jetExtras = cms.PSet(
## when omitted monitor plots for pt will be filled from uncorrected
## jets
jetCorrector = cms.string("ak4CaloL2L3"),
## when omitted monitor plots will be filled w/o additional cut on
## jetID
# jetID = cms.PSet(
# label = cms.InputTag("ak4JetID"),
# select = cms.string("fHPD < 0.98 & n90Hits>1 & restrictedEMF<1")
# ),
## when omitted no extra selection will be applied on jets before
## filling the monitor histograms; if jetCorrector is present the
## selection will be applied to corrected jets
select = cms.string("pt>15 & abs(eta)<2.5 & emEnergyFraction>0.01"),
),
## [optional] : when omitted no mass window will be applied
## for the W mass before filling the event monitoring plots
# massExtras = cms.PSet(
# lowerEdge = cms.double( 70.),
# upperEdge = cms.double(110.)
# ),
## [optional] : when omitted the monitoring plots for triggering
## will be empty
triggerExtras = cms.PSet(
src = cms.InputTag("TriggerResults","","HLT"),
paths = cms.vstring(['HLT_Mu3:HLT_QuadJet15U',
'HLT_Mu5:HLT_QuadJet15U',
'HLT_Mu7:HLT_QuadJet15U',
'HLT_Mu9:HLT_QuadJet15U'])
)
),
## ------------------------------------------------------
## PRESELECTION
##
## setup of the event preselection, which will not
## be monitored
## [mandatory] : but may be empty
##
preselection = cms.PSet(
## [optional] : when omitted no preselection is applied
# trigger = cms.PSet(
# src = cms.InputTag("TriggerResults","","HLT"),
# select = cms.vstring(['HLT_Mu11', 'HLT_Ele15_LW_L1R', 'HLT_QuadJet30'])
# ),
## [optional] : when omitted no preselection is applied
# vertex = cms.PSet(
# src = cms.InputTag("offlinePrimaryVertices"),
# select = cms.string('abs(x)<1. & abs(y)<1. & abs(z)<20. & tracksSize>3 & !isFake')
# )
),
## ------------------------------------------------------
## SELECTION
##
## monitor histograms are filled after each selection
## step, the selection is applied in the order defined
## by this vector
## [mandatory] : may be empty or contain an arbitrary
## number of PSets
##
selection = cms.VPSet(
cms.PSet(
label = cms.string("jets/calo:step0"),
src = cms.InputTag("ak4CaloJets"),
select = cms.string("pt>20 & abs(eta)<2.1 & 0.05<emEnergyFraction"),
jetID = cms.PSet(
label = cms.InputTag("ak4JetID"),
select = cms.string("fHPD < 0.98 & n90Hits>1 & restrictedEMF<1")
),
min = cms.int32(2),
)
)
)
singleTopMuonMediumDQM = cms.EDAnalyzer("SingleTopTChannelLeptonDQM",
## ------------------------------------------------------
## SETUP
##
## configuration of the MonitoringEnsemble(s)
## [mandatory] : optional PSets may be omitted
##
setup = cms.PSet(
## sub-directory to write the monitor histograms to
## [mandatory] : should not be changed w/o explicit
## communication to TopCom!
directory = cms.string("Physics/Top/SingleTopMuonMediumDQM/"),
## [mandatory]
sources = cms.PSet(
muons = cms.InputTag("pfIsolatedMuonsEI"),
elecs_gsf = cms.InputTag("gedGsfElectrons"),
elecs = cms.InputTag("pfIsolatedElectronsEI"),
jets = cms.InputTag("ak4PFJetsCHS"),
mets = cms.VInputTag("met", "tcMet", "pfMetEI"),
pvs = cms.InputTag("offlinePrimaryVertices")
),
## [optional] : when omitted the verbosity level is set to STANDARD
monitoring = cms.PSet(
verbosity = cms.string("DEBUG")
),
## [optional] : when omitted all monitoring plots for primary vertices
## will be filled w/o extras
# pvExtras = cms.PSet(
## when omitted electron plots will be filled w/o additional pre-
## selection of the primary vertex candidates
# select = cms.string("") #abs(x)<1. & abs(y)<1. & abs(z)<20. & tracksSize>3 & !isFake")
# ),
## [optional] : when omitted all monitoring plots for muons
## will be filled w/o extras
muonExtras = cms.PSet(
## when omitted muon plots will be filled w/o additional pre-
## selection of the muon candidates
select = cms.string("abs(muonRef.eta)<2.1")
## & isGlobalMuon & innerTrack.numberOfValidHits>10 & globalTrack.normalizedChi2>-1 & globalTrack.normalizedChi2<10
##& (isolationR03.sumPt+isolationR03.emEt+isolationR03.hadEt)/pt<0.1"),
## when omitted isolated muon multiplicity plot will be equi-
## valent to inclusive muon multiplicity plot
## isolation = cms.string("(muonRef.isolationR03.sumPt+muonRef.isolationR03.emEt+muonRef.isolationR03.hadEt)/muonRef.pt<10" )
## isolation = cms.string("(muonRef.isolationR03.sumPt+muonRef.isolationR03.emEt+muonRef.isolationR03.hadEt)/muonRef.pt<0.1")
),
## [optional] : when omitted all monitoring plots for jets
## will be filled w/o extras
jetExtras = cms.PSet(
## when omitted monitor plots for pt will be filled from uncorrected
## jets
jetCorrector = cms.string("topDQMak5PFCHSL2L3"),
## when omitted monitor plots will be filled w/o additional cut on
## jetID
# jetID = cms.PSet(
# label = cms.InputTag("ak4JetID"),
# select = cms.string(""), ##fHPD < 0.98 & n90Hits>1 & restrictedEMF<1")
# ),
## when omitted no extra selection will be applied on jets before
## filling the monitor histograms; if jetCorrector is present the
## selection will be applied to corrected jets
select = cms.string("pt>15 & abs(eta)<2.5"), # & neutralEmEnergyFraction >0.01 & chargedEmEnergyFraction>0.01"),
## when omitted monitor histograms for b-tagging will not be filled
jetBTaggers = cms.PSet(
trackCountingEff = cms.PSet(
label = cms.InputTag("pfTrackCountingHighEffBJetTags" ),
workingPoint = cms.double(1.25)
),
trackCountingPur = cms.PSet(
label = cms.InputTag("pfTrackCountingHighPurBJetTags" ),
workingPoint = cms.double(3.41)
),
secondaryVertex = cms.PSet(
label = cms.InputTag("pfSimpleSecondaryVertexHighEffBJetTags"),
workingPoint = cms.double(2.05)
),
combinedSecondaryVertex = cms.PSet(
label = cms.InputTag("pfCombinedInclusiveSecondaryVertexV2BJetTags"),
workingPoint = cms.double(0.970)
)
)
)
## [optional] : when omitted no mass window will be applied
## for the W mass before filling the event monitoring plots
# massExtras = cms.PSet(
# lowerEdge = cms.double( 70.),
# upperEdge = cms.double(110.)
# ),
## [optional] : when omitted the monitoring plots for triggering
## will be empty
# triggerExtras = cms.PSet(
# src = cms.InputTag("TriggerResults","","HLT"),
# paths = cms.vstring(['HLT_IsoMu17_eta2p1_CentralPFNoPUJet30_BTagIPIter_v1'])
# 'HLT_IsoMu24_eta2p1_v12',
# 'HLT_IsoMu20_eta2p1_CentralPFJet30_BTagIPIter_v2',
# 'HLT_IsoMu20_eta2p1_CentralPFJet30_BTagIPIter_v3'])
# )
),
## ------------------------------------------------------
## PRESELECTION
##
## setup of the event preselection, which will not
## be monitored
## [mandatory] : but may be empty
##
preselection = cms.PSet(
## [optional] : when omitted no preselection is applied
# trigger = cms.PSet(
# src = cms.InputTag("TriggerResults","","HLT"),
# select = cms.vstring(['HLT_IsoMu17_eta2p1_CentralPFNoPUJet30_BTagIPIter_v1'])
# ),
## [optional] : when omitted no preselection is applied
# vertex = cms.PSet(
# src = cms.InputTag("offlinePrimaryVertices"),
# select = cms.string('!isFake && ndof >= 4 && abs(z)<24. && position.Rho <= 2.0')
# )
),
## ------------------------------------------------------
## SELECTION
##
## monitor histograms are filled after each selection
## step, the selection is applied in the order defined
## by this vector
## [mandatory] : may be empty or contain an arbitrary
## number of PSets
selection = cms.VPSet(
cms.PSet(
label = cms.string("presel"),
src = cms.InputTag("offlinePrimaryVertices"),
select = cms.string('!isFake && ndof >= 4 && abs(z)<24. && position.Rho <= 2.0 '),
),
cms.PSet(
label = cms.string("muons/pf:step0"),
src = cms.InputTag("pfIsolatedMuonsEI"),
select = cms.string("muonRef.pt>20 & abs(muonRef.eta)<2.1 & muonRef.isNonnull & muonRef.innerTrack.isNonnull & muonRef.isGlobalMuon & muonRef.isTrackerMuon & muonRef.innerTrack.numberOfValidHits>10 & muonRef.globalTrack.hitPattern.numberOfValidMuonHits>0 & muonRef.globalTrack.normalizedChi2<10 & muonRef.innerTrack.hitPattern.pixelLayersWithMeasurement>=1 & muonRef.numberOfMatches>1 & abs(muonRef.innerTrack.dxy)<0.02 & (muonRef.pfIsolationR04.sumChargedHadronPt + muonRef.pfIsolationR04.sumNeutralHadronEt + muonRef.pfIsolationR04.sumPhotonEt)/muonRef.pt < 0.15"),
min = cms.int32(1),
max = cms.int32(1),
),
cms.PSet(
label = cms.string("jets/pf:step1"),
src = cms.InputTag("ak4PFJetsCHS"),
jetCorrector = cms.string("topDQMak5PFCHSL2L3"),
select = cms.string(" pt>30 & abs(eta)<4.5 & numberOfDaughters>1 & ((abs(eta)>2.4) || ( chargedHadronEnergyFraction > 0 & chargedMultiplicity>0 & chargedEmEnergyFraction<0.99)) & neutralEmEnergyFraction < 0.99 & neutralHadronEnergyFraction < 0.99"),
min = cms.int32(1),
max = cms.int32(1),
),
cms.PSet(
label = cms.string("jets/pf:step2"),
src = cms.InputTag("ak4PFJetsCHS"),
jetCorrector = cms.string("topDQMak5PFCHSL2L3"),
select = cms.string(" pt>30 & abs(eta)<4.5 & numberOfDaughters>1 & ((abs(eta)>2.4) || ( chargedHadronEnergyFraction > 0 & chargedMultiplicity>0 & chargedEmEnergyFraction<0.99)) & neutralEmEnergyFraction < 0.99 & neutralHadronEnergyFraction < 0.99"),
min = cms.int32(2),
max = cms.int32(2),
)
)
)
singleTopElectronMediumDQM = cms.EDAnalyzer("SingleTopTChannelLeptonDQM",
## ------------------------------------------------------
## SETUP
##
## configuration of the MonitoringEnsemble(s)
## [mandatory] : optional PSets may be omitted
##
setup = cms.PSet(
## sub-directory to write the monitor histograms to
## [mandatory] : should not be changed w/o explicit
## communication to TopCom!
directory = cms.string("Physics/Top/SingleTopElectronMediumDQM/"),
## [mandatory]
sources = cms.PSet(
muons = cms.InputTag("pfIsolatedMuonsEI"),
elecs_gsf = cms.InputTag("gedGsfElectrons"),
elecs = cms.InputTag("pfIsolatedElectronsEI"),
jets = cms.InputTag("ak4PFJetsCHS"),
mets = cms.VInputTag("met", "tcMet", "pfMetEI"),
pvs = cms.InputTag("offlinePrimaryVertices")
),
## [optional] : when omitted the verbosity level is set to STANDARD
monitoring = cms.PSet(
verbosity = cms.string("DEBUG")
),
## [optional] : when omitted all monitoring plots for primary vertices
## will be filled w/o extras
# pvExtras = cms.PSet(
## when omitted electron plots will be filled w/o additional pre-
## selection of the primary vertex candidates
# select = cms.string("abs(x)<1. & abs(y)<1. & abs(z)<20. & tracksSize>3 & !isFake")
# ),
## [optional] : when omitted all monitoring plots for electrons
## will be filled w/o extras
elecExtras = cms.PSet(
## when omitted electron plots will be filled w/o cut on electronId
##electronId = cms.PSet( src = cms.InputTag("mvaTrigV0"), cutValue = cms.double(0.5) ),
## when omitted electron plots will be filled w/o additional pre-
## selection of the electron candidates
select = cms.string("pt>25"), ## & abs(eta)<2.5 & (dr03TkSumPt+dr03EcalRecHitSumEt+dr03HcalTowerSumEt)/pt<0.1"),
## when omitted isolated electron multiplicity plot will be equi-
## valent to inclusive electron multiplicity plot
## isolation = cms.string(ElelooseIsoCut),
),
## [optional] : when omitted all monitoring plots for jets
## will be filled w/o extras
jetExtras = cms.PSet(
## when omitted monitor plots for pt will be filled from uncorrected
## jets
jetCorrector = cms.string("topDQMak5PFCHSL2L3"),
## when omitted monitor plots will be filled w/o additional cut on
## jetID
# jetID = cms.PSet(
# label = cms.InputTag("ak4JetID"),
# select = cms.string(" ")
# ),
## when omitted no extra selection will be applied on jets before
## filling the monitor histograms; if jetCorrector is present the
## selection will be applied to corrected jets
select = cms.string("pt>15 & abs(eta)<2.5"), ## & emEnergyFraction>0.01"),
## when omitted monitor histograms for b-tagging will not be filled
jetBTaggers = cms.PSet(
trackCountingEff = cms.PSet(
label = cms.InputTag("pfTrackCountingHighEffBJetTags" ),
workingPoint = cms.double(1.25)
),
trackCountingPur = cms.PSet(
label = cms.InputTag("pfTrackCountingHighPurBJetTags" ),
workingPoint = cms.double(3.41)
),
secondaryVertex = cms.PSet(
label = cms.InputTag("pfSimpleSecondaryVertexHighEffBJetTags"),
workingPoint = cms.double(2.05)
),
combinedSecondaryVertex = cms.PSet(
label = cms.InputTag("pfCombinedInclusiveSecondaryVertexV2BJetTags"),
workingPoint = cms.double(0.970)
)
)
),
## [optional] : when omitted no mass window will be applied
## for the W mass before filling the event monitoring plots
# massExtras = cms.PSet(
# lowerEdge = cms.double( 70.),
# upperEdge = cms.double(110.)
# ),
## [optional] : when omitted the monitoring plots for triggering
## will be empty
# triggerExtras = cms.PSet(
# src = cms.InputTag("TriggerResults","","HLT"),
# paths = cms.vstring([ 'HLT_Ele15_LW_L1R:HLT_QuadJetU15'])
## paths = cms.vstring([''])
# )
),
## ------------------------------------------------------
## PRESELECTION
##
## setup of the event preselection, which will not
## be monitored
## [mandatory] : but may be empty
##
preselection = cms.PSet(
## [optional] : when omitted no preselection is applied
# trigger = cms.PSet(
# src = cms.InputTag("TriggerResults","","HLT"),
# select = cms.vstring(['HLT_Ele15_SW_CaloEleId_L1R'])
# ),
## [optional] : when omitted no preselection is applied
# vertex = cms.PSet(
# src = cms.InputTag("offlinePrimaryVertices"),
# select = cms.string('!isFake && ndof >= 4 && abs(z)<24. && position.Rho <= 2.0')
# )
),
## ------------------------------------------------------
## SELECTION
##
## monitor histograms are filled after each selection
## step, the selection is applied in the order defined
## by this vector
## [mandatory] : may be empty or contain an arbitrary
## number of PSets
selection = cms.VPSet(
cms.PSet(
label = cms.string("presel"),
src = cms.InputTag("offlinePrimaryVertices"),
select = cms.string('!isFake && ndof >= 4 && abs(z)<24. && position.Rho <= 2.0'),
),
cms.PSet(
label = cms.string("elecs/pf:step0"),
src = cms.InputTag("pfIsolatedElectronsEI"),
## electronId = cms.PSet( src = cms.InputTag("mvaTrigV0"), cutValue = cms.double(0.5) ),
select = cms.string("pt>30 & abs(eta)<2.5 & abs(gsfElectronRef.gsfTrack.d0)<0.02 && gsfElectronRef.gsfTrack.hitPattern().numberOfHits('MISSING_INNER_HITS') <= 0 && (abs(gsfElectronRef.superCluster.eta) <= 1.4442 || abs(gsfElectronRef.superCluster.eta) >= 1.5660) && " + EletightIsoCut),
min = cms.int32(1),
max = cms.int32(1),
),
cms.PSet(
label = cms.string("jets/pf:step1"),
src = cms.InputTag("ak4PFJetsCHS"),
jetCorrector = cms.string("topDQMak5PFCHSL2L3"),
select = cms.string("pt>30 & abs(eta)<4.5 & numberOfDaughters>1 & ((abs(eta)>2.4) || ( chargedHadronEnergyFraction > 0 & chargedMultiplicity>0 & chargedEmEnergyFraction<0.99)) & neutralEmEnergyFraction < 0.99 & neutralHadronEnergyFraction < 0.99"),
min = cms.int32(1),
max = cms.int32(1),
),
cms.PSet(
label = cms.string("jets/pf:step2"),
src = cms.InputTag("ak4PFJetsCHS"),
jetCorrector = cms.string("topDQMak5PFCHSL2L3"),
select = cms.string("pt>30 & abs(eta)<4.5 & numberOfDaughters>1 & ((abs(eta)>2.4) || ( chargedHadronEnergyFraction > 0 & chargedMultiplicity>0 & chargedEmEnergyFraction<0.99)) & neutralEmEnergyFraction < 0.99 & neutralHadronEnergyFraction < 0.99"),
min = cms.int32(2),
max = cms.int32(2),
),
)
)
| 47.085903 | 574 | 0.596903 | 2,230 | 21,377 | 5.700448 | 0.125561 | 0.03304 | 0.03068 | 0.021476 | 0.895453 | 0.875236 | 0.873112 | 0.871617 | 0.859975 | 0.846602 | 0 | 0.030332 | 0.261262 | 21,377 | 453 | 575 | 47.189845 | 0.774633 | 0.512934 | 0 | 0.755102 | 0 | 0.061224 | 0.39285 | 0.241438 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.005102 | 0 | 0.005102 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
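The jetBTaggers block in the config above repeats one label/workingPoint pattern per tagger. A minimal sketch of how such entries could be factored with FWCore.ParameterSet.Config, assuming a CMSSW environment where that package is importable; the helper name btag_pset is hypothetical, not part of the original file:

import FWCore.ParameterSet.Config as cms

def btag_pset(collection, working_point):
    # one b-tagger entry in the same shape as the jetBTaggers sub-PSets above
    return cms.PSet(
        label = cms.InputTag(collection),
        workingPoint = cms.double(working_point),
    )

jetBTaggers = cms.PSet(
    trackCountingEff = btag_pset("pfTrackCountingHighEffBJetTags", 1.25),
    secondaryVertex  = btag_pset("pfSimpleSecondaryVertexHighEffBJetTags", 2.05),
)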
8f0e4a03ec8085cbe021ef188a2d68be34464d85 | 5,838 | py | Python | cookie_test.py | tor4z/python_test | 6b18110b4e82ad00a065b03d0ee8f7f331b2f874 | [
"Unlicense"
] | null | null | null | cookie_test.py | tor4z/python_test | 6b18110b4e82ad00a065b03d0ee8f7f331b2f874 | [
"Unlicense"
] | null | null | null | cookie_test.py | tor4z/python_test | 6b18110b4e82ad00a065b03d0ee8f7f331b2f874 | [
"Unlicense"
] | null | null | null | from http import cookies, cookiejar
# raw HTTP response capture (repr fragments pasted verbatim) carrying many Set-Cookie headers
rawcookie = b"""
b'HTTP/1.1 200 OK\r\nServer: Tengine\r\nContent-Type: application/json\r\nTransfer-Encoding: chunked\r\nConnection: keep-alive\r\nVary: Accept-Encoding\r\nDate: Tue, 08 May 2018 22:19:06 GMT\r\nVary: Accept-Encoding\r\nX-Powered-By: PHP/7.0.13\r\nSet-Cookie: ac_username=%E5%A6%82%E6%9E%9C%E7%94%B5%E8%AF%9D%E4%BA%AD; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.acfun.cn\r\nSet-Cookie: ac_userimg=http://cdn.aixifan.com/dotnet/artemis/u/cms/www/201801/24110804az0eyq3e.jpg; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.acfun.cn\r\nSet-Cookie: auth_key=827525; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.acfun.cn\r\nSet-Cookie: auth_key_ac_sha1=-1547565454; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.acfun.cn\r\nSet-Cookie: auth_key_ac_sha1_=WDMrecQAhtUC++fk5emg7dgd4sa2=; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.acfun.cn\r\nSet-Cookie: checkEmail=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; pat'
b'h=/; domain=.acfun.cn\r\nSet-Cookie: checkMobile=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.acfun.cn\r\nSet-Cookie: userGroupLevel=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.acfun.cn\r\nSet-Cookie: checkReal=0; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.acfun.cn\r\nSet-Cookie: ac_username=%E5%A6%82%E6%9E%9C%E7%94%B5%E8%AF%9D%E4%BA%AD; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: ac_userimg=http://cdn.aixifan.com/dotnet/artemis/u/cms/www/201801/24110804az0eyq3e.jpg; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: auth_key=827525; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: auth_key_ac_sha1=-1547565454; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: auth_key_ac_sha1_=WDMrecQAhtUC++fk5emg7dgd4sa2=; expires=Thu, 07-Jun-2018 22:19:'
b'06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: checkEmail=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: checkMobile=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: userGroupLevel=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: checkReal=0; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: ac_username=%E5%A6%82%E6%9E%9C%E7%94%B5%E8%AF%9D%E4%BA%AD; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: ac_userimg=http://cdn.aixifan.com/dotnet/artemis/u/cms/www/201801/24110804az0eyq3e.jpg; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: auth_key=827525; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: auth_key_ac_sha1=-1547565454; expires=Thu, 07-Jun-2018 22:'
b'19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: auth_key_ac_sha1_=WDMrecQAhtUC++fk5emg7dgd4sa2=; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: checkEmail=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: checkMobile=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: userGroupLevel=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: checkReal=0; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.hapame.com\r\nSet-Cookie: ac_login_error=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT; Max-Age=0; path=/login; domain=http://www.acfun.cn\r\nCache-Control: no-cache\r\nSet-Cookie: XSRF-TOKEN=eyJpdiI6IldvM1hCd3E5UjBhZjlTQXNNTDRwcUE9PSIsInZhbHVlIjoiWmlnSjFBUVVtblZaMTlUdFpRSmlaY1lONUdING1TQ2Q1Z3IrVVFIWmlBY0VUU3FRc3NYOUlYOGdnZ0tpWEV6NXRsZTUzTHQyejRESU1sR3pwdGxhQkE9PSIsIm1hYyI6ImJhNjA3OTRkOTUzZmEzM'
b'WUyMTBiZDM2ZWQyYjEwYTJmMDkwZjA4ZmZkMTJjMDg3ZjE3YWVkNGQzMTMxZjM3ZjQifQ%3D%3D; expires=Wed, 09-May-2018 00:19:06 GMT; Max-Age=7200; path=/; domain=.acfun.cn\r\nSet-Cookie: ap_session=eyJpdiI6IlBodFwveXlacmFkbmJtZmI3UlJNQ2V3PT0iLCJ2YWx1ZSI6IjBOcGZnZ1ZXbFpPVFVIQ1F2Q1JCRVB1eUhGYTRxUmt3Yld0RVlVZmF0VGRkekhsR3ZPdlRCSnpRZVc1aVl1MGpMNDFsaVNUZVloT2dnbGxhZGIzWEdRPT0iLCJtYWMiOiIxYzgwNTBjYmRiMTI5ODczMjUyYWIzYjlmOTQ4MTUyN2Q4N2Q3MjQ2NTk2ZmJmNjI1YTY3Nzk3ODVhMjk2MGM0In0%3D; expires=Wed, 09-May-2018 00:19:06 GMT; Max-Age=7200; path=/; domain=.acfun.cn; HttpOnly\r\nVia: cache15.l2st4-2[211,200-0,M], cache18.l2st4-2[212,0], kunlun7.cn116[217,200-0,M], kunlun4.cn116[218,0]\r\nX-Cache: MISS TCP_MISS dirn:-2:-2 mlen:-1\r\nX-Swift-SaveTime: Tue, 08 May 2018 22:19:06 GMT\r\nX-Swift-CacheTime: 0\r\nTiming-Allow-Origin: *\r\nEagleId: 7793970415258179458416943e\r\n\r\nb3\r\n{"success":true,"img":"http:\\/\\/cdn.aixifan.com\\/dotnet\\/artemis\\/u\\/cms\\/www\\/201801\\/24110804az0eyq3e.jpg","username":"\\u5982\\u679c\\u7535\\u8bdd\\u4ead","errorid":0,"waiting":111}\r\n0\r\n\r\n'
"""
# decode the capture and dump it line by line to eyeball the Set-Cookie headers
lst = rawcookie.decode().split("\r\n")
for item in lst:
print("---")
print(item)
# pice_cookie = b"Set-Cookie: checkEmail=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\nSet-Cookie: checkMobile=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com\r\n"
# cookie = cookies.SimpleCookie()
# cookie.load(b"checkEmail=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; Max-Age=2592000; path=/; domain=.aixifan.com".decode())
# print(cookie.output()) | 307.263158 | 1,067 | 0.757794 | 995 | 5,838 | 4.411055 | 0.171859 | 0.038733 | 0.052632 | 0.080201 | 0.712235 | 0.702894 | 0.699932 | 0.697425 | 0.697425 | 0.6874 | 0 | 0.179585 | 0.058582 | 5,838 | 19 | 1,068 | 307.263158 | 0.618996 | 0.072285 | 0 | 0 | 0 | 0.416667 | 0.973208 | 0.471175 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.083333 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
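The commented-out tail of cookie_test.py hints at the intended http.cookies usage. A minimal runnable sketch of that idea, under the assumption that one Set-Cookie header value is fed to load() at a time (SimpleCookie is not built to digest a whole multi-header response in one call):

from http import cookies

header_value = ("checkEmail=1; expires=Thu, 07-Jun-2018 22:19:06 GMT; "
                "Max-Age=2592000; path=/; domain=.aixifan.com")
cookie = cookies.SimpleCookie()
cookie.load(header_value)              # parses the name, value, and attributes
print(cookie["checkEmail"].value)      # -> 1
print(cookie["checkEmail"]["domain"])  # -> .aixifan.com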
8f5d899979da6802690056da75491d4db41a4e93 | 141 | py | Python | src/modules/sys_functions/__init__.py | Nobregaigor/FEBio-Python | 1ad5578af00e44bd6def06ee17538ac5e4375a38 | [
"MIT"
] | null | null | null | src/modules/sys_functions/__init__.py | Nobregaigor/FEBio-Python | 1ad5578af00e44bd6def06ee17538ac5e4375a38 | [
"MIT"
] | null | null | null | src/modules/sys_functions/__init__.py | Nobregaigor/FEBio-Python | 1ad5578af00e44bd6def06ee17538ac5e4375a38 | [
"MIT"
] | null | null | null | from .get_sys_args import *
from .find_files_in_folder import *
from .read_files import *
from .write_files import *
from .next_path import * | 28.2 | 35 | 0.794326 | 23 | 141 | 4.521739 | 0.565217 | 0.384615 | 0.288462 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.134752 | 141 | 5 | 36 | 28.2 | 0.852459 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
56e1a6e25709680e68744b663faf7812700a764c | 107 | py | Python | skypond/games/four_keys/agents/__init__.py | upkoi/skypond | 5e366a18f2c5c85ce7b092d69b28c8f8aaad8718 | [
"MIT"
] | null | null | null | skypond/games/four_keys/agents/__init__.py | upkoi/skypond | 5e366a18f2c5c85ce7b092d69b28c8f8aaad8718 | [
"MIT"
] | null | null | null | skypond/games/four_keys/agents/__init__.py | upkoi/skypond | 5e366a18f2c5c85ce7b092d69b28c8f8aaad8718 | [
"MIT"
] | 2 | 2019-06-13T18:08:01.000Z | 2019-06-17T02:42:19.000Z | from __future__ import absolute_import
from . import random_accumulating_agent
from . import random_agent
| 21.4 | 39 | 0.859813 | 14 | 107 | 6 | 0.5 | 0.238095 | 0.380952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121495 | 107 | 4 | 40 | 26.75 | 0.893617 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
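The two package __init__.py files above re-export submodules in different styles. A sketch of the practical difference, using a hypothetical package pkg with a submodule utils that defines helper():

# pkg/__init__.py, next to pkg/utils.py

from .utils import *    # FEBio-Python style: pkg.helper(...) works directly;
                        # utils.py can set __all__ to limit what is re-exported

from . import utils     # skypond style: names stay namespaced as pkg.utils.helper(...)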
71339dbb9f4301affdd499a27de92a721e7e686a | 19,952 | py | Python | app/ui.py | BeingGod/ISBN-Recognize-System | 89dad5f6f813000054646aff9bc539d8f9ea2082 | [
"Apache-2.0"
] | 1 | 2021-10-31T07:55:05.000Z | 2021-10-31T07:55:05.000Z | app/ui.py | BeingGod/ISBN-Recognize-System | 89dad5f6f813000054646aff9bc539d8f9ea2082 | [
"Apache-2.0"
] | null | null | null | app/ui.py | BeingGod/ISBN-Recognize-System | 89dad5f6f813000054646aff9bc539d8f9ea2082 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'app_ui.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(800, 600)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.runButton = QtWidgets.QPushButton(self.centralwidget)
self.runButton.setGeometry(QtCore.QRect(480, 500, 161, 51))
font = QtGui.QFont()
font.setPointSize(17)
self.runButton.setFont(font)
self.runButton.setObjectName("runButton")
self.imageLabel = QtWidgets.QLabel(self.centralwidget)
self.imageLabel.setGeometry(QtCore.QRect(120, 20, 571, 401))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.imageLabel.setPalette(palette)
self.imageLabel.setAutoFillBackground(True)
self.imageLabel.setStyleSheet("")
self.imageLabel.setObjectName("imageLabel")
self.openImgButton = QtWidgets.QPushButton(self.centralwidget)
self.openImgButton.setGeometry(QtCore.QRect(180, 500, 161, 51))
font = QtGui.QFont()
font.setPointSize(17)
self.openImgButton.setFont(font)
self.openImgButton.setObjectName("openImgButton")
self.outputLabel = QtWidgets.QLabel(self.centralwidget)
self.outputLabel.setGeometry(QtCore.QRect(220, 440, 471, 41))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.outputLabel.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(17)
self.outputLabel.setFont(font)
self.outputLabel.setStyleSheet("QLabel\n"
"{\n"
" background-color:\"white\"\n"
"}")
self.outputLabel.setScaledContents(False)
self.outputLabel.setAlignment(QtCore.Qt.AlignCenter)
self.outputLabel.setObjectName("outputLabel")
self.resultLabel_2 = QtWidgets.QLabel(self.centralwidget)
self.resultLabel_2.setGeometry(QtCore.QRect(120, 440, 81, 41))
font = QtGui.QFont()
font.setPointSize(15)
self.resultLabel_2.setFont(font)
self.resultLabel_2.setObjectName("resultLabel_2")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 20))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.runButton.setText(_translate("MainWindow", "RUN"))
self.imageLabel.setText(_translate("MainWindow", "<html><head/><body col><p><br/></p></body></html>"))
self.openImgButton.setText(_translate("MainWindow", "OPEN"))
self.outputLabel.setText(_translate("MainWindow", "none"))
self.resultLabel_2.setText(_translate("MainWindow", "Result:"))
| 57.498559 | 110 | 0.694868 | 2,321 | 19,952 | 5.966825 | 0.060319 | 0.170843 | 0.103979 | 0.136472 | 0.861001 | 0.845115 | 0.845115 | 0.842516 | 0.842516 | 0.842516 | 0 | 0.046075 | 0.180884 | 19,952 | 346 | 111 | 57.66474 | 0.801322 | 0.008972 | 0 | 0.835329 | 1 | 0 | 0.013559 | 0.001518 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005988 | false | 0 | 0.002994 | 0 | 0.011976 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
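Ui_MainWindow above is a plain pyuic5-generated class: it is not a widget itself, it builds widgets onto the window handed to setupUi. A minimal sketch of the usual entry point, assuming the generated module is importable as ui (the module name is an assumption):

import sys
from PyQt5 import QtWidgets
from ui import Ui_MainWindow   # module name is an assumption

if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    form = Ui_MainWindow()
    form.setupUi(window)       # populates the window with the generated widgets
    window.show()
    sys.exit(app.exec_())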