hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
95ba6ec964da8e4fcf2344741431614d2e3d0148
12,429
py
Python
rbnf/core/parser_algo/for_atom.py
thautwarm/Ruiko
cceec88c90f7ec95c160cfda01bfc532610985e0
[ "MIT" ]
44
2018-06-08T04:52:02.000Z
2021-08-06T04:59:56.000Z
rbnf/core/parser_algo/for_atom.py
thautwarm/Ruiko
cceec88c90f7ec95c160cfda01bfc532610985e0
[ "MIT" ]
2
2018-06-21T14:42:27.000Z
2018-12-10T06:15:46.000Z
rbnf/core/parser_algo/for_atom.py
thautwarm/Ruiko
cceec88c90f7ec95c160cfda01bfc532610985e0
[ "MIT" ]
2
2018-06-18T10:43:20.000Z
2019-05-06T13:51:51.000Z
from ..ParserC import Composed, Atom, Parser, LRFunc from ..Tokenizer import Tokenizer from ..State import State from ..Result import * from ..AST import * from Redy.Opt import feature, constexpr, const, goto, label from typing import Sequence, List staging = (const, constexpr) @Atom.match.case(Atom.Any) def _any_match(_, tokenizers, state): try: token = tokenizers[state.end_index] except IndexError: return Result.mismatched state.new_one() return Result.match(token) @Atom.as_fixed.case(Atom.Any) def as_fixed(self, _): @feature(staging) def any_match(tokenizers, state): try: token = tokenizers[state.end_index] except IndexError: return constexpr[Result.mismatched] state.new_one() return constexpr[Result.match](token) self.match = any_match @Atom.match.case(Atom.Bind) def _bind_match(self: Atom, tokenizers, state): _, name, parser = self result = parser.match(tokenizers, state) if result.status is FindLR: lr_parser, stacked_func = result.value def stacked_func_(ast): stacked_result = stacked_func(ast) if stacked_result.status is Matched: state.ctx = state.ctx.copy() state.ctx[name] = stacked_result.value return stacked_result return Result.find_lr(lr_parser, stacked_func_) elif result.status is Matched: ctx = state.ctx = state.ctx.copy() ctx[name] = result.value return result @Atom.as_fixed.case(Atom.Bind) def as_fixed(self, lang): _, name_, parser_ = self parser_.as_fixed(lang) @feature(staging) def bind_match(tokenizers, state): name: const = name_ match: const = parser_.match result = match(tokenizers, state) if result.status is constexpr[FindLR]: lr_parser, stacked_func = result.value def stacked_func_(ast): stacked_result = stacked_func(ast) if stacked_result.status is Matched: state.ctx = state.ctx.copy() state.ctx[name] = stacked_result.value return stacked_result return constexpr[Result.find_lr](lr_parser, stacked_func_) elif result.status is constexpr[Matched]: ctx = state.ctx = state.ctx.copy() ctx[name] = result.value return result self.match = bind_match 
return self @Atom.match.case(Atom.Push) def _push_match(self: Atom, tokenizers: Sequence[Tokenizer], state: State): _, name, parser = self result = parser.match(tokenizers, state) if result.status is FindLR: lr_parser, stacked_func = result.value def stacked_func_(ast: AST): stacked_result = stacked_func(ast) if stacked_result.status is Matched: state.ctx = state.ctx.copy() try: state.ctx[name].append(stacked_result.value) except KeyError: state.ctx[name] = [stacked_result.value] return stacked_result return Result.find_lr(lr_parser, stacked_func_) elif result.status is Matched: ctx = state.ctx = state.ctx.copy() try: ctx[name].append(result.value) except KeyError: ctx[name] = [result.value] return result @Atom.as_fixed.case(Atom.Push) def as_fixed(self, lang): _, name_, parser_ = self parser_.as_fixed(lang) @feature(staging) def push_match(tokenizers, state): name: const = name_ match: const = parser_.match result = match(tokenizers, state) if result.status is constexpr[FindLR]: lr_parser, stacked_func = result.value def stacked_func_(ast): stacked_result = stacked_func(ast) if stacked_result.status is Matched: state.ctx = state.ctx.copy() try: state.ctx[name].append(stacked_result.value) except KeyError: state.ctx[name] = [stacked_result.value] return stacked_result return constexpr[Result.find_lr](lr_parser, stacked_func_) elif result.status is constexpr[Matched]: ctx = state.ctx = state.ctx.copy() try: ctx[name].append(result.value) except constexpr[KeyError]: ctx[name] = [result.value] return result self.match = push_match return self @Atom.match.case(Atom.Named) def _named_match(self, tokenizers, state): _, name = self lang = state.lang parser, when, with_, rewrite = lang[name] if when and not when(tokenizers, state): return Result.mismatched lr_marker = (state.end_index, name) if name in state: if lr_marker in state.lr: return Result.mismatched def stacked_func(ast): return Result(Matched, ast) return Result.find_lr(self, stacked_func) with 
state.leave_with_context_recovery(): state.append(name) state.ctx = {} history = state.commit() result = parser.match(tokenizers, state) if result.status is Matched: if with_ and not with_(tokenizers, state): return Result.mismatched return Result( Matched, rewrite(state) if rewrite else Named(name, result.value)) elif result.status is FindLR: parser_obj, stacked_func = result.value if parser_obj is not self: def stacked_func_(ast: AST): stacked_result = stacked_func(ast) if stacked_result.status is Matched: return Result.match( rewrite(state) if rewrite else Named( name, stacked_result.value)) return stacked_result return Result.find_lr(parser_obj, stacked_func_) else: return Result.mismatched # find lr and state.lr_name is name with state.left_recursion(lr_marker): state.reset(history) original_ctx = state.ctx.copy() result: Result = parser.match(tokenizers, state) if result.status is Unmatched: return result if with_ and not with_(tokenizers, state): return Result.mismatched head: Named = rewrite(state) if rewrite else Named( name, result.value) while True: with state.leave_with_context_recovery(): state.ctx = original_ctx.copy() res = stacked_func(head) if res.status is Unmatched: break head = rewrite(state) if rewrite else Named( name, res.value) result.value = head return result @Atom.as_fixed.case(Atom.Named) def as_fixed(self, lang): _, name_ = self parser_, when_, with__, rewrite_ = lang[name_] parser_.as_fixed(lang) @feature(staging) def name_match(tokenizers, state: State): when: const = when_ self_: const = self with_: const = with__ name: const = name_ rewrite: const = rewrite_ mismatched: const = Result.mismatched match: const = parser_.match if constexpr[when]: if not when(tokenizers, state): return mismatched lr_marker = (state.end_index, name) if name in state: if lr_marker in state.lr: return mismatched def stacked_func(ast): return Result(Matched, ast) return constexpr[Result.find_lr](self_, stacked_func) with state.leave_with_context_recovery(): 
state.append(name) state.ctx = {} history = state.commit() result = match(tokenizers, state) if result.status is constexpr[Matched]: if constexpr[with_]: if not with_(tokenizers, state): return mismatched return constexpr[Result]( constexpr[Matched], rewrite(state) if constexpr[rewrite] else constexpr[Named](name, result.value)) elif result.status is constexpr[FindLR]: lr_parser, stacked_func = result.value if lr_parser is not self_: if constexpr[rewrite]: def stacked_func_(ast: AST): stacked_result = stacked_func(ast) if stacked_result.status is Matched: return Result.match(rewrite(state)) return stacked_result else: def stacked_func_(ast: AST): stacked_result = stacked_func(ast) if stacked_result.status is Matched: return Result.match( Named(name, stacked_result.value)) return stacked_result return constexpr[Result.find_lr](lr_parser, stacked_func_) else: return mismatched # find lr and state.lr_name is name with state.left_recursion(lr_marker): state.reset(history) original_ctx = state.ctx.copy() result = match(tokenizers, state) if result.status is constexpr[Unmatched]: return result if constexpr[with_]: if not with_(tokenizers, state): return mismatched head = rewrite( state) if constexpr[rewrite] else constexpr[Named]( name, result.value) while True: with state.leave_with_context_recovery(): state.ctx = original_ctx.copy() res = stacked_func(head) if res.status is constexpr[Unmatched]: break head = rewrite( state) if constexpr[rewrite] else constexpr[Named]( name, res.value) result.value = head return result self.match = name_match @Atom.as_fixed.case(Atom.Guard) def _guard_as_fixed(self, lang): @feature(staging) def _guard_match(tokenizers: Sequence[Tokenizer], state: State) -> Result: match: const = self[1].match predicate: const = self[2] result = match(tokenizers, state) status = result.status if status is constexpr[Matched]: if not predicate(result.value, state): return constexpr[Result.mismatched] return result elif status is Unmatched: return result 
lr_parser, stacked_fn_ = result.value predicate_ = predicate matched = Matched mismatched = Result.mismatched def stacked_fn(ast): result = stacked_fn_(ast) if result.status is not matched or not predicate_( result.value, state): return mismatched return result return constexpr[Result.find_lr](lr_parser, stacked_fn) return _guard_match @Atom.match.case(Atom.Guard) def _guard_match(self, tokenizers: Sequence[Tokenizer], state: State) -> Result: result = self[1].match(tokenizers, state) status = result.status if status is Matched: if not self[2](result.value, state): return Result.mismatched return result elif status is Unmatched: return result lr_parser, stacked_fn_ = result.value def stacked_fn(ast: AST): result = stacked_fn_(ast) if result.status is not Matched or not self[2](result.value, state): return Result.mismatched return result return Result.find_lr(lr_parser, stacked_fn) @Atom.as_fixed.case(None) def do_nothing(self, lang): pass
30.764851
79
0.565291
1,348
12,429
5.040059
0.069733
0.060053
0.047395
0.02355
0.824257
0.782602
0.732411
0.706653
0.691492
0.6204
0
0.000625
0.356585
12,429
403
80
30.841191
0.848943
0.005391
0
0.672131
0
0
0
0
0
0
0
0
0
1
0.088525
false
0.003279
0.022951
0.006557
0.304918
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
95f1fe0019b672e234542bcf13339869b178fc79
24
py
Python
amalgkit/__init__.py
kfuku52/amalgk
aae10f9590cb37eac357dc1b6c1d93be2a44d0d2
[ "BSD-3-Clause" ]
3
2019-10-22T08:06:03.000Z
2021-11-29T01:44:18.000Z
amalgkit/__init__.py
kfuku52/amalgk
aae10f9590cb37eac357dc1b6c1d93be2a44d0d2
[ "BSD-3-Clause" ]
91
2019-04-04T12:12:27.000Z
2022-03-11T13:14:43.000Z
amalgkit/__init__.py
kfuku52/amalgk
aae10f9590cb37eac357dc1b6c1d93be2a44d0d2
[ "BSD-3-Clause" ]
1
2021-11-24T13:55:49.000Z
2021-11-24T13:55:49.000Z
__version__ = '0.6.4.12'
24
24
0.666667
5
24
2.4
1
0
0
0
0
0
0
0
0
0
0
0.227273
0.083333
24
1
24
24
0.318182
0
0
0
0
0
0.32
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
25135e0bc8e659cdbe63c01b10813387c0498960
282
py
Python
python/anyascii/_data/_2b0.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
python/anyascii/_data/_2b0.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
python/anyascii/_data/_2b0.py
casept/anyascii
d4f426b91751254b68eaa84c6cd23099edd668e6
[ "ISC" ]
null
null
null
b=' Shi Min Jian Chu Dang Ba Zhuan Li Wu Wei Shuang Min Gui Fei Ai Su Sa Jiu Kou'
282
282
0.20922
20
282
2.95
0.95
0
0
0
0
0
0
0
0
0
0
0
0.780142
282
1
282
282
0.951613
0
0
0
0
0
0.982332
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
1
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
2528940931640539cbcda0608b0e324519dbdba6
129
py
Python
Obiora_Fortune/Phase1/Python Basic 1/Task2.py
CodedLadiesInnovateTech/-python-challenge-solutions
430cd3eb84a2905a286819eef384ee484d8eb9e7
[ "MIT" ]
6
2020-05-23T19:53:25.000Z
2021-05-08T20:21:30.000Z
Obiora_Fortune/Phase1/Python Basic 1/Task2.py
CodedLadiesInnovateTech/-python-challenge-solutions
430cd3eb84a2905a286819eef384ee484d8eb9e7
[ "MIT" ]
8
2020-05-14T18:53:12.000Z
2020-07-03T00:06:20.000Z
Obiora_Fortune/Phase1/Python Basic 1/Task2.py
CodedLadiesInnovateTech/-python-challenge-solutions
430cd3eb84a2905a286819eef384ee484d8eb9e7
[ "MIT" ]
39
2020-05-10T20:55:02.000Z
2020-09-12T17:40:59.000Z
''' 2. Write a Python program to get the Python version you are using Tools: sys module ''' #2 import sys print(sys.version)
16.125
65
0.705426
22
129
4.136364
0.772727
0
0
0
0
0
0
0
0
0
0
0.019417
0.20155
129
8
66
16.125
0.864078
0.689922
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
c2669ef97b9535d329b8261deb940b094031d909
137
py
Python
run.py
earthza001/hack-Facebook-through-the-loophole
f81113f577e849c4f76f5015a6f95bef9ac38201
[ "Unlicense" ]
null
null
null
run.py
earthza001/hack-Facebook-through-the-loophole
f81113f577e849c4f76f5015a6f95bef9ac38201
[ "Unlicense" ]
null
null
null
run.py
earthza001/hack-Facebook-through-the-loophole
f81113f577e849c4f76f5015a6f95bef9ac38201
[ "Unlicense" ]
null
null
null
import os print ("กำลังค้นหาช่องโหว่.....") os.system("wget https://google.com") os.system("clear") time.sleep(7) os.system(clear)
19.571429
36
0.649635
28
137
3.357143
0.75
0.255319
0.276596
0
0
0
0
0
0
0
0
0.008197
0.109489
137
6
37
22.833333
0.721311
0
0
0
0
0
0.372263
0.167883
0
0
0
0
0
0
null
null
0
0.166667
null
null
0.166667
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
c2775cb35ba3f4f16401c150f17f17831e17093c
214
py
Python
geospacepy/__init__.py
rmcgranaghan/geospacepy-lite
9fe60d312fd88bc184b4ed7a358f6f069be02f3b
[ "MIT" ]
null
null
null
geospacepy/__init__.py
rmcgranaghan/geospacepy-lite
9fe60d312fd88bc184b4ed7a358f6f069be02f3b
[ "MIT" ]
null
null
null
geospacepy/__init__.py
rmcgranaghan/geospacepy-lite
9fe60d312fd88bc184b4ed7a358f6f069be02f3b
[ "MIT" ]
null
null
null
""" Geospacepy init file """ #import yaml,pkg_resources #config_file_as_str = pkg_resources.resource_string(__name__,'geospacepy_config') from geospacepy import geospacepy_config config = geospacepy_config.config
23.777778
81
0.831776
27
214
6.111111
0.518519
0.290909
0.266667
0
0
0
0
0
0
0
0
0
0.084112
214
8
82
26.75
0.841837
0.588785
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
c293a387c96cbbeced31fc20445df6d57f9290e4
11,016
py
Python
mypeople.py
MatheusMatozo/Library-software-01
9b6f745226bec25de4563d2fe9d9f378e0530a8d
[ "MIT" ]
null
null
null
mypeople.py
MatheusMatozo/Library-software-01
9b6f745226bec25de4563d2fe9d9f378e0530a8d
[ "MIT" ]
null
null
null
mypeople.py
MatheusMatozo/Library-software-01
9b6f745226bec25de4563d2fe9d9f378e0530a8d
[ "MIT" ]
null
null
null
from tkinter import * import sqlite3 import addpeople from tkinter import messagebox con=sqlite3.connect('database.db') cur = con.cursor() class MyPoeple(Toplevel): def __init__(self): Toplevel.__init__(self) self.geometry("650x650+620+200") self.title("My Poeple") self.resizable(False,False) # Frames self.top = Frame(self, height=150, bg='white') self.top.pack(fill=X) self.bottomFrame = Frame(self, height=500, bg='#fcc324') self.bottomFrame.pack(fill=X) # Heading, image and date self.top_image = PhotoImage(file='icons/person_icon.png') self.top_image_lbl = Label(self.top, image=self.top_image, bg='white') self.top_image_lbl.place(x=120, y=10) self.heading = Label(self.top, text='My Persons', font='arial 15 bold', fg='#003f8a', bg='white') self.heading.place(x=260, y=60) #ScrollBar self.sb=Scrollbar(self.bottomFrame,orient=VERTICAL) #listbox self.listBox=Listbox(self.bottomFrame,width=60,height=31) self.listBox.grid(row=0, column=0,padx=(40,0)) self.sb.config(command= self.listBox.yview) self.listBox.config(yscrollcommand=self.sb.set) self.sb.grid(row=0,column=1,sticky=N+S) persons=cur.execute("SELECT * FROM persons").fetchall() print(persons) count=0 for person in persons: self.listBox.insert(count,str(person[0])+"-"+person[1]+" "+person[2]) count +=1 #Buttons btnadd=Button(self.bottomFrame,text='Add',width=12,font='Sans 12 bold',command=self.funcaddPeople) btnadd.grid(row=0,column=2,sticky=N,padx=10,pady=10) btnupdate = Button(self.bottomFrame, text='Update', width=12, font='Sans 12 bold', command=self.funcUpdatePerson) btnupdate.grid(row=0, column=2, sticky=N, padx=10, pady=50) btndisplay = Button(self.bottomFrame, text='Display', width=12, font='Sans 12 bold', command=self.funcDisplayPerson) btndisplay.grid(row=0, column=2, sticky=N, padx=10, pady=90) btndelete = Button(self.bottomFrame, text='Delete', width=12, font='Sans 12 bold', command=self.funcDeletePerson) btndelete.grid(row=0, column=2, sticky=N, padx=10, pady=130) def funcaddPeople(self): 
addpage=addpeople.AddPeople() self.destroy() def funcUpdatePerson(self): global person_id selected_item=self.listBox.curselection() person=self.listBox.get(selected_item) person_id=person.split("-")[0] updatepage=Update() def funcDisplayPerson(self): global person_id selected_item = self.listBox.curselection() person= self.listBox.get(selected_item) person_id=person.split("-")[0] displaypage=Display() self.destroy() def funcDeletePerson(self): selected_item=self.listBox.curselection() person=self.listBox.get(selected_item) person_id=person.split("-")[0] mbox= messagebox.askquestion("Warning","Are you sure to delete this person",icon='warning') if mbox == 'yes': try: cur.execute("DELETE FROM persons WHERE person_id=?", (person_id,)) con.commit() messagebox.showinfo("Success","Person has been deleted!") self.destroy() except: messagebox.showinfo("Info","Person has not been deleted!") class Update(Toplevel): def __init__(self): Toplevel.__init__(self) self.geometry("650x750+550+200") self.title("Update Person") self.resizable(False,False) #get person from database global person_id person=cur.execute("SELECT * FROM persons WHERE person_id =?",(person_id,)) person_info = person.fetchall() print(person_info) self.person_id=person_info[0][0] self.person_name=person_info[0][1] self.person_surname=person_info[0][2] self.person_email=person_info[0][3] self.person_phone=person_info[0][4] self.person_address=person_info[0][5] # Frames self.top = Frame(self, height=150, bg='white') self.top.pack(fill=X) self.bottomFrame = Frame(self, height=600, bg='#fcc324') self.bottomFrame.pack(fill=X) # Heading, image and date self.top_image = PhotoImage(file='icons/addperson.png') self.top_image_lbl = Label(self.top, image=self.top_image, bg='white') self.top_image_lbl.place(x=120, y=10) self.heading = Label(self.top, text='My Persons', font='arial 15 bold', fg='#003f8a', bg='white') self.heading.place(x=260, y=60) 
############################################################################## # labels and entries # name self.lbl_name = Label(self.bottomFrame, text='Name', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_name.place(x=40, y=40) self.ent_name = Entry(self.bottomFrame, width=30, bd=4) self.ent_name.insert(0, self.person_name) self.ent_name.place(x=150, y=45) # Surname self.lbl_surname = Label(self.bottomFrame, text='Surname', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_surname.place(x=40, y=80) self.ent_surname = Entry(self.bottomFrame, width=30, bd=4) self.ent_surname.insert(0, self.person_surname) self.ent_surname.place(x=150, y=85) # email self.lbl_email = Label(self.bottomFrame, text='Email', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_email.place(x=40, y=120) self.ent_email = Entry(self.bottomFrame, width=30, bd=4) self.ent_email.insert(0, self.person_email) self.ent_email.place(x=150, y=125) # Phone Number self.lbl_phone = Label(self.bottomFrame, text='Phone', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_phone.place(x=40, y=160) self.ent_phone = Entry(self.bottomFrame, width=30, bd=4) self.ent_phone.insert(0, self.person_phone) self.ent_phone.place(x=150, y=165) # Address self.lbl_address = Label(self.bottomFrame, text='Address', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_address.place(x=40, y=300) self.address = Text(self.bottomFrame, width=23, height=15, wrap=WORD) self.address.insert('1.0',self.person_address) self.address.place(x=150, y=200) # Button button = Button(self.bottomFrame, text='Update Person',command=self.updatePerson) button.place(x=270, y=460) self.lift() def updatePerson(self): person_id=self.person_id person_name=self.ent_name.get() person_surname=self.ent_surname.get() person_email = self.ent_email.get() person_phone = self.ent_phone.get() person_address = self.address.get(1.0,'end-1c') try: query= "UPDATE persons set person_name =?, person_surname=?, person_email=?, 
person_phone=?, person_address =? WHERE person_id=?" cur.execute(query,(person_name,person_surname,person_email,person_phone,person_address,person_id)) con.commit() messagebox.showinfo("Success","Person has been updated") self.destroy() except: messagebox.showinfo("Warning", "Person has not been updated",icon='warning') class Display(Toplevel): def __init__(self): Toplevel.__init__(self) self.geometry("650x750+550+200") self.title("Display Person") self.resizable(False,False) # get person from database global person_id person = cur.execute("SELECT * FROM persons WHERE person_id =?", (person_id,)) person_info = person.fetchall() print(person_info) self.person_id = person_info[0][0] self.person_name = person_info[0][1] self.person_surname = person_info[0][2] self.person_email = person_info[0][3] self.person_phone = person_info[0][4] self.person_address = person_info[0][5] # Frames self.top = Frame(self, height=150, bg='white') self.top.pack(fill=X) self.bottomFrame = Frame(self, height=600, bg='#fcc324') self.bottomFrame.pack(fill=X) # Heading, image and date self.top_image = PhotoImage(file='icons/addperson.png') self.top_image_lbl = Label(self.top, image=self.top_image, bg='white') self.top_image_lbl.place(x=120, y=10) self.heading = Label(self.top, text='My Persons', font='arial 15 bold', fg='#003f8a', bg='white') self.heading.place(x=260, y=60) ############################################################################## # labels and entries # name self.lbl_name = Label(self.bottomFrame, text='Name', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_name.place(x=40, y=40) self.ent_name = Entry(self.bottomFrame, width=30, bd=4) self.ent_name.insert(0, self.person_name) self.ent_name.config(state='disabled') self.ent_name.place(x=150, y=45) # Surname self.lbl_surname = Label(self.bottomFrame, text='Surname', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_surname.place(x=40, y=80) self.ent_surname = Entry(self.bottomFrame, width=30, bd=4) 
self.ent_surname.insert(0, self.person_surname) self.ent_surname.config(state='disabled') self.ent_surname.place(x=150, y=85) # email self.lbl_email = Label(self.bottomFrame, text='Email', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_email.place(x=40, y=120) self.ent_email = Entry(self.bottomFrame, width=30, bd=4) self.ent_email.insert(0, self.person_email) self.ent_email.config(state='disabled') self.ent_email.place(x=150, y=125) # Phone Number self.lbl_phone = Label(self.bottomFrame, text='Phone', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_phone.place(x=40, y=160) self.ent_phone = Entry(self.bottomFrame, width=30, bd=4) self.ent_phone.insert(0, self.person_phone) self.ent_phone.config(state='disabled') self.ent_phone.place(x=150, y=165) # Address self.lbl_address = Label(self.bottomFrame, text='Address', font='arial 15 bold', fg='white', bg='#fcc324') self.lbl_address.place(x=40, y=300) self.address = Text(self.bottomFrame, width=23, height=15, wrap=WORD) self.address.insert('1.0', self.person_address) self.address.config(state='disabled') self.address.place(x=150, y=200)
40.058182
141
0.608115
1,447
11,016
4.511403
0.130615
0.075827
0.027574
0.029871
0.788756
0.738358
0.738358
0.729013
0.709406
0.702206
0
0.048103
0.241376
11,016
274
142
40.20438
0.733038
0.026235
0
0.656716
0
0
0.113408
0.001991
0
0
0
0
0
1
0.039801
false
0
0.019901
0
0.074627
0.014925
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c2a8282782b8bfb50e02fa31ee322656d45baf66
63
py
Python
happy_shop/models/__init__.py
xingfuggz/django-happy-shop
a20e4ba4cbafa0673bf9e20fdef127942a2fa94a
[ "MIT" ]
null
null
null
happy_shop/models/__init__.py
xingfuggz/django-happy-shop
a20e4ba4cbafa0673bf9e20fdef127942a2fa94a
[ "MIT" ]
null
null
null
happy_shop/models/__init__.py
xingfuggz/django-happy-shop
a20e4ba4cbafa0673bf9e20fdef127942a2fa94a
[ "MIT" ]
null
null
null
from .models import * from .goods import * from .order import *
21
21
0.730159
9
63
5.111111
0.555556
0.434783
0
0
0
0
0
0
0
0
0
0
0.174603
63
3
22
21
0.884615
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c2e20e6babf99770230957051803db2d0ee1ee07
201
py
Python
tests/custom/test_code_to_empty.py
arkhn/cleaning-scripts
ffe88598b476b2e6b53fd06e8ce6092ef0351b19
[ "Apache-2.0" ]
9
2019-03-31T03:46:51.000Z
2020-05-20T13:05:06.000Z
tests/custom/test_code_to_empty.py
arkhn/cleaning-scripts
ffe88598b476b2e6b53fd06e8ce6092ef0351b19
[ "Apache-2.0" ]
18
2019-09-11T09:19:45.000Z
2021-07-13T09:16:23.000Z
tests/custom/test_code_to_empty.py
arkhn/cleaning-scripts
ffe88598b476b2e6b53fd06e8ce6092ef0351b19
[ "Apache-2.0" ]
2
2019-09-18T15:20:10.000Z
2021-07-25T06:46:57.000Z
from scripts import custom def test_code_to_empty(): assert custom.code_to_empty("-1") is None assert custom.code_to_empty("(sans)") is None assert custom.code_to_empty("-2") is not None
25.125
49
0.726368
34
201
4.029412
0.470588
0.175182
0.321168
0.394161
0.591241
0.423358
0.423358
0
0
0
0
0.011905
0.164179
201
7
50
28.714286
0.803571
0
0
0
0
0
0.049751
0
0
0
0
0
0.6
1
0.2
true
0
0.2
0
0.4
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
5
c2fde775c4a09000b73fd86b431c8dade345b559
89
py
Python
flask_accepts/__init__.py
cafetodev/flask_accepts
6d3728a4008f360e0a208064f6620557d270bcdd
[ "BSD-3-Clause" ]
170
2019-05-04T17:53:15.000Z
2022-03-03T21:39:59.000Z
flask_accepts/__init__.py
cafetodev/flask_accepts
6d3728a4008f360e0a208064f6620557d270bcdd
[ "BSD-3-Clause" ]
104
2019-07-04T15:46:15.000Z
2022-01-14T19:18:12.000Z
flask_accepts/__init__.py
cafetodev/flask_accepts
6d3728a4008f360e0a208064f6620557d270bcdd
[ "BSD-3-Clause" ]
55
2019-05-04T22:39:30.000Z
2022-01-13T16:18:13.000Z
from .decorators import accepts, responds # noqa from .utils import for_swagger # noqa
29.666667
49
0.775281
12
89
5.666667
0.75
0
0
0
0
0
0
0
0
0
0
0
0.168539
89
2
50
44.5
0.918919
0.101124
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
6c29557f512672b61aa24e4293816d64e408668b
44
py
Python
api/python/remote.py
SKKU-ESLAB/ANT
5b4c7bdd32224854e86525500fbc0f02616439c0
[ "Apache-2.0" ]
21
2017-09-21T03:42:36.000Z
2021-10-12T06:32:27.000Z
api/python/remote.py
SKKU-ESLAB/ANT
5b4c7bdd32224854e86525500fbc0f02616439c0
[ "Apache-2.0" ]
156
2017-09-21T12:36:13.000Z
2021-12-20T13:20:51.000Z
api/python/remote.py
SKKU-ESLAB/ANT
5b4c7bdd32224854e86525500fbc0f02616439c0
[ "Apache-2.0" ]
24
2017-09-21T03:42:41.000Z
2021-10-12T06:36:49.000Z
def ANTRemoteUI(): raise NotImplemented
14.666667
24
0.75
4
44
8.25
1
0
0
0
0
0
0
0
0
0
0
0
0.181818
44
2
25
22
0.916667
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
5
66649226f4038358cf5130eae373debf3d58eed9
583
py
Python
generate_assets.py
Kovak/YACS
cfb9edd27363eccb9b7f4aefd3b59074efafb979
[ "MIT" ]
10
2015-06-18T03:27:22.000Z
2020-05-18T11:00:43.000Z
generate_assets.py
Kovak/YACS
cfb9edd27363eccb9b7f4aefd3b59074efafb979
[ "MIT" ]
null
null
null
generate_assets.py
Kovak/YACS
cfb9edd27363eccb9b7f4aefd3b59074efafb979
[ "MIT" ]
5
2016-03-10T00:05:10.000Z
2019-03-29T09:49:21.000Z
from geometry import draw_layered_regular_polygon def generate_shield_model(radius, width): radius_color_dict = {1: (radius, (255, 255, 255, 0)), 2: (2., (255, 255, 255, 0)), 2: (.4*width, (255, 255, 255, 125)), 3: (.1*width, (255, 255, 255, 255)), 4: (.4*width, (255, 255, 255, 125)), 5: (2.,(255, 255, 255, 0))} return draw_layered_regular_polygon((0., 0.), 5, 32, (0, 0, 0, 0), radius_color_dict)
53
71
0.439108
71
583
3.43662
0.352113
0.319672
0.258197
0.122951
0.286885
0.147541
0
0
0
0
0
0.248555
0.406518
583
11
72
53
0.456647
0
0
0
1
0
0
0
0
0
0
0
0
1
0.1
false
0
0.1
0
0.3
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
666e812232e47585198f68d51169628a66c36ff6
2,516
py
Python
xinshuo_visualization/test/image_vis/test_visualize_image_with_pts.py
xinshuoweng/cv_ml_tool
1918b9e37ec5fb8148b8a089f226a4864d67b153
[ "MIT" ]
31
2020-03-05T12:27:21.000Z
2022-03-07T04:00:18.000Z
xinshuo_visualization/test/image_vis/test_visualize_image_with_pts.py
xinshuoweng/cv_ml_tool
1918b9e37ec5fb8148b8a089f226a4864d67b153
[ "MIT" ]
null
null
null
xinshuo_visualization/test/image_vis/test_visualize_image_with_pts.py
xinshuoweng/cv_ml_tool
1918b9e37ec5fb8148b8a089f226a4864d67b153
[ "MIT" ]
12
2020-07-06T05:06:58.000Z
2021-11-18T14:43:20.000Z
# Author: Xinshuo Weng # email: xinshuo.weng@gmail.com import numpy as np from PIL import Image import init_paths from xinshuo_visualization import visualize_image_with_pts def test_visualize_image_with_pts(): image_path = '../lena.png' print('testing basic') img = Image.open(image_path).convert('L') pts_array = [[200, 300], [400, 400]] visualize_image_with_pts(img, pts_array, vis=True) print('testing basic') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 1], [400, 400, 1]] visualize_image_with_pts(img, pts_array, vis=True) print('testing color index') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 1], [400, 400, 1]] visualize_image_with_pts(img, pts_array, color_index=1, vis=True) print('testing pts size') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 1], [400, 400, 1]] visualize_image_with_pts(img, pts_array, pts_size=100, vis=True) print('testing vis threshold') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 0.4], [400, 400, 0.2]] visualize_image_with_pts(img, pts_array, vis=True) print('testing vis threshold') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 0.4], [400, 400, 0.8]] visualize_image_with_pts(img, pts_array, vis_threshold=0.7, vis=True) print('testing vis threshold') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 1], [400, 400, 0]] visualize_image_with_pts(img, pts_array, vis_threshold=0.7, vis=True) print('testing labels') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 0.4], [400, 400, 0.8]] visualize_image_with_pts(img, pts_array, label=True, vis=True) print('testing label list') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 0.4], [400, 400, 0.8]] visualize_image_with_pts(img, pts_array, label_list=['2', '6'], vis=True) print('testing label size') img = Image.open(image_path).convert('L') pts_array = [[200, 300, 0.4], [400, 400, 0.8]] visualize_image_with_pts(img, pts_array, label_list=['2', '6'], 
label_size=100, vis=True) print('testing a dict of pts') img = Image.open(image_path).convert('L') pts_array1 = [[200, 300, 0.4], [400, 400, 0.8]] pts_array2 = [[100, 100, 0.4], [50, 50, 0.2], [150, 150, 0.6]] pts_array = {'pts1': pts_array1, 'pts2': pts_array2} visualize_image_with_pts(img, pts_array, label_list=['3', '6', '9', '12'], vis=True) print('\n\nDONE! SUCCESSFUL!!\n') if __name__ == '__main__': test_visualize_image_with_pts()
34.944444
91
0.692369
422
2,516
3.890995
0.165877
0.107186
0.153471
0.17905
0.782582
0.722899
0.702801
0.702801
0.673569
0.648599
0
0.096671
0.128378
2,516
72
92
34.944444
0.652075
0.019873
0
0.509091
0
0
0.107955
0
0
0
0
0
0
1
0.018182
false
0
0.072727
0
0.090909
0.218182
0
0
0
null
0
0
1
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
668593d9bf3a477f2b611516f9344b29b307cbf6
27
py
Python
Utils/__init__.py
mst-solar-car/kicad-bom-generator
2aae905056d06f3d25343a8d784049c141d05640
[ "MIT" ]
3
2018-02-26T12:31:41.000Z
2020-10-10T14:14:11.000Z
Utils/__init__.py
mst-solar-car/kicad-bom-generator
2aae905056d06f3d25343a8d784049c141d05640
[ "MIT" ]
null
null
null
Utils/__init__.py
mst-solar-car/kicad-bom-generator
2aae905056d06f3d25343a8d784049c141d05640
[ "MIT" ]
null
null
null
from .string_utils import *
27
27
0.814815
4
27
5.25
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
27
1
27
27
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
66911cd21f5e749f9569cc58b621e2d971e4d9b3
43
py
Python
example_package/calculations.py
kevinglasson/example-package
5fa750850c2c060e8cfdea8aca347df094c2d18e
[ "MIT" ]
null
null
null
example_package/calculations.py
kevinglasson/example-package
5fa750850c2c060e8cfdea8aca347df094c2d18e
[ "MIT" ]
null
null
null
example_package/calculations.py
kevinglasson/example-package
5fa750850c2c060e8cfdea8aca347df094c2d18e
[ "MIT" ]
null
null
null
def double(x: int) -> int: return x * 2
21.5
26
0.55814
8
43
3
0.75
0
0
0
0
0
0
0
0
0
0
0.032258
0.27907
43
2
27
21.5
0.741935
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
669359d89273bdce7b720f99199951676b86af03
8,911
py
Python
kscipy/data/job/ttypes.py
lbn/ksci
53b30d2e5f0937d4040fcfd635c0642150e74388
[ "MIT" ]
5
2021-06-22T03:39:01.000Z
2021-12-15T08:02:51.000Z
kscipy/data/job/ttypes.py
lbn/ksci
53b30d2e5f0937d4040fcfd635c0642150e74388
[ "MIT" ]
null
null
null
kscipy/data/job/ttypes.py
lbn/ksci
53b30d2e5f0937d4040fcfd635c0642150e74388
[ "MIT" ]
1
2021-06-30T14:40:06.000Z
2021-06-30T14:40:06.000Z
# # Autogenerated by Thrift Compiler (0.14.1) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # # options string: py # from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException from thrift.protocol.TProtocol import TProtocolException from thrift.TRecursive import fix_spec import sys from thrift.transport import TTransport all_structs = [] class LogWrite(object): """ Attributes: - job_id - log_id - line """ def __init__(self, job_id=None, log_id=None, line=None,): self.job_id = job_id self.log_id = log_id self.line = line def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.job_id = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.log_id = iprot.readBinary() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.line = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('LogWrite') if self.job_id is not None: oprot.writeFieldBegin('job_id', TType.STRING, 1) oprot.writeBinary(self.job_id) oprot.writeFieldEnd() if self.log_id is not None: oprot.writeFieldBegin('log_id', TType.STRING, 2) oprot.writeBinary(self.log_id) oprot.writeFieldEnd() if self.line is not None: oprot.writeFieldBegin('line', TType.STRING, 3) oprot.writeString(self.line.encode('utf-8') if sys.version_info[0] == 2 else self.line) 
oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class JobStatusUpdate(object): """ Attributes: - job_id - status - message """ def __init__(self, job_id=None, status=None, message=None,): self.job_id = job_id self.status = status self.message = message def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.job_id = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.STRING: self.status = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: self.message = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('JobStatusUpdate') if self.job_id is not None: oprot.writeFieldBegin('job_id', TType.STRING, 1) oprot.writeBinary(self.job_id) oprot.writeFieldEnd() if self.status is not None: oprot.writeFieldBegin('status', TType.STRING, 2) oprot.writeString(self.status.encode('utf-8') if sys.version_info[0] == 2 else self.status) 
oprot.writeFieldEnd() if self.message is not None: oprot.writeFieldBegin('message', TType.STRING, 3) oprot.writeString(self.message.encode('utf-8') if sys.version_info[0] == 2 else self.message) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class JobDuration(object): """ Attributes: - job_id - duration """ def __init__(self, job_id=None, duration=None,): self.job_id = job_id self.duration = duration def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.job_id = iprot.readBinary() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: self.duration = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot._fast_encode is not None and self.thrift_spec is not None: oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) return oprot.writeStructBegin('JobDuration') if self.job_id is not None: oprot.writeFieldBegin('job_id', TType.STRING, 1) oprot.writeBinary(self.job_id) oprot.writeFieldEnd() if self.duration is not None: oprot.writeFieldBegin('duration', TType.I32, 2) oprot.writeI32(self.duration) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] return 
'%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) all_structs.append(LogWrite) LogWrite.thrift_spec = ( None, # 0 (1, TType.STRING, 'job_id', 'BINARY', None, ), # 1 (2, TType.STRING, 'log_id', 'BINARY', None, ), # 2 (3, TType.STRING, 'line', 'UTF8', None, ), # 3 ) all_structs.append(JobStatusUpdate) JobStatusUpdate.thrift_spec = ( None, # 0 (1, TType.STRING, 'job_id', 'BINARY', None, ), # 1 (2, TType.STRING, 'status', 'UTF8', None, ), # 2 (3, TType.STRING, 'message', 'UTF8', None, ), # 3 ) all_structs.append(JobDuration) JobDuration.thrift_spec = ( None, # 0 (1, TType.STRING, 'job_id', 'BINARY', None, ), # 1 (2, TType.I32, 'duration', None, None, ), # 2 ) fix_spec(all_structs) del all_structs
33.626415
139
0.570755
1,023
8,911
4.743891
0.121212
0.027818
0.03709
0.0408
0.789821
0.753555
0.711313
0.697713
0.697713
0.69421
0
0.011466
0.314892
8,911
264
140
33.753788
0.783456
0.029851
0
0.720588
1
0
0.030186
0
0
0
0
0
0
1
0.102941
false
0
0.02451
0.044118
0.230392
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
66d349ca2257569d31fef875c182fa310d3f4893
189
py
Python
lbworkflow/core/exceptions.py
MaxMorais/django-lb-workflow
80c5ee4760fb9fee40b24e03cf6095c8215a2837
[ "MIT" ]
1
2021-12-06T00:41:30.000Z
2021-12-06T00:41:30.000Z
lbworkflow/core/exceptions.py
MaxMorais/django-lb-workflow
80c5ee4760fb9fee40b24e03cf6095c8215a2837
[ "MIT" ]
null
null
null
lbworkflow/core/exceptions.py
MaxMorais/django-lb-workflow
80c5ee4760fb9fee40b24e03cf6095c8215a2837
[ "MIT" ]
1
2021-12-06T00:42:14.000Z
2021-12-06T00:42:14.000Z
class HttpResponseException(Exception): def __init__(self, http_response): super(HttpResponseException, self).__init__(http_response) self.http_response = http_response
37.8
66
0.761905
19
189
6.947368
0.473684
0.363636
0.242424
0
0
0
0
0
0
0
0
0
0.15873
189
4
67
47.25
0.830189
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
66e5306de7c2bbac799c5c3a2ec4687e61ca2f42
252
py
Python
python/comfo/__init__.py
ti-mo/comfo
9d6e04ef0540d399243ba317f3a0a168498acd5c
[ "MIT" ]
3
2017-11-14T11:56:01.000Z
2020-10-24T12:19:10.000Z
python/comfo/__init__.py
ti-mo/comfo
9d6e04ef0540d399243ba317f3a0a168498acd5c
[ "MIT" ]
null
null
null
python/comfo/__init__.py
ti-mo/comfo
9d6e04ef0540d399243ba317f3a0a168498acd5c
[ "MIT" ]
null
null
null
""" Comfo client to be used against the Go-based comfoserver API. Wraps the generated Twirp (protobuf) methods to make them more Pythonic. All public methods have asyncio variants with the 'async_' prefix. """ from .client import Comfo # noqa: F401
28
72
0.761905
38
252
5.026316
0.842105
0
0
0
0
0
0
0
0
0
0
0.014354
0.170635
252
8
73
31.5
0.899522
0.849206
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
dd024955f78f14c4bbd6fcef7e38d163eee82040
262
py
Python
Solo/GUI.py
feiticeiro-tec/banco-team-blue
a5524668b0b42377787804ff00bb4aebd9b3cbf0
[ "MIT" ]
3
2021-06-13T11:31:24.000Z
2021-06-15T21:51:31.000Z
Solo/GUI.py
feiticeiro-tec/banco-team-blue
a5524668b0b42377787804ff00bb4aebd9b3cbf0
[ "MIT" ]
2
2021-06-12T12:55:40.000Z
2021-06-12T15:17:37.000Z
Solo/GUI.py
feiticeiro-tec/banco-team-blue
a5524668b0b42377787804ff00bb4aebd9b3cbf0
[ "MIT" ]
null
null
null
#+++++++++++++++++++++++++++++++++++++++++++++++++++ # Git: @faticeiro-tec - Inst: @spell_ware # Banco Conceitual - Back <> Controll <> Front #+++++++++++++++++++++++++++++++++++++++++++++++++++ from GUI import run if __name__ == '__main__': run()
29.111111
53
0.374046
19
262
4.684211
0.947368
0
0
0
0
0
0
0
0
0
0
0
0.152672
262
9
54
29.111111
0.400901
0.748092
0
0
0
0
0.126984
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
dd1b1b60fe9748c15b407ad5ae41810f316c1c80
111
py
Python
math_lib.py
cu-swe4s-fall-2020/version-control-mad232
6d40887deb637976bc68b2c085ba606aa6c44d8d
[ "MIT" ]
null
null
null
math_lib.py
cu-swe4s-fall-2020/version-control-mad232
6d40887deb637976bc68b2c085ba606aa6c44d8d
[ "MIT" ]
null
null
null
math_lib.py
cu-swe4s-fall-2020/version-control-mad232
6d40887deb637976bc68b2c085ba606aa6c44d8d
[ "MIT" ]
null
null
null
def div(a, b): if b==0: return "error" else: return a/b def add(a,b): return a+b
11.1
22
0.459459
20
111
2.55
0.5
0.156863
0.313725
0
0
0
0
0
0
0
0
0.014925
0.396396
111
9
23
12.333333
0.746269
0
0
0
0
0
0.045455
0
0
0
0
0
0
1
0.285714
false
0
0
0.142857
0.714286
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
dd52539d721588fedfb9705448829c107ce4e4a8
297
py
Python
webApp/admin/endpoints.py
rbakash/flask-boilerplate
58128ae0a3aec48b1917e4b362bb1bb68098bd67
[ "MIT" ]
null
null
null
webApp/admin/endpoints.py
rbakash/flask-boilerplate
58128ae0a3aec48b1917e4b362bb1bb68098bd67
[ "MIT" ]
null
null
null
webApp/admin/endpoints.py
rbakash/flask-boilerplate
58128ae0a3aec48b1917e4b362bb1bb68098bd67
[ "MIT" ]
null
null
null
# Contains all the endpoint related to the user authentication from flask import render_template, redirect from flask import render_template from . import admin # Renders the error.html page @admin.route("/products") def show(): return render_template('shop.html', title='Shop Products')
21.214286
62
0.771044
41
297
5.512195
0.634146
0.185841
0.132743
0.185841
0.256637
0
0
0
0
0
0
0
0.148148
297
13
63
22.846154
0.893281
0.296296
0
0
0
0
0.15122
0
0
0
0
0
0
1
0.166667
true
0
0.5
0.166667
0.833333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
5
dd5fa9663257ce3d1873f60be03f58b5087660ac
156
py
Python
tacos/chicken/urls/v1/__init__.py
robinetbatman/figma-lottie
3de5cd4a478d97ac4cd1bba87cfeac40e1778a8a
[ "MIT" ]
22
2019-05-31T18:04:54.000Z
2021-11-02T21:47:12.000Z
tacos/chicken/urls/v1/__init__.py
robinetbatman/figma-lottie
3de5cd4a478d97ac4cd1bba87cfeac40e1778a8a
[ "MIT" ]
15
2019-05-29T17:22:42.000Z
2021-06-10T21:32:08.000Z
tacos/chicken/urls/v1/__init__.py
robinetbatman/figma-lottie
3de5cd4a478d97ac4cd1bba87cfeac40e1778a8a
[ "MIT" ]
1
2020-10-04T11:51:21.000Z
2020-10-04T11:51:21.000Z
from django.urls import include, path urlpatterns = [ path('', include('chicken.urls.v1.api')), path('oauth', include('chicken.urls.v1.oauth')), ]
22.285714
52
0.660256
20
156
5.15
0.55
0.271845
0.349515
0.38835
0
0
0
0
0
0
0
0.014925
0.141026
156
6
53
26
0.753731
0
0
0
0
0
0.288462
0.134615
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
dd7a9e4d77d0173614d818bd10ce96aeaa177565
88
py
Python
fakesite/http_auth/digest.py
akun/fakesite
19d984011424dba9b9c7641e4ce3603605dd0d43
[ "MIT" ]
2
2015-12-20T06:57:20.000Z
2022-03-17T10:26:57.000Z
fakesite/http_auth/digest.py
akun/fakesite
19d984011424dba9b9c7641e4ce3603605dd0d43
[ "MIT" ]
null
null
null
fakesite/http_auth/digest.py
akun/fakesite
19d984011424dba9b9c7641e4ce3603605dd0d43
[ "MIT" ]
null
null
null
#!/usr/bin/env python from tornado.test.curl_httpclient_test import DigestAuthHandler
17.6
63
0.829545
12
88
5.916667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.090909
88
4
64
22
0.8875
0.227273
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
dda1db34cf04b33722d91f21e62c06d26ae87e79
187
py
Python
5kyu/Luck_check/solution.py
wizh/codewars
bdb421720437a9fcafaa2eda8869a1cd4835bb47
[ "MIT" ]
1
2015-11-25T00:06:13.000Z
2015-11-25T00:06:13.000Z
5kyu/Luck_check/solution.py
wizh/codewars
bdb421720437a9fcafaa2eda8869a1cd4835bb47
[ "MIT" ]
null
null
null
5kyu/Luck_check/solution.py
wizh/codewars
bdb421720437a9fcafaa2eda8869a1cd4835bb47
[ "MIT" ]
null
null
null
def luck_check(s): if len(s) % 2 == 1: s = s[:len(s)//2] + s[len(s)//2 + 1:] return (sum(int(i) for i in s[:len(s)//2]) == sum(int(i) for i in s[len(s)//2:]))
31.166667
60
0.427807
40
187
1.975
0.35
0.253165
0.316456
0.303797
0.481013
0.481013
0.481013
0.481013
0.481013
0.481013
0
0.05303
0.294118
187
5
61
37.4
0.545455
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0
0
0.4
0
0
0
1
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
06d5ce197b1ac0dfe731e5d32cca6a578511eb1b
746
py
Python
interact_blue.py
Driftwood2D/blue
02b5bd3dbbc8fe2836f88c8beaa0955344cc7998
[ "MIT" ]
null
null
null
interact_blue.py
Driftwood2D/blue
02b5bd3dbbc8fe2836f88c8beaa0955344cc7998
[ "MIT" ]
null
null
null
interact_blue.py
Driftwood2D/blue
02b5bd3dbbc8fe2836f88c8beaa0955344cc7998
[ "MIT" ]
null
null
null
def open_door(): # TODO: This needs to be in the stdlib. Driftwood.area.tilemap.layers[2].tile(4, 0).nowalk = None Driftwood.area.tilemap.layers[2].tile(4, 0).setgid(27) def open_door2(): Driftwood.area.tilemap.layers[2].tile(2, 0).nowalk = None Driftwood.area.tilemap.layers[2].tile(2, 0).setgid(27) def get_pearl(): if not _["inventory"].has("blue_pearl"): Driftwood.area.tilemap.layers[2].tile(5, 3).nowalk = None Driftwood.area.tilemap.layers[1].tile(5, 3).setgid(0) _["inventory"].get("blue_pearl", 1) _["inventory"].save() if "blue_pearl_light" in Driftwood.vars and Driftwood.vars["blue_pearl_light"]: Driftwood.light.kill(Driftwood.vars["blue_pearl_light"])
37.3
87
0.662198
111
746
4.324324
0.351351
0.1625
0.25
0.325
0.56875
0.45625
0.316667
0.316667
0.175
0
0
0.040323
0.168901
746
19
88
39.263158
0.733871
0.049598
0
0
0
0
0.134371
0
0
0
0
0.052632
0
1
0.214286
true
0
0
0
0.214286
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
1
0
0
0
0
0
0
5
06de461999a368a89777aaba491918e4ee7d0980
206
py
Python
bot/models/Group.py
Tuarisa/Secret-Santa-Bot
5e741eba245e3fd4d12c1da595061b225493d36a
[ "MIT" ]
12
2018-11-26T14:00:37.000Z
2021-11-26T02:53:15.000Z
bot/models/Group.py
Tuarisa/Secret-Santa-Bot
5e741eba245e3fd4d12c1da595061b225493d36a
[ "MIT" ]
8
2018-11-28T09:49:27.000Z
2021-11-29T04:28:10.000Z
bot/models/Group.py
Tuarisa/Secret-Santa-Bot
5e741eba245e3fd4d12c1da595061b225493d36a
[ "MIT" ]
12
2017-11-17T18:23:22.000Z
2021-12-14T06:43:47.000Z
from sqlalchemy import Column, Integer from bot.Base import Base class Group(Base): __tablename__ = 'groups' id = Column(Integer(), primary_key=True) telegram_id = Column(Integer)
18.727273
45
0.684466
25
206
5.4
0.64
0.288889
0.222222
0
0
0
0
0
0
0
0
0
0.228155
206
10
46
20.6
0.849057
0
0
0
0
0
0.030612
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
660b55524327ea3f603352bee07221f4c7ebc273
9,058
py
Python
test/test_deadlock.py
tkilias/sqlalchemy_exasol
fa9566fb65f59dbc7d78f3e80a3d175925e171e3
[ "BSD-2-Clause" ]
null
null
null
test/test_deadlock.py
tkilias/sqlalchemy_exasol
fa9566fb65f59dbc7d78f3e80a3d175925e171e3
[ "BSD-2-Clause" ]
null
null
null
test/test_deadlock.py
tkilias/sqlalchemy_exasol
fa9566fb65f59dbc7d78f3e80a3d175925e171e3
[ "BSD-2-Clause" ]
null
null
null
# -*- coding: UTF-8 -*- import time from threading import Thread import pytest from sqlalchemy import create_engine from sqlalchemy.testing import fixtures, config import sqlalchemy.testing as testing from sqlalchemy_exasol.base import EXADialect #TODO get_schema_names, get_view_names and get_view_definition didn't cause deadlocks in this scenario @pytest.mark.skipif("turbodbc" in str(testing.db.url), reason="We currently don't support snapshot metadata requests for turbodbc") class MetadataTest(fixtures.TablesTest): __backend__ = True CONNECTION_ECHO = False WATCHDOG_ECHO = False def create_transaction(self, url, con_name): engine = create_engine(config.db.url, echo=self.CONNECTION_ECHO, logging_name="engine" + con_name) session = engine.connect().execution_options(autocommit=False) return engine, session def test_no_deadlock_for_get_table_names_without_fallback(self): def without_fallback(session2, schema, table): dialect = EXADialect() dialect.get_table_names(session2, schema=schema, use_sql_fallback=False) self.run_deadlock_for_table(without_fallback) def test_deadlock_for_get_table_names_with_fallback(self): def with_fallback(session2, schema, table): dialect = EXADialect() dialect.get_table_names(session2, schema=schema, use_sql_fallback=True) with pytest.raises(Exception): self.run_deadlock_for_table(with_fallback) def test_no_deadlock_for_get_columns_without_fallback(self): def without_fallback(session2, schema, table): dialect = EXADialect() dialect.get_columns(session2, schema=schema, table_name=table, use_sql_fallback=False) self.run_deadlock_for_table(without_fallback) def test_no_deadlock_for_get_columns_with_fallback(self): # TODO: Doesnt produce a deadlock anymore since last commit? 
def with_fallback(session2, schema, table): dialect = EXADialect() dialect.get_columns(session2, schema=schema, table_name=table, use_sql_fallback=True) self.run_deadlock_for_table(with_fallback) def test_no_deadlock_for_get_pk_constraint_without_fallback(self): def without_fallback(session2, schema, table): dialect = EXADialect() dialect.get_pk_constraint(session2, table_name=table, schema=schema, use_sql_fallback=False) self.run_deadlock_for_table(without_fallback) def test_deadlock_for_get_pk_constraint_with_fallback(self): def with_fallback(session2, schema, table): dialect = EXADialect() dialect.get_pk_constraint(session2, table_name=table, schema=schema, use_sql_fallback=True) with pytest.raises(Exception): self.run_deadlock_for_table(with_fallback) def test_no_deadlock_for_get_foreign_keys_without_fallback(self): def without_fallback(session2, schema, table): dialect = EXADialect() dialect.get_foreign_keys(session2, table_name=table, schema=schema, use_sql_fallback=False) self.run_deadlock_for_table(without_fallback) def test_deadlock_for_get_foreign_keys_with_fallback(self): def with_fallback(session2, schema, table): dialect = EXADialect() dialect.get_foreign_keys(session2, table_name=table, schema=schema, use_sql_fallback=True) with pytest.raises(Exception): self.run_deadlock_for_table(with_fallback) def test_no_deadlock_for_get_view_names_without_fallback(self): # TODO: think of other scenarios where metadata deadlocks with view could happen def without_fallback(session2, schema, table): dialect = EXADialect() dialect.get_view_names(session2, table_name=table, schema=schema, use_sql_fallback=False) self.run_deadlock_for_table(without_fallback) def test_no_deadlock_for_get_view_names_with_fallback(self): # TODO: think of other scenarios where metadata deadlocks with view could happen def with_fallback(session2, schema, table): dialect = EXADialect() dialect.get_view_names(session2, table_name=table, schema=schema, use_sql_fallback=True) 
self.run_deadlock_for_table(with_fallback) def watchdog(self, session0, schema): while self.watchdog_run: rs = session0.execute("SELECT * FROM SYS.EXA_ALL_SESSIONS") rs = [row for row in rs] if self.WATCHDOG_ECHO: print() print("===========================================") print("Watchdog") print("===========================================") for row in rs: if self.WATCHDOG_ECHO: print(row) if row[7] is not None and "Waiting for" in row[7]: if self.WATCHDOG_ECHO: print("Killing session: %s" % row[0]) session0.execute("kill session %s" % row[0]) if self.WATCHDOG_ECHO: print("===========================================") print() time.sleep(10) # Only change with care, lower values might make tests unreliable def run_deadlock_for_table(self, function): c1 = config.db.connect() url = config.db.url schema = "deadlock_get_table_names_test_schema" engine0, session0 = self.create_transaction(url, "transaction0") try: session0.execute("DROP SCHEMA %s cascade" % schema) except: pass session0.execute("CREATE SCHEMA %s" % schema) session0.execute("CREATE OR REPLACE TABLE %s.deadlock_test1 (id int PRIMARY KEY)" % schema) session0.execute( "CREATE OR REPLACE TABLE %s.deadlock_test2 (id int PRIMARY KEY, fk int REFERENCES %s.deadlock_test1(id))" % ( schema, schema)) session0.execute("INSERT INTO %s.deadlock_test1 VALUES 1" % schema) session0.execute("INSERT INTO %s.deadlock_test2 VALUES (1,1)" % schema) session0.execute("commit") self.watchdog_run = True t1 = Thread(target=self.watchdog, args=(session0, schema)) t1.start() try: engine1, session1 = self.create_transaction(url, "transaction1") session1.execute("SELECT 1") session1.execute("SELECT * FROM %s.deadlock_test2" % schema) session1.execute("INSERT INTO %s.deadlock_test1 VALUES 2" % schema) engine3, session3 = self.create_transaction(url, "transaction3") session3.execute("SELECT 1") session3.execute("DELETE FROM %s.deadlock_test2 WHERE false" % schema) session3.execute("commit") engine2, session2 = 
self.create_transaction(url, "transaction2") session2.execute("SELECT 1") function(session2, schema, "deadlock_test2") session2.execute("commit") session1.execute("commit") except Exception as e: self.watchdog_run = False t1.join() raise e self.watchdog_run = False t1.join() def run_deadlock_for_get_view_names(self, function): c1 = config.db.connect() url = config.db.url schema = "deadlock_get_view_names_test_schema" engine0, session0 = self.create_transaction(url, "transaction0") try: session0.execute("DROP SCHEMA %s cascade" % schema) except: pass session0.execute("CREATE SCHEMA %s" % schema) session0.execute("CREATE OR REPLACE TABLE %s.deadlock_test_table (id int)" % schema) session0.execute( "CREATE OR REPLACE VIEW %s.deadlock_test_view_1 AS SELECT * FROM %s.deadlock_test_table" % (schema, schema)) session0.execute("commit") self.watchdog_run = True t1 = Thread(target=self.watchdog, args=(session0, schema)) t1.start() try: engine1, session1 = self.create_transaction(url, "transaction1") session1.execute("SELECT 1") session1.execute("SELECT * FROM %s.deadlock_test_view_1" % schema) session1.execute( "CREATE OR REPLACE VIEW %s.deadlock_test_view_2 AS SELECT * FROM %s.deadlock_test_table" % ( schema, schema)) engine3, session3 = self.create_transaction(url, "transaction3") session3.execute("SELECT 1") session3.execute("DROP VIEW %s.deadlock_test_view_1" % schema) session3.execute("commit") engine2, session2 = self.create_transaction(url, "transaction2") session2.execute("SELECT 1") function(session2, schema) session2.execute("commit") session1.execute("commit") except Exception as e: self.watchdog_run = False t1.join() raise e self.watchdog_run = False t1.join()
42.327103
131
0.647052
1,052
9,058
5.326046
0.164449
0.043191
0.029984
0.037301
0.783866
0.763876
0.744601
0.721756
0.719258
0.667143
0
0.017884
0.253036
9,058
213
132
42.525822
0.810228
0.044491
0
0.607143
0
0.005952
0.150439
0.043941
0
0
0
0.004695
0
1
0.142857
false
0.011905
0.041667
0
0.214286
0.047619
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
661e4ddef2b78d36040fddd182157a8081054391
45
py
Python
hello.py
wSkyandStar/pythonflask01
07e19273e76bc20fdbbbe5c1f24f5307f5ce6955
[ "Apache-2.0" ]
null
null
null
hello.py
wSkyandStar/pythonflask01
07e19273e76bc20fdbbbe5c1f24f5307f5ce6955
[ "Apache-2.0" ]
null
null
null
hello.py
wSkyandStar/pythonflask01
07e19273e76bc20fdbbbe5c1f24f5307f5ce6955
[ "Apache-2.0" ]
null
null
null
print('hello github') print('second commit!')
22.5
23
0.733333
6
45
5.5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.066667
45
2
23
22.5
0.785714
0
0
0
0
0
0.565217
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
b0c416f02eb0d2b5c7147b4c77fef6ccc49e3f04
113
py
Python
pineboolib/q3widgets/qmenu.py
juanjosepablos/pineboo
f6ce515aec6e0139821bb9c1d62536d9fb50dae4
[ "MIT" ]
null
null
null
pineboolib/q3widgets/qmenu.py
juanjosepablos/pineboo
f6ce515aec6e0139821bb9c1d62536d9fb50dae4
[ "MIT" ]
1
2017-10-30T22:00:48.000Z
2017-11-11T19:34:32.000Z
pineboolib/q3widgets/qmenu.py
juanjosepablos/pineboo
f6ce515aec6e0139821bb9c1d62536d9fb50dae4
[ "MIT" ]
1
2017-10-30T20:16:38.000Z
2017-10-30T20:16:38.000Z
"""Qmenu module.""" from PyQt5 import QtWidgets class QMenu(QtWidgets.QMenu): """QMenu class.""" pass
12.555556
29
0.646018
13
113
5.615385
0.615385
0
0
0
0
0
0
0
0
0
0
0.010989
0.19469
113
8
30
14.125
0.791209
0.230089
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
5
b0d837435416b451da0541661c8dc2145b1ab779
44
py
Python
docs/vscode/01-render-test.py
dmccreary/Python
1ef12355b7933f2d566acdd22d5f8b25c4cbc40c
[ "MIT" ]
null
null
null
docs/vscode/01-render-test.py
dmccreary/Python
1ef12355b7933f2d566acdd22d5f8b25c4cbc40c
[ "MIT" ]
null
null
null
docs/vscode/01-render-test.py
dmccreary/Python
1ef12355b7933f2d566acdd22d5f8b25c4cbc40c
[ "MIT" ]
1
2020-02-06T14:14:38.000Z
2020-02-06T14:14:38.000Z
import turtle t = turtle.Turtle() t.fd(100)
11
19
0.704545
8
44
3.875
0.625
0.451613
0
0
0
0
0
0
0
0
0
0.078947
0.136364
44
3
20
14.666667
0.736842
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
7c036b4a397ea6e39dccb4f13f8057c5485eeb3f
106
py
Python
code/abc008_2_01.py
KoyanagiHitoshi/AtCoder
731892543769b5df15254e1f32b756190378d292
[ "MIT" ]
3
2019-08-16T16:55:48.000Z
2021-04-11T10:21:40.000Z
code/abc008_2_01.py
KoyanagiHitoshi/AtCoder
731892543769b5df15254e1f32b756190378d292
[ "MIT" ]
null
null
null
code/abc008_2_01.py
KoyanagiHitoshi/AtCoder
731892543769b5df15254e1f32b756190378d292
[ "MIT" ]
null
null
null
from collections import Counter print(Counter([input() for i in range(int(input()))]).most_common()[0][0])
53
74
0.726415
17
106
4.470588
0.823529
0
0
0
0
0
0
0
0
0
0
0.020408
0.075472
106
2
74
53
0.755102
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
9fd8584d431a36fbf250f7aa29af34c33e615f35
24,357
py
Python
plugins/_bitmaps.py
genericsoma/XRCed
c4f5077d81eb40e0d885c0be6a867ae17fefe31b
[ "BSD-2-Clause" ]
27
2020-11-12T19:24:54.000Z
2022-03-27T23:10:45.000Z
plugins/_bitmaps.py
genericsoma/XRCed
c4f5077d81eb40e0d885c0be6a867ae17fefe31b
[ "BSD-2-Clause" ]
2
2020-11-02T06:30:39.000Z
2022-02-23T18:39:55.000Z
plugins/_bitmaps.py
genericsoma/XRCed
c4f5077d81eb40e0d885c0be6a867ae17fefe31b
[ "BSD-2-Clause" ]
7
2018-02-13T10:22:39.000Z
2019-07-04T07:39:28.000Z
#---------------------------------------------------------------------- # This file was generated by ../encode_bitmaps.py # from wx.lib.embeddedimage import PyEmbeddedImage wxDialog = PyEmbeddedImage( "iVBORw0KGgoAAAANSUhEUgAAAE8AAABGCAYAAACNO8lOAAAABHNCSVQICAgIfAhkiAAACuBJ" "REFUeJztnFtsHNd5x3/nnJld7nKXt9WFoigpMnWJKdhS1SiyHOfSGGkrtEgNBEjSvqRF0QZ9" "CJDHvgV5SV+bhyJBC6NAYSBtgCYBqgiundSxEMOJA9lWTMmSFZG2xDu5vO1ldmfOpQ8zu1pR" "VOxkWqCu5w8Md3fmzNnz/c73fec7A3DFsZPnaG4sebsOHj88MHr4T621XxdC4JxD8P9TThDb" "5gAhEjt3ttYRc3DOIaT4WrM69x3dWJ25M7usxUOTp/ODYxNfGNl/5B/3n3xS9Y9PenHH7j0M" "432MNyaC0e3EUQTbLb4LTSG9HI35N/Xsa8+bhVtTf/3mSxeeEac+fv5g5fCpm8c++9XcxlaD" "VtAEITq3/voBvCfA/zclEERby+i1dwD74IYOkIr87sMMjx2h3dziyne/oR873P8Vb3D08J/v" "O/lpWV2v0Wxs4ft5hHPIXqfawcGso+PQD/xe2Q2JuJUjnslOl0KIe849qI/4+/6HJsoBUtLe" "WES2qhw88RGUl0Ngk7HEzYSIfdGhaDXrrC/MEK75BLLM0JGzXq059S3PWvv1vtHjrC4s4/cV" "aDdrNN55jVazgZASh7uPj3MW6RcYeug0XqEUk+yViE+1I4t1rpMv8KXE92KgxkI7MigpyHkS" "0TtBLu7DEfeBg5wvYpDJtW67bd+7vY/759YhlE9zdZah3Xtp5SroVkBowZPgSYEQ0NYOYx0F" "X2JzIzTCGQrNdfIjFQpjk7x68T/xhBBYa9HGgrYo3WKiWKc0LEF5XQ8RiRcJKRDOcf32Es3G" "JvhFcL1uLzDWUS4ofudAmYGijxCSVmRZ3Ah4ezWgrR2VkseJiWGW64bp5QZam7uznvxRUnBy" "vB/fU9xYCghaEeC6cyW3RUTHc0QvXHF/4AhnYqcA2u0Wvg05ua/I0lbEwmYbbRzHRwsMFnx+" "OVvHoDDWYqxDa4O1FqUknsPhrMVai9EGXwp+7+wjPHHqCMLvQwpQSiGlTA4FQvIv33+Oi3fA" "GHNPqpVC0I4MB0dyfP7MHgZVwNz8IiOVCl5xPz95s8p3X1mi1FfgYwcFr1y+yvU1hRoYJecp" "BAJjDEFk6c9Jzj9SoWTW+fsbt9iUe/D8HH5CLTIW62KIzoGUAl+CcXEq8KRCO4e123OawSbV" "RBQZynn4gxPDFHzJt1+cp5hTfPkTozTaltfv1NDW4pzDOocxJkkzDq8zQdZZhIsbKU/Rl88h" "fR8pBLIHnvI8fCnwfQ+HS+7riSXpuqFazHlMXbnG09/7EYXyEH/zhfOcn9zH61M3aIWKQ/sO" "cMPXyNoSf/LxSU4fKuELR9N4XLiywtTsFnlP4rVrBPNXGTnYz+ee+BCT+4pYa3ljrsnFK0ss" "bWl2l30+d2aUybEi08sNcn6OnIh4+oUZZmuSvpyHtbHRQkhIYHjCsbTZ4kfX1vjS43v52/MH" "8KSgrR3/9oslWqFGSIVzsYN1bAOBJJklbHwyPu6GQKexSxy9m7ddb/vuchBfdzFUhKMRBFTD" "PNNmnEszLYYH+jiQ3yRcmWaw3E+fJ9Fhi7W1Dd65eY3Z6escGXH80XGfQb2CNlEyUMsXz43z" 
"qYk8L//kWS6//CLnJ0ucP+Yjwi3+8NHdfObDZaZeucTNV37Myb2w398kmv4pYX0NhMQRO4dL" "4t45hzHxueevVvnpzU32DeaolHx+8OoyL/9qHYlN2sQsrHXdBc7jLovuIWXsbUophBAoFYer" "lPF7IUUye3QTcu8q6rqdgbNxmalKu+kb2AUOdFAjqodYrdFGM9gHR8fKHB0ZwJgIz/MYUCHF" "+gymcQQrwfckHzlSYX5ujmf+40UGxo5x5mzAgUITsXqDIyMPU99a519/+CJLeoBHf3eN8TKY" "oIZhE+dGux7RcRAQWGNphYZT42VOHSix3tBIKfjk8SGuztW5XQ0QMt40xODupgDZMdw6i7M2" "XkmFQ4k4lyiZvCYJXCUJWPTkSmvtPe9tx3WBkeEhjh4a47FjFZ48UWFxcZlrN99O8pslNHBo" "V4lPTe6hHdS58Ox/Mb+wiFQKdIiNWiilsA7mqk12VYY5d+5xPnr2MSrDQ6wuL9GszrGyUae/" "VObsmTM8du5jHBgfS1aHeLT2vrCLjyDUDBckn//oXgo5yd9duMU3n3ubSsnnrz6xH5xFJ3nO" "WjDGdXdfXmc973hOpC2Xr9+hXqsjVA4hQEgZ12xCxHlPwtWZRYw7iOp4regscA6cxTiobtQ4" "fHCMLz81wkC5n8bqDM88f4mpmRXOfPI0K2sbtJt1FtabXH2nytjQAKdPPUpkJbWNKkaHbNZb" "eFFEqx3xnUtv8WdPfIgv/fHjIBRvvXGZH77wM0KnePaXK+wq5/nsp89we2kDL6oRiXimu+ml" "N90kyd5ZQ7Mdcel6ldvVgDdma2jjiLShUsrFq6vocTBnu8uj1+ncWgM2IsTjpaUiL83VQMhu" "2dBZWDp7QiOG8QcKOKuTUcRtLOAJx0o95J8uvopfvYawhlZoWNmosbDeIr/vYeb1IN/895+z" "MHWVFbGHf7g4xS49z+bqIi5fxjMBc9UGP/jZLWx1mvVawPrbGyysvs4uu4IJtphdXGUlkPij" "R5muRlz4+S382m2UDRnf+/uEzYBmM0D1AVbHY3WJDdbgYs9gs6n53uUllARfSXKe4IXra9ik" "zkNxN6qsiUszkeQ8Zw3ORDgTA1CDY9xfHXWSYxLvwsUD0OG9l5OwrjcaXKlros0BsBohJX7f" "KIWJEjJfYisUXF4zyOLD5Ar9zDUU07V+pN6PwMMaQ244x1sbOazeT363QuX7mQsl05sBLlTk" "ynsojpbRfon+Qo5zj+zDD4cZ6s+DbvP9iz9mraHJ7fUxUYizJsnPBvJlmptV2qHEGoef1N9R" "Yp9K4ERa4EyIh8b6RXTYju124MW5qwNPxPlKR3SK+R7Hu6+6dw/g21Gxv4goH6UTI/FKZ7E4" "pI0olQZw5UGsNfhAfvf+xDhHp3jPAWJwJM6rzpITgvzucUDEIWQt0lnqzTbPX55moHUbEwbM" "La4ys1RDjDwEXh6jQ3rDQxQrWF2H1ZsIY7pbzXs3KfEWTSmPfOUAbfrQ7QCcSXKeVHhKgYnA" "SkTPVuvdnpm823Vnkhy4rWXnkzU6/pxsoyxh92qyGMZpXcfvhaNbMvX2KQVEoeG1OxG2ncdZ" "HymH8Q71IT0fRGLfPYMQuFyJ0tiHkdx1lB3tAFqhwYStJI0JnBB4prbK/OvPYdZWCZX3vn5S" "0nk218FqAPNr7AmBQPbesU29t3ZLFIcTiqKM8E5N7OEv/vJJjDFIKdMO/32onWJjJ4nkwagg" "CJr889O38QbKJU4cn0g2u4p3fYb3AVb80EFQr9fpL/Qlq62DMIrwrH1fR+3/vpJaOIriBw8Q" "J+xOEZx53s4SAty28uKDmOR+K+0UkRm8FMrgpVAGL4UyeCmUwUuhDF4KZfBSKIOXQhm8FMrg" 
"pVAGL4UyeCmUwUuhDF4KZfBSKIOXQhm8FMrgpVAGL4UyeCmUwUuhDF4KZfBSKIOXQhm8FMrg" "pVAGL4UyeCmUwUuhDF4KZfBSKIOXQhm8FMrgpVAGL4UyeCmUwUuhDF4KZfBSKIOXQhm8FMrg" "pVAGL4UyeO9RQtz/H7ldeHd/mSfTTtqJj+yc3Ilspnu1nZEnpSSKImq1Gp7nPeC2TB0559Ba" "I6VEPPXUU25iYoIgCD6gvzHwm8k5h5SS6elp/htoVcToXnTr8QAAAABJRU5ErkJggg==") #---------------------------------------------------------------------- wxFrame = PyEmbeddedImage( "iVBORw0KGgoAAAANSUhEUgAAAF4AAABGCAYAAABFVyOYAAAABHNCSVQICAgIfAhkiAAADFRJ" "REFUeJztnFmMHMd5x39V3TM9MzuzB7kk9yC55JKSKNGkSCnOAj4iGQKi3IKRA0GABIHi5CHI" "Ux4CBMiL85DkIa8JHMBAAMM2oCSyFdgSFFti5FgWKCoWD5GiDpJL7pK73PuYu7urvjxUz+yu" "uCKNQFAnUv+BwcxUd/VU/eq7ugZodf+Jz1FfnvF37T9ysHd4/DettX+rlEJEUHxyJQqUACjc" "RNW28xUcBxFxDap7AFSnz/akNvdVWv9lfXH6Wdtcnpy6OR+r8YceDfqGx397YPTQ10cffsIr" "7z3qK3f2zzD8/8dLowSFxsQh1sZd7B+cdadV+3mU9hxEpUAEE7UTuGrbfg64h/bz1G9diqfP" "vmRmr178yjuvPf9tdfKLv7J34MCxK0d+48+D1VqDZqMOSnW63n3wP9Pi/B9UAsSETaL5K0hU" "AzTbz1eBWFRpB+Xh+4mtAxqtzWJWb35In43fwcsRDB5kYPQQ7doa5/7lb+LPHar8mV8ZGnt6" "5MQT3tJqlXptnVw+QImg1CZ73sawRcDeY3G02uq+Ati0F0uABHpr7gp7RvdR7h8EEfQ28zRW" "0L7P3NRVWJ2kPPQACzevEdgmez8zgfZ8FBaRDRI68Qi0R6O2zurtG4TLeRr0MHDfhL/euPQ1" "3xr71cLQERZn58kVioTNGrUb52jV151rIXewFbEoP6Bv7Dj5yk6wdusJye+2Y4ux0o2PnlYE" "Oe0G9kEYSb872j6sXW1z3t3aO8dE0LkCjeU5PBtiy3uoGo8otkTmzk55X+GLRvr2s/z2KcaH" "D9NcmaVn5ADNXD9xq0VoIOeDpxRKQSsWxAqFnMYEg9Ra1yg1Vwj6+imOPMSbL/wHvkuklthY" "iC06bjNerFLpU+D7KASltItrgNbOiq/dnGO5towuDYA1W2ZnRQh8zcl9PQz2BmjtoZVitRHz" "1s0qjVa4haVSjtNmZ+hYn2yCmOQzFGDFvW9ZQ7VhA1s8drNE0MpgrMFXGhvHRHHI3oGAA4MF" "QKE7pyq4PNtgqdYmiiIsEBmTjFNotVoUCDk62sPUcpvFWoSxwoPDJUo5j7dmahjxsFYwVoiM" "xVqL52l8EKwVrLWY2OAr+OJnj/L4ycPofBGtwPM8tNbJy0NpzXd/8GOeuWiI4xi9ybyUUkSx" "pa+g+bUTgxzuFyZvTBNFMdfWFjl3ewWv/z5IKifP84iNIGLJ+R6eVhhjCWMLCvzEiowIvlZY" "K0RGyPsKhXIxd5NX5nyNpxVhbImtCx9CZxEUYBFrESuA88Z2ZDg2WuTLjwyiUFsW8x9fmeH2" "ahtjDQgYY7se3I4Mu8qaXz++g8gIX/vRDLsrOf7ksSHmqxHnptcxKKxYB98Y97sCvjMCB16J" "G5DveRSCHF4+h1YKvQm85/nkfE2Q84HY9RW7MTXlvlsRijmf2ZnrfPM7L7LSNNSaEX7PDv7u" 
"K7/MSstNem5plX89M8uxg3t44kgfRc8iXo7Xrq7z4oV5HjsyyJPHB5leajHSHzC33ua9200e" "PdBLT+Dx3Tdu8ZMr6xTyPhPjfXzpwX56A8WNpZDnzs4ztRziKechSZZBJ+MTwFpLaAyegu+f" "X+TVK2tUCj7N0PLk0QGUCMYarLEIznIRsFbwlDC93OTly8v83sQe/upX9xPkFPWW5d/emCOM" "DUrTZWStuwaQeJUIiAPROdR52aRd2HhXG11cm2zuQTduiFgOHRzjj//gd/iLP/19nnzyF6kM" "H+L+kTL3D0S888YpTn3vGVaunGFtZZHp61eYfPcCvVR56ngf+/UcZT/m+GgJb/l9rr/9JhNj" "RR7fF3P+9Cvsrxh+6RAUGrc4vLvIH35hhB2ywtz1d/n8eInfeiiH11qhm3QQN+bE2hV0LdGK" "cHs15O1bNd6brXN5pspiNXKRwNouB2dk7jomCR3fO7/AT6+vs3cgoK/g8+xP5zgzuYbGOg9B" "sF1WzvP8TZwcQKFr5Z7noeiEGg+tFZ7WqMT6O4u15Qajs0BJvTs7t8ALL77E6lqVmYUV4r6D" "mDjm/Ftv843nTlEvDLNndCf37enhgbEKJmyQD/KUPEt/OEO4XGJxqZ8XXn6Vti7x2Ucf5szr" "Z/jW86d57PMT9Aaglt5nd34/+3cWuFbLM9BXYX1tjbJdIVi6RG3Xo/j5QjK/xIisC0AuxDpP" "73y2JmkTF4ZNlFh8AjuZJdZY2qFh4mA/D470sFSL8D3F40cGuDxbZ2alhdLK9bMu3HTkdy8i" "FmWdlSoleLgEp5VycV6RgFeJtUh3YGC7ERTl4nCnbLw9t8DLb06yIAPkC8OMB73duBr0D2F3" "n2TP2ChfOnGAQrzCN154hYlHjnPy2INoBNuuY41F50sUKkNoP4+xhnzfbnLFMjauI3Gb1fV1" "Vhohly6/z49eP0e5VKTWaDI/t0Cxt4rxAxDravhOpaUdkDix+IO7ikyM97pQExn2DgTcWm66" "49YmMX7D6+vtiJFen9+d2IOxlq8+N8lwf8DTvzDK018Y4a///QoiLq6Lcd7ViQ6+c0O6DZER" "zr53i1a9hvYDVx3opARUzuJ9T/HmO1PEdmeyAHQug3a+jIiwVm3QrFUpV3qRncdR5V14Zc3C" "ap16tYoRhefnWWtZLlxf4uf2+vz8Iyco9PSwsLhE2G7SDCMWl1dpt5uoEiyurFOvVbHGUm22" "idfWQAwXp9f5/pkpHh7bx5d3D1EoBPz49FnstMYLerDWJCXQhtWDYKxFIZy7scrJsV4+M1pK" "ErJiZqXJtfl6klglifFuAcQKYi21ZsxLFxe4PFvj3dk656egHceUA98lU+1Me0uMV4nFI9YN" "zMZE4nF6vsTpmRqoRmcboxu6VfLBqjJeuRdt4y1VhQU0Qr1t+PYrl6m//xrL6yFeX4SOWqzW" "Pf7+2f9m5vxPiKSfQAlr9Tbf/K9rvGSmaC1OU7c+fT0BU1M3KdcrTN48xbXrMwRmmH/4zmvM" "XHoP4w/xT89foDV5mvV6iG0avvXqJP8pt+kxK9QbTaZnF/GL+7BoREw3pioxiHWJz4qrO89O" "VTkzudbNXx0FOY2nk1BsO5ZvsEqhtWaxFvHMmVl8T5H3NSLww4tLSXGhkxLX5Qk3Brsp1IhB" "TIQk5bjXN8zGlsF2t62glbNsG4cfPAQK2m3DpdUWRsYIdvng5xAb02zGvL4YovIPEJTKiFJg" "2iyHlltrBaQ9hO/7TK4K+YF+6qbM9GpIbudxQsqcnTPoyjHyhRIXZpqYcIRgt08uKNIwOS5W" "y0T1EE2R/M4RcoUebNwG7XeJWmmi8kXarQaydJOYPAoh2G6qLSFSHjSWKVR20opiJN9DY3We" 
"dt5ixZJXIDGErSR+J9eJYoWYkJwyWL9IFLaTGw3l6ngxHfDu5oc46t7UbDL4O+DLPfbICoU8" "qjjmXEwsYiIUUO4pIeVxrHWxFROhlaKyYxewJ8kzKknSlkB77jxrKWuFsANrDUWlUCV3fSsW" "JTE9vf3QN+AA2yRBCmCijYHZGPwCUhlC1m6hWnVwQfPOSSi3VxOUd6D799FoNFA9u7DROiy+" "i7K2U+dtvWlWCiWC5+fI79hHm4C43QQxKMBX2sfzPDcwq1GbMu+99h7vddx5t2xsn3bCkXFf" "1ObVBewmD+s2K7Bx1P0xa9w5ne2QD17fdgHfbcPW8TTaozh4gNI9JqKA0AqtdgvVSWiFPirl" "gXtuJYpAMzKYsNUtPgSFb2qLzJ77AWZ5kdDztwa4T7gEaCvV3Q7Z/qSOMWyUzgAh0ND6Lv02" "X8J2zUmUR0lH+A+P7+LpP3oCY4yrzT91kg/LZJvO2P5vkk5ouve/EorOvn2z2eCfv34Dv7dS" "5ugDh7DG4Pk+d3ecTP9bibh9rFq1SqlY6FQ1EMYxfveWNtNHLwc2jNz+lg8u0ehurMvIf9Ry" "hYDqfgb4NAb1j13bRZEMfErKwKekDHxKysCnpAx8SsrAp6QMfErKwKekDHxKysCnpAx8SsrA" "p6QMfErKwKekDHxKysCnpAx8SsrAp6QMfErKwKekDHxKysCnpAx8SsrAp6QMfErKwKekDHxK" "ysCnpAx8SsrAp6QMfErKwKekDHxKysCnpAx8SsrAp6QMfErKwKekDHxKysCnpAx8SsrAp6QM" "fErKwH8M2u5BRF3wG09MzfRRazu2evNTUTN9fPK11kRRRLVaxff9tMfziZaIuKePa4166qmn" "5PDhwzSbzU/pM8k+PokIWmuuXr3K/wCobQ9DCns9YgAAAABJRU5ErkJggg==") #---------------------------------------------------------------------- wxPanel = PyEmbeddedImage( "iVBORw0KGgoAAAANSUhEUgAAAE0AAABGCAIAAAAGrI4kAAAAA3NCSVQICAjb4U/gAAAAyklE" "QVRoge3bsQnEQAxEUY3ZGhZ34Zrd3YJLEEgX+LKLjeHfvGg300exdJ6npKAbkvZ9n3NW1dvD" "PELSWmtExJzzOI6qQi62qjJz3K+q2rbt7ZEe8a27P8hN3iRJYu7wlztZ3MniThZ3sriTxZ0s" "7mRxJ4s7WdzJ4k4Wd7K4k8WdLO5kcSeLO1ncyeJOFneyuJPFnSzuZHEniztZ3MniThZ3sriT" "xZ0sf9bZ3e/O8ajuHhEhqbszE3kF2t2SRkSstTITebd8R17X9QFVODvBZcI23wAAAABJRU5E" "rkJggg==") #---------------------------------------------------------------------- wxPropertySheetDialog = PyEmbeddedImage( "iVBORw0KGgoAAAANSUhEUgAAAE0AAABMCAYAAAAoVToVAAAABHNCSVQICAgIfAhkiAAADmJJ" "REFUeJztnHlsFFeawH+vumy320fbbWN84cbGNnEaMA7HgjFoMJlBgzLs5lAynrAi0Yxg2Wh2" "pdUqGYn8kc2xEknQrnb/cVaMQjZBWZIMyYZNpJAMs5OQ2EDAgA+MjTHYGLtt4/voq+rtH31g" "46Od9gVZ/6RSlfq9evW9733f+95R1QIf1hzbT5OyVj4bboopYYFRuIb632+/Vvn2jfrqLwEE" "gDXH9tfrHvttaVTqA6aIRZnzK+E9iLOjkcFbtUNnj/3739yor35XtebYfrrusd++FZGzNXJY" 
"kwz1Dc23jLOO5hrG0VKJPtwXNG/kknxM8akIq8UUt6L2LaBNTcpa+Wtj8vLIYZcHTZezL/E8" "IzU3wy3VpOcWEGW2BM1/7WIZZpOKU8QSk26LHL5V+2s1PDLmKUP8Epy9g95C3Q6GL59AKmGT" "FuaOTCQuay0IMSOVmSt0lwM8DlyKCWffcPD8pkQcXa2IRXGYkrNRwoxPqQC6puPxaACoukZ+" "ShgmcyJCKAgBQogxx4lzDbjd+QjFMOED40wqW5bHAyAlOD06ta2DNHc5kPNk1LpH9551HV3X" "saWaiI1UKWvwump6fAS2VBNfVHcDoGkampR4PBqa5hVaBZBSouvewgwGwfai1dhsNgwGA4qi" "3DkrCorv+uL1Q/TpOoKJLS0hSmXf1nTqG65zs9VOzoNZdC2PofSra1zrAVVRkFL6jFXg1nSE" "EBgUEAgkEk33yqcIgUERvvz+NImUoCgCg68MfPfoE7SKlDoC0HQdTdNYtzSazTlmVEVyoXmQ" "v9+WQoo5nM8udfqUK9F9+pHyLqX5f9B1r1CKogQOoQiEogQORVFAgC51FKlMqDR/mZ999Q3v" "//ECv9hWyO9+81ck9V1EN+VQum8zVc39hOGkrLqJDyv6efihpTz60CJUfRiPMHLsezt/qu2i" "YGkcLz+Ww/nGbhJMClIJ49++aKDq1jB7tlrZtCwGx1A/bsJ4/9R1/tzgRKjhYy1Nlxh8SvNo" "GqX/e5OYCIW//UkqAPY+Fy/8oQGPpgWULKWOrsu7lDaiggjvWSh3XFERCkrg2nvGd6OcxM/8" "KduLN5GXv4bsjBTa7B1ca6gnLD0GAK2vhX95+yM6e4dIyVrFrzZs5OL57zn6P3/kl4/uoGR9" "DrW1tUgZB8Ctq5d4/0wl/7DnV/wsU6e2oZPtK9bQ1tbKkY+/oGfQyY2brTij8ohYtAzE3Y3q" "lUr63BPgo3PtFGabAbjQ1E992+Cohvdbm/9e1ZcSqLyU0mthYoSljbI873VAYZMpzZfW2dlF" "9aVazpY7KD9fRcewgZVLIwD49kwFVbcchC/OY/niJSREh3O+uo7qDkG1XecnG6II66zEsygR" "gEuXr3Lm+iCDLklchAT7JUpPLKUwHZ76xTYio838839+RcdtBV3TEAYxrkyapuPx6KTGRfC7" "n1tp73PRPeimOM9Cc5eDj8+1B/JrukSO555+rUtxxz0Dljbi2hscvH2HHqRP8z/k3KXL/Nef" "ajBEWRBROUQkJWCITQHA5dYwRCegWjLoIZbWXifFRX+BKyqNLQ9l03KrjY72dqKtvQBs2bgO" "U1IX8TEmLlW009/bQ2wEfHG6jlWZCTy6fSnpkQ5qRCxSKEhfvQIy+YZVfvd8dM0iFAGvHb9G" "Y8cQ//jzTH6zJY0Pz7TesTTf4bcP1VdUwPSGnR5+f7yc+G9q7igKn0uOiKT2fh3VAneccCwu" "t0azvYu+gUFUczIRqSvAEI4QCm4R7k3rHwDFAEJwvWOQfz12lr/Mj+fRjdl025v4j4/+m6Ze" "SZYvSnd0tLM+J4XKqmre/cPneNQElibF8FDaCsIMgmOffcWpstNoltUoURaEQb27KQFv9NQ0" "jbe/bubIdy3Ye50AvPF5A7//cxhaoE+TAcWNck+p60jN4y1MKLTEF9LiV4Zk9NmHki5Aakht" "dEuOpNHexy9fPYartY7w5OVIn11KqY9N0zy4EZy75aGs4hs8HY1IzYUSaSYsxYYweYcu5yrr" "OPHuSaTHhSHSQlhiJi8fPY+z6Xs0x4A3+sakEW6KR0odfPUKqExzA3csraNPG5XeO6TRO+S6" "k1/3Dvql5kbq2gilSQ39rsKZxO38StQ1bfI8gGqxYjCngFCQyEDjTJYWtigb1WIN9JfCoKJh" 
"YHDYiUfTUM2pqAlWEArCEA6KgjGr0KskKb2/q+FeY+CuRhUGHLoKbQ3oYVFB5Vf6bqGk5uFw" "Ob3lE7A0LdACs4JvdiE1DdCmlibUQLtJCeVX7BT/3Un0/nbCkx/w3ie84zw0DYTBe/jw9mXj" "e0F4YiairwWt73pQ0SPSVjEwNOwrc4Sl4XYgHcEnr/NNePwSiF8CgHQOhFyO0wHhEbEYIuOD" "5h3sbr8zrPJbmpkeEm6XExc2cd/0o2RiQxzNiDiiaRqmOIG6cnkW//Tic7jds+iePxIGBgZ4" "5ZUmlMlG9AuMjyLus6Wde4GJZ9sLTMjdw+Vxaenopa65HZf77rHcXCOwxEbxYOZiooxjVzDm" "CjVYn3a7b4i3PjtP83A00jD5au5cEK518jN7Nzs32QgPm3gBdDZRg/Vpx7+tJi4lk9RYS7A5" "wqwjAV3zcLaumr6WjzEK17j5FEUhLi6O1atXk5ycPONyBLU0o7uHfdtXY44JPuWYC3Rd51NX" "LYnmbLKs6RPmaWtro7KycnaUFszSYlU3qjL5YuNcE616MIYpuFzjWxpAcnIyNTU1s/L8+3Kc" "NhWZFUUJrBHONFMactxLir0XZJnSkAPuDWHvFaastPHQNC2gTCEEBsP0hwD6iA0Pg8HA3X3u" "yJ2z+SJkpfX29rJ37140TSMyMhJFUXjuuedYt27dtAT68MMPOX78OGfPnuXIkSOsXbt2WuXN" "BkGjp5+7W9ff4m+++SZpaWkcPXqUI0eO0NjYyLfffovFYmHv3r0sXrwYu91OaWkpLpeLBx98" "kPT0dDZv3kxFRQXHjh3DaDSya9cuMjMzKSoqoqCggDfeeGNcq7oXLG3ageDixYuUl5dz4cIF" "li1bRkFBAfv372fx4sW8/vrraJrGa6+9RnZ2Nvv27eOdd96hrq6Oq1evUlpayhNPPEFBQQEH" "DhwAIDU1lbS0NMLD52+aFIygg1s/47U4QEVFBRaLhQ0bNrB+/XrKy8u5cuUKt2/fprq6mp6e" "Hq5evcqrr75KTEwMxcXFADQ0NFBXV8enn37K0NAQ33333ai914meey8wrUAAsHv3bjIyMtB1" "ncOHD1NfX8+uXbu4cuUKNTU1qKpKWFgY3d3dREVF0dXVRUJCApGRkWRnZ7Nz506MRiMlJd4X" "MP3u59+L9b+74edeUOKU+7SpEB0dTVdXF6dOneLmzZt4PB6io6PZuXMnBw8eJC8vj6qqKnJz" "c7HZbGRkZPDll18SHR2N0WgkPz+fkydPcu7cOaqqqjh69Ch9fX1s27Zt1HPmW3Ehu6fJZGL/" "/v0kJSUFrGHHjh1kZ2fjcDjYunUrjz/+OEIInnzySVasWIGmady4cQOj0UhCQgL79u2jsbER" "t9tNYmIiUkrS09MxGAysX78egJSUlHvOXadkaeMJqqoqNptt1G9RUVEUFBSMyXv79m0uX74M" "QGtrK7t37wYgMTGRxMTEUXlzc3PJzc39wfLMJVO2tOlgNpuxWCy43W5eeOEFcnJyZv2Zs8m0" "A8FUMJvNPPLII3PxqDnhvpt73gtyLGyshMDCFl4ITMk9tSm8HTRXCCGIjIzk/PnzqOrE4gsh" "6Onp4YMPPphy2bquY7Va2bhx46T5phQ9Z2LJZ6YQQlBUVMSWLVtmvOyOjg7KysqC5puT6DnT" "jFxzm0mm6lELgSAEFgJBCNyXu1HzzYJ7hsC0loaCWemP1fVDjp66rlNXf5WOjs5x05daM0hP" "T/tRKm5afVplZTUWi4UlS9JHHZqu09R8c0x+u93OwYMH6erq4vDhw9OeR1ZVVfHSSy/x9NNP" "c+LEiWmV9UMI2T2FEAhF8MDyXMzm2FFpUkra2trGWFl0dDSbN2/m4sWLHDp0iKSkJDZt2oTL" 
"5aKmpgYpJTabDYvFQnV1NUajkebmZgwGA6tXr+bGjRvY7XYyMzPJysoiJiaGhx9+GFVVaWpq" "ClkJP5RpDm4nVrgY8/UbtLW1ceDAAZ599lncbjfd3d309/dz6NAhYmJi0HWdsrIy9uzZw6FD" "h9B1ncLCQsrKyjh+/Di5ubm4XC4++eQTXnzxRaxWK1arlQsXLkyvGj+QkKOn9xupydMnwmaz" "kZKSQklJCQMDA1RUVLB27Vry8/MpLy+nv78fKSU7duygpKSEPXv2UF9fT0lJCc888wwA7e3t" "oYo+baa1cjvZF3hTfQPQ6XTS1dUVmIAXFxdjMpkA73K4EAKTyURERATR0dE4HA4MBsO8LiJM" "q09DQHd3Nx7P6Hdx+/v7J1VoVJT3BcErV66QlJTEqlWrWLlyJVlZWdjt9ilvFPf29tLe3k5n" "ZycRERE0NTWRkZERUn1+CIaioqKXtm7dOuEEuL6+nszMzDHLMEIIPB6NquoaGq41jjp6e/vI" "yV6GOXZ0gPDvYxYWFgJw8uRJ1qxZw5o1azh16hSnT59GVVXy8vLQNI3c3Fzi4uIC9/k3bVwu" "F7m5uTQ2NvLee+/hdrtxOBy0tLSwYcOGkJUxNDRES0vLhHsYLpeLr7/+GvH888/Ll19+ecIv" "Vj7//HOKi4sxGo1j0oJZ6f02Revs7OTMmTPs2LFj3HTvFyuvTC963m9KmSkW5p4hsLA0FAIL" "lhYCC+tpIbBgaSEwpejZ29uLw+GYbVnmnanWMeiMIC0tjdOnT8+ETPcFVqs1aJ6gc8/8/Hzy" "8/NnSqYfBQtDjhBYCAQhsDDkCIEFSwuBBaWFgCqEwOl0Mjwc/J84/7/j/yhX7Ny5U2ZlZS38" "A8wUaWxs5P8AE8WsR2Tbeb8AAAAASUVORK5CYII=") #---------------------------------------------------------------------- wxWizard = PyEmbeddedImage( "iVBORw0KGgoAAAANSUhEUgAAAIsAAABOCAYAAAAQPOX4AAAABHNCSVQICAgIfAhkiAAAF4JJ" "REFUeJztnXl4VdXd7z97n2GfMeckOZlzMhASCAgyakVRJmdLK9ah7+stb1+tw9OqfSvFVmt7" "b7nX9nnt7dtWbav11VptHajloiIiCIpihDCFwTBKICFkICRnyBn33uv+cZLDnIQQQ6Dn8zwH" "nuyzztpr/fZ3/9Zvr70GiS4sNsc9+cPHPJtXdhEpUgAc2ruNxj1b742Egs8BSAB2p0tc/Z2f" "4yq+GCWr9NyWMMWQIdq6D9/+Gpb/6ad0BnyS0WJz3HP1dxZgH3UtYU0Q8ocAsBgEVqPAbJRp" "jwhimnSOi57ibNFiESINW9Ai/l7T2orGYXPnYxqewbBpd7L7g5fuMeYPH/Osw3sR4ZiKpgvy" "bXHKHVHSHTI2EcBulvmoycn+oILHBoc7daK6YRCqlmIgEVqccON2vBXjsbkyek3/Rc2nuKxG" "opITV8nFpOcPe9aYN+wiDOlFRH2dAKQZInh8n9PeqlMX1uhUsmmVFbIUldnlVnYfPMIR1Upz" "2EhbxEAgLn/Z9UwxAOjRCMQjRGUbEX+49/Q2D5EjjUhZldhyy3HlDsMIoOs6qqoBsKXNzCHD" "MDo0O8E9azH41yOP+xqZmkrVui3Ej9TjLfEyo7Kc9+pkqhu1L7eWZ4gsSYwrcpLhMLGq9giy" "BFdUpBON62w+EECWYVxRGp1RjUhcI9+tsL7Ojy+kDmg5nBYj44udBCIqm/YHBjTv/qBrieuk" "6zq6rlOZZ8NpMbBuX6JsBekKlblWVtR2AKBpOpoQqKqGpgmAhFiELtB1PZEZcEhzAiAXjSey" 
"egNK2E9b1EjVp8sxV17Jfr+X2s1+mmNOdN00qJXuDVmWmFjsYPb4bNas/pDM4RO5a2o+4VCQ" "x2pryPBWcvfUfJat/JjGOIw02ahuqEWzj0A2W45mJEDQ9QRwTLgmEnZDOuaYhIQAhBDJ426b" "gTsvy2VH7Q6qqr/A7Cn5UuvdG0LXkQCtSyxTypxMKUtDlgQ19Z08OCMPb4bCsu1HgISohBDJ" "/6FbLEIkDxyHUUEfdjmmxY+jIROd/C9ouRcTjkFTzN5djMGoa5+Ja4LtBwPceLGH0RlxbEo7" "8WiIWDiIJ1ZPrlJEVpqJj9d8yrQrr+ArE0bx95Ub+M7V2YzMd+Kw20lz2nnmrY28t1tlziWF" "XFVmxW4W6AYbf/uskRXbD/P83eOQJYnWjk78AR9/WL6PQm8+90wvQhER9rZGcSgGtIifUP1m" "jOmFSPK5i/V0ITCQEIuqqjyzsp40SzEPzSwAoCUQY/7CPcTVhIcVQqALga6LE8TS9eWpMOUM" "p62jHVvJBOSC0adNN5Ro8kU5HIwzfvRwwqrO5zt209kZ5OIRxaRlmYhEIuzZ38gMxQ66jh4J" "8vKqnbjD+5k7ZxZup43Plr5GyDmOHfUWpLY2XFYDUyaN4aZKK1XVDQgxFoUY7yx+k8/3NXEw" "CD+c812UuI/fPf9XikuGMbHkumSZhBBH3dK5oOvcosuzALy+rplLStMA2LQ/wK6mzmOSd4lF" "CLodgrE7o9OJQFZsqLqE7M4Hs+28EMuhjiiHOqJcOq6SDl+A1xcvw+XJ5cYrx6NYbGz9fBcR" "3YhkSjQ7Qug0d4S56dpZ5HgyePT//Jo9HTK5OWbunFpKlqWA5pbDWM0mIsYwpo69iFiYvYcO" "8snm3QSc5Rgy0ijJsrN16142fnGEdttwbo4cjefOtVi6r5um6aiqTnaamYevLaItGMcfVpk2" "Mp29LSGW1BxOptd0gdD1ZLG7PMvRmOVYyjxGKrNNvGkCQ9xPtiWMw+FA13UafSqd8aHZ9+IL" "xalr6WT8pFxcTgfbd31BoTmPDE8OmU4L769YBbYMJMPReOuGCV5uurSU91Z8hDurkOKsHEwZ" "XiryHNTu2Mmipav4H9+4CbPJALqKQCRcttmB0e1FtrnZ0xKmoqyEq6+8lOKK0aSn2ZL5C5Ew" "/LkiKRZdR9U0bpucjWKU+cU7X7C3JcQjN5Zy/wwvizc2H1deXZzQDJ0YdphkKHAb+MHVuWTa" "TbxvU8iINTJ3okJWdgadwSDPr2mnM24evNqeAUIIaur9XF5mo27fPjpCMURApr4tRCzkZ2vt" "LmSHh5Bm5HC7D1XVGO114/P5GDNqBKMqR/Lq8o2sbQvxZtU+Litxccmk8bR3RjGKOKqq0h4I" "4/P5E27akDDjf6/cw7cmuZg9dSx7GlqoP2jFHwh2F+q4QPkcGAVIBK6apvHymgbeWNtI/ZEI" "AE8u2UtWmoLW9dTU3QyJY5oh6fKvf0d4r3uQptZ2AIwGmD02jdnjs3A5rAgheOPFl/BkZjLl" "6plYrFY27mnlf7/XMtRi25OI1K0jfngfBocHS9F4hKYR2vUhSGAtnoQho5hQ7XLQ4igFYwnX" "rQX9aNNhKZ6MwZlN5MBGtGALRocHNdiGQbFjyq0kUleN0elBKbkU2WxDCJ14235ijVsRmorB" "mobWeQSTuwBl2FeQpHMX4Ip4GCVYj1wwjnAk2nv6zsNYtQB6+jCiwQ5aPnyu+9FZQ9cTUbAk" "gRb2s7ImhtGsYDQacU+4FtlsJqybqGvs5C+ftaFrA9sv8WVgLhiDKWdE4g+jgmSWsY2+FnQd" "yWBECB1L2eWgqUhGE7ZRie+SyDKYLFhKJiO0WCJPSU783mhO5CV0kA1J+xnd+RgcHhAaSVfS" 
"5dKFdA7vLkkmoknQUocwO3pP7zuIXFBJNBZFiIRNkmIRahyAiAr/2BIHeu/lOy8wJJrKRICp" "gWwCucspqnGQDGA0JP6WZTihQ1poKiCBQTl6sPv3ciLmEbp+gsiMdLfwx+dzbjFnD0dqryPe" "Wt9rWlvJJIKdifeEosvbJmqkRhHRc9/LmOLLJRoNoNg8KM7cXtMG21uOPvl2i8WutWNt/IRi" "9dwrP8UgEDzzn+i6jtkpMI6rHMbPf/4g8Xh84AuW4oIgGAyyYEHjiS10ihSnJyWWFH0mJZYU" "fSYllhR9xth7kr4hhKC9vZ26ujqampoQQjBypJWCggoUpQBJGprvkVL0nQERixCCJe++y6Jl" "q7AWjCKeNoyOkMS/Z/5fXng9i+EFV/Evd9yOoii9Z5ZiyHJWYpHlz/H7dX77+7fpMFu47F/v" "QugSlrw8pPhOHLZCPjI9zsK332TVmsdY8NgDFBcXD1TZUwwyZxWzSNJB/IF5FI/t4NY7Z9BY" "8xZv/M+57F73DBUZT7OhPo/wFyvQkVi5N4MnfvXseTEeJsWpOSuxbNqUxXN/L2H2dXsozdiG" "TTbyran1GDc9z1M/2cr2516gtO5xFIuCIWMUb37s46233h6osqcYZPrdDOm6zpL3VmAaezf7" "gk1kW5qJxaNkeGW+eb1EW1sHVrPEY08VsVsrQ2oOoKdN5G8L3+Waa67GarUOZD1SDAL9Fovf" "76fBD1mKj0BnOtUrDtC25TVG/asRixkKCgwc2K+y90gW7sIIXxu/nFhnjDQtSEtLI8XFZQNZ" "jxSDQL/FEggE6IgaEUd0Xnrqz7RULWXB3TI5ipHOOoFkEDhDEjeN2MyLNe+zX7FjUtJoaozT" "1uYjFeeef/RbLIqiYDIYaWgWqIFGMvMn8pu36hnmbeS7Uy2YPBJtUcGIYVGkt9/h3cZvY80b" "TXbnekymoTXXKEXf6LdYqqqq0Js2Yj28F6tUwm3frKTR/yJP/MrNrZeH+fjvUVrX6zxbX4DP" "MRlbgQlddpLj8FNYWDiAVUgxWPRLLOvXr6empoZv3n4r0WiU5uZmMj3vs1WLY794FL98bQ1V" "2xzc7Agx1uZjbcYwlNyRBOoPMP36YaSnpw90PVIMAv16dH7ppZe45ZZbWLRoEY/8+HGee/VV" "Gv0HeeODb6KMjrK8Q0K7XeKjeJwJriCeyGdEOqIU6mu5+665A12HFINEvzxLPB6ntbWVSCTC" "sFIvRlOY3/zGQl0A7CaBeZYFzNAx3kVHTYCLIuv5vC2DX/7kTtxu90DXIcUg0S+xTJkyhcWL" "F1NeXk5ZWRlOp5Oy4RX8r98+T2PHXnSzitVsJmSOsyEucb0rhqsoyCWTxw10+VMMIv0Si81m" "o7W1lTlz5jBq1CiampoYPXo0WZ4M5s2bh1cfR0FRAWk5acSuDeOrWoj7UA3rqtZw7Q03DXQd" "UgwS/RJLcXEx7e3tuFwuPB4Pra2tHDhwgNLSUubPn8/WrVuZN28eVqsVn8/Hw3dvp6ypmrdf" "+W+uvu4GZDk1jOZ8pF9XbfLkyYwdO5a6ujo6OzvJz8+nubmZqqoqQqEQQgg6OhKLwrhcLuZ+" "bx5VkXTCukw02vtsuBRDk373s8ydO5eFCxcSCoW44YYbaGhoYMeOHciyTCgUOi7tJV+5jB88" "+UfGjh2beid0HtPv9mDEiBE8/PDDNDU18corr1BdXc3mzZvZvn07Y8aMITMzM5lWURSmTZtG" "RkbvC9+lGHo4HInprv32LJIkYbPZuOuuu1iyZAnZ2dl8+9vfprS0FKNxwEZrphhCGIGzmmDm" "dDq54447BqxAKYYewWBiGmPqsSRFn0mJJUWfSYklRZ/pUyTa3t6e7DdJceFRWtq3zT36JBaP" 
"x0NOTs5ZFSjF0CUWi/UpXZ/EomlacmG6FP+8pGKWFH0mJZYUvdLdg5sSS4o+02PMoqoqDQ0H" "icaiQ37N2/4gG2Sys7JwuVy9pg2Hw7QdOXJBLqcmyzJ2m53MzIweV7voUSx1+w+w8sPV2Gw2" "rFYL0jldInrg0TSNtDQnV029Aovl9Cs8xONxNm/ZSiAYxCAbLrjlQwyyTGcoxCWTJ+HJPP3L" "3h7FsqZqLcOHD8ftdhMOhy845xIMBtm5cycjK8opLi46bbrGQ03EY3FmTZ+G2XzhLRsihGDX" "7t1s2LCRa6+ZddL33e+GehRLLBol3e2muLiIdHfvrvp8Y9VHnyDJBmK9NC1+f4A0lwvZYEzu" "x3OhkZ+fz8efrOkxTY9ikSQJJAlVVU+5a8j5jkD0sWFNbHZwIfc1ia6NqHqibwNPetiP6J+J" "09lACEEsFsNgMGAwnDqmSWyJN3Rjnb5c3z6Jpaedzv7ZOJUd2tvbWf3eUjJzcxl/yaXYbLak" "MILBIOvXV5OVlU1FRcWQnec9cGI5C8/S0tJCS0tLcqPJzEwPCEFuXt4pz9Pc3IzD4Uh2BB37" "XVNTEw6HA6fT2a+ynHzCM0t7Ojts37ge48aP2NHaRtOBOmbOvhmXy4Wvo4NVi/9B8NMPWG+y" "IT/wMOXlFRgMR7eSCXV20tTcRCQcwWgykZmZedyQ1IGmo6ODaDR60ru+ARNLf9F0nVdefpma" "LTWMGTMGgIkTJyIE5OTmnuSWNV2ntraWktLSk8QSi8V45pmnmT59BjNnzvwyi31aTmVOAWgC" "DG3NjA22sfmtV1ltMHDJzGuofu8dWP0epkMNhPKG4/f70XU9KRafz8cf//AHYvEYVquVWCyG" "1WLlP37wgy+tDh+sWMGu3bt55Ec/Qj7DZrGPYumnZxECgeC6665PDr1ct24t69dvoNBbyO9+" "81u8RUXs3LmD6dNn8LWvf40dO2pxOBwIXefPf36R+gP1ZGVn8dBD3wdg06aNLFr0D+LxOA89" "9H0qKyvPvFz9QHTX5xR2GDlmLIuzS9EPN3OR5Kdm8av8fe9e3HtriDYdYm96PuNmXk9eXj6S" "LCc8FPDaa69hNpu57/77cTqdRKNR2o60IYQgEAzy2//6L/bV7cNb6OV7Dz5AOBTiiSeeoKxs" "ODtqa5k6dSq33XEHsiSxbNkyli59F1VVuf6665lzyy3U19fz7B//SFNzE7NmzuLmOXMQXcE6" "XWU4rn690MdmqH8duN2GXbt2LQBWmxWhCxoa6gl1hti2bRv33ncft956K4//5CdMnjyZhoYG" "yssrqKmpIRaL8bunnyYYDGCz2UFAa2srTz75K95f/j5PP/UUT//+9/0oWX8QyYt8IhmZmVxx" "512seD5C2d7NjA42c2TLajpDETaZXVwx+xtcevkVZGVnI8syAgiFQnxWVcWPH30UZ1piB1TF" "YiE/vwBBoqPsvvvvx5mWxjNPP8XLf3mZG2+6kW1bt3Lfffdz2+238/hjj/KVyy6jtraWlR98" "wBO/+CUOu53Dhw8TV1We/M//5KavfpWpU6fy6KM/Jis7u7sqiWtzjGfpizPo07sh0W2oM/2Q" "EJrDYScjIwO3252YjdglvuKSYioqKsjMzMThdBCJRpLnnDx5Mpqq8fB/fJ9P13yazG/atOmY" "FYXRoy/icNvh5CbV/fmcEeJooH/iR5IkSsvKmPD129hqz8ZgT6NIj2KIhrEoZvLy8/F4PJiM" "xuPPLZ06v+7Y7ZlnnuaB732XDz/8kEOHGhG6oNDrZWRlJenp6bjc7q6OxR1MvepKXC4XBqOR" "nNxcfL4Otm3byt/++goPPfggmzdvprm5KXnjn1iXEz3NqejjozP92ma2ezPGUaNGMXPWLCRg" 
"xYoVCVcoBLIsI3VNZZWQELroOo2gpKSEn/7sZzQ3N/PII/MZMWIEIBLTTIRAlqTkDqFSP6fD" "9m6ekyp0Wjt0tLfTuH0LwxWJuk6dwxGVyXYziq+B9QtfIT09g4qRIzF1TZOxWixMnz6DN954" "nXvvvY/0jAzCoRCNhxrxFnr5xS+e4KavzuaHP5zPa6+9yvbt2xFdNjsaa0hIkkRuTi67du4i" "EolgNpkIhULYrDa8RUXMf+RHlJeXEwqFMBqNLF36Lt1e8tiI5dw/DYmui98ltmMfwZP5dR9P" "xkWJzqH11dUsW7aMuBqnuLiYtLS0pJCSzcExd/tgcDo7tB05wqrX/4pxy2d0tjTxubsAW2UJ" "NbXVXGwFdd82Plm0EPe/30NeXl4ysJ89ezZ/+ctL/PrXv07mlZeby7333ce4ceNY8s7brK9e" "h9/nx2A0dHmAo/ETJLzajJkzefHFF/jZT3+KyWRkwoQJ3HzzHO68806e/9OfUBQFZ5qTOXNu" "Oel6JOt2zL+nQ5o/f754/PHHT/nl8y+8xMRJk8jO8uDuR3e/ruusXfsZBtmA1ZbY49jr9SLL" "Mlarlc5gEHd6OkIIfD4fdrudcDiMoigIXccfCCCEwGazYbfbCQaDKIqCoiS2lPX5fGc1y7Fq" "bTU7andw7dUzKB9++tUzt27bjqpqjBg54pRi+Wj5+7QtfIHYoQa2OLIZN/sbjLl4HBtXLSf6" "0VJGxf2sMWUy5aFHmHTJpZhMJvx+H/v3HyAWixEI+FHjKrIh8fY3Ny+P3NxcfD4fABaLBSEE" "drudQCBAepfN/H4/NpsNk8lEOBTCH0hsXWi32xP7b2saHT4f8VgMo8lEWloaqqqiqupJ3Q/R" "aJRF/28xd/3bt06qX2JzqgV97ZTr352rqiofr1593LG5c/+NnNzEHn3uruXCJElKLvJzbKeV" "5YR50WldgSCAwWAY1OmwR+/mE44LQWZuLpt0I83uQi665gYunTKFvNw8MjMzWWmxsW7lu/jN" "NjRNT742aWpqYum7S055rvKKCm6+eQ7Z3QHpMaQfY7Njh1ZYbbbkDdmNfAobnU2nYJ9jlv64" "epPJxA/nP3Jydudpb/DpmrzyihHwwDx8/gAFXi+5uXmJDjaPh2tuvoVNXi/hSBRvUREGQ6I5" "KS+vOKVtkucaZBt1N3E90WfPcr5e4B45oyqd/inKZDIxYtRoNE1Lvh+Co3f/5VdOQ1XjmM1K" "4tF5CNpywAJcXRNo2oX41vkM0grQNL0Ho0oYDAlznmgrg8HYJSBpyNqxL6MKeh2iIITgSHs7" "4Uikp6TnJaoaR9f1XocpGA0G4qpKOBxBF0PzYp8tkXAYRTH3mKZHseTkZLNz1y4sFutxL78u" "BATQGQxiMhlx9PJi0pPlYcPGzSiKBavN3q8+p6GMruvs2rWLERXlPabrUSxXXnE5qz5cnej5" "G9DiDQ0URWHm9KvIzvL0mM6TmcmYi0axrnoD7e0dQzLmOBsURaGsbBgXjx3TY7oexeJypfGN" "W74+oAUbaui63usIOEmS8BYWUuT1DlKpzg292aFHsQghLsipD/3hQhxWeqYYIdFDd+L4ke4R" "3SlSdK8wagRYsGDBOS1MiqFPXV0d/x9DK+bUFdQgewAAAABJRU5ErkJggg==")
75.642857
78
0.867389
952
24,357
22.191176
0.978992
0
0
0
0
0
0
0
0
0
0
0.153624
0.063555
24,357
321
79
75.878505
0.77259
0.01634
0
0.006494
1
0
0.902676
0.902342
0
1
0
0
0
1
0
false
0
0.003247
0
0.003247
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
5
9ff8f61976eff004aa5837591be6194d7e0cd3a8
42,756
py
Python
osometweet/api.py
osome-iu/osometweet
0db17955828f1e3c4e1c49a6cb177a7a6daa076c
[ "MIT" ]
18
2021-01-11T19:42:38.000Z
2022-02-28T09:44:29.000Z
osometweet/api.py
osome-iu/osometweet
0db17955828f1e3c4e1c49a6cb177a7a6daa076c
[ "MIT" ]
67
2021-01-09T15:33:06.000Z
2022-02-02T21:31:30.000Z
osometweet/api.py
truthy/osometweet
0db17955828f1e3c4e1c49a6cb177a7a6daa076c
[ "MIT" ]
1
2021-01-11T19:42:44.000Z
2021-01-11T19:42:44.000Z
""" The core osometweet collection of API methods. """ from typing import Union, Generator from osometweet.utils import get_logger from .oauth import OAuthHandler from .fields import ( ObjectFields, ObjectFieldsBase, UserFields, TweetFields, MediaFields, PollFields, PlaceFields, ) from .expansions import ObjectExpansions, TweetExpansions, UserExpansions logger = get_logger(__name__) class OsomeTweet: """ The core osometweet collection of API methods. """ def __init__( self, oauth: OAuthHandler, base_url: str = "https://api.twitter.com/2", ) -> None: self._oauth = oauth # A lot of endpoints can only receive payload parameters specific # to their endpoint, initializing with all of the different objects # will lead to a 401 error if we have unnecessary objects so we can # solve this simply by initializing with an empty dictionary # and updating self._params for each method. self._base_url = base_url self._params = {} ######################################## ######################################## # Helper functions def set_base_url(self, base_url: str) -> None: """ Sets the APIs base URL. The URL for API v2 is: - https://api.twitter.com/2/<endpoint> Parameters: - base_url (str) - base url of the api Returns: - None Raises: - ValueError """ if isinstance(base_url, str): self._base_url = base_url else: raise ValueError( "Invalid type for parameter base_url, must be a string" ) def _decorate_payload( self, payload: dict = None, endpoint_type: str = None, everything: bool = False, fields: ObjectFields = None, expansions: ObjectExpansions = None, ) -> dict: """ Method to add fields and expansions to the payload. If the `everything` is set to True, then all optional fields and expansions will be returned regardless of the values of `fields` and `expansions`. Parameters: ---------- - payload: (dict) - the payload - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. 
(default = None) - expansions: (TweetExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) Returns: ---------- - dict """ if payload is None: payload = dict() if everything: if endpoint_type == "user": fields = sum( [TweetFields(everything=True), UserFields(everything=True)] ) expansions = UserExpansions() elif endpoint_type == "tweet": fields = sum( [ TweetFields(everything=True), UserFields(everything=True), MediaFields(everything=True), PollFields(everything=True), PlaceFields(everything=True), ] ) expansions = TweetExpansions() else: logger.error( "Invalid endpoint type, must be 'user' or 'tweet'." ) # Include expansions if present if expansions is not None: payload.update(expansions.expansions_object) # Include fields if present if fields is not None: payload.update(fields.fields_object) return payload ######################################## ######################################## # Search endpoints def search( self, query: str = None, everything: bool = False, fields: ObjectFields = None, expansions: TweetExpansions = None, full_archive_search: bool = False, **kwargs, ) -> dict: """ Return tweets matching a search query. Use either the Recent Search or Full Archive Search endpoints via full_archive_search parameter. Recent Search: search tweets from the past 7 days - Reference: https://developer.twitter.com/en/docs/twitter-api/tweets/search/api-reference/get-tweets-search-recent Full Archive Search (Academic product track only!): search the complete history of public Tweets. - Reference: https://developer.twitter.com/en/docs/twitter-api/tweets/search/api-reference/get-tweets-search-all How to Build a Query: - Reference: https://developer.twitter.com/en/docs/twitter-api/tweets/search/integrate/build-a-query Parameters: ---------- - query: (str) - One query for matching Tweets. 
Recent Search query limit = 512 Full Archive query limit = 1024 - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (ExpansionsObject) - Expansions enable requests to expand an ID into a full object in the response. (default = None) - full_archive_search (bool): True = use Full Archive Search endpoint (Academic Track only). False = use Recent Search endpoint. - kwargs - for optional arguments like "start_time", "end_time" and "next_token" Available kwargs: ---------- - end_time (date (ISO 8601)): Used with `start_time`. The newest, most recent UTC timestamp to which the Tweets will be provided. Timestamp is in second granularity and is exclusive (for example, 12:00:01 excludes the first second of the minute). If used without `start_time`, Tweets from 30 days before `end_time` will be returned by default. If not specified, `end_time` will default to [now - 30 seconds]. - max_results (int) : The maximum number of search results to be returned by a request. A number between 10 and the system limit (currently 500). By default, a request response will return 10 results. - next_token (str) : This parameter is used to move to the next 'page' of results, based on the value of the `next_token` in the response. (E.g., after executing `response = search()`, `next_token` can be found with `response["meta"]["next_token"]` - which should then be passed to the search method) - since_id (str) : Returns results with a Tweet ID greater than (for example, more recent than) the specified ID. The ID specified is exclusive and responses will not include it. If included with the same request as a start_time parameter, only since_id will be used. - start_time (date ISO 8601) : The oldest UTC timestamp from which the Tweets will be provided. Timestamp is in second granularity and is inclusive (for example, 12:00:01 includes the first second of the minute). 
By default, a request will return Tweets from up to 30 days ago if you do not include this parameter. - until_id (str) : Returns results with a Tweet ID less than (that is, older than) the specified ID. Used with since_id. The ID specified is exclusive and responses will not include it. Operators: ---------- Standalone (can be used on their own): - keyword - emoji - # - @ - $ (Academic research only) - from: - to: - url: - retweets_of: - context: - entitiy: - conversation_id: - place: (Academic research only) - place_country: (Academic research only) - point_radius: (Academic research only) - bounding_box: (Academic research only) Conjuction (must be used with standalone operators): - is:retweet - is:quote - is:verified - -is:nullcast (Academic research only) - has:hashtags - has:cashtags (Academic research only) - has:links - has:mentions - has:media - has:images - has:videos - has:geo (Academic research only) - lang: Returns: - dict Raises: - Exception - ValueError """ # Set url and initialize payload with query if not isinstance(full_archive_search, bool): raise ValueError( "Invalid type for paratmer `full_archive_search`, must be a" "boolean object (i.e.,True or False)." ) if full_archive_search: url = f"{self._base_url}/tweets/search/all" # Check query is not too long, create payload if isinstance(query, str): if len(query) <= 1024: payload = {"query": query} else: raise Exception( f"Query length too long for academic search endpoint. " f"Current query = {len(query)}. Must be <= 1024." ) else: raise ValueError("Query must be passed as a single string.") else: url = f"{self._base_url}/tweets/search/recent" # Check query is not too long, create payload if isinstance(query, str): if len(query) <= 512: payload = {"query": query} else: raise Exception( f"Query length too long for standard search endpoint. " f"Current query = {len(query)}. Must be <= 512." 
) else: raise ValueError("Query must be passed as a single string.") # Populate payload object w/ fields and expansions payload = self._decorate_payload( payload=payload, endpoint_type="tweet", everything=everything, fields=fields, expansions=expansions, ) # Add kwargs payload.update(kwargs) response = self._oauth.make_request("GET", url, payload, stream=False) return response.json() ######################################## ######################################## # Tweet endpoints def tweet_lookup( self, tids: Union[str, list, tuple], *, everything: bool = False, fields: ObjectFields = None, expansions: TweetExpansions = None, ) -> dict: """ Looks-up at least one tweet using its tweet id. Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/lookup/api-reference/get-tweets Parameters: ---------- - tids: (str, list, tuple) - Up to 100 unique tweet ids. - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (TweetExpansions) - Expansions enable requests to expand an ID into a full object in the response. 
(default = None) Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ if isinstance(tids, (str)): payload = {"ids": tids} elif isinstance(tids, (list, tuple)): if len(tids) > 100: raise Exception("Number of tweet ids exceeds maximum of 100") payload = {"ids": ",".join(tids)} else: raise ValueError( "Invalid type for parameter 'tids', " "must be a string, list, or tuple" ) # Set url and update payload with params url = f"{self._base_url}/tweets" payload = self._decorate_payload( payload=payload, endpoint_type="tweet", everything=everything, fields=fields, expansions=expansions, ) response = self._oauth.make_request("GET", url, payload, stream=False) return response.json() def get_tweet_timeline( self, user_id: str, *, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, **kwargs, ) -> dict: """ Returns Tweets composed by a single user, specified by the requested user ID. - Max: 3200 most recent tweets (using pagination_token) - Default: 10 most recent tweets (tweet_id and text data fields only) Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/timelines/api-reference/get-users-id-tweets Parameters: ---------- - user_id (str) - Unique user ID to include in the query - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) - kwargs - for optional arguments like "end_time", "until_id" and "pagination_token" Available kwargs: ---------- - end_time (date (ISO 8601)): The newest or most recent UTC timestamp from which the Tweets will be provided. Does not override 3200 limit. Has second granularity, and is inclusive of that second. Minimum allowable time is 2010-11-06T00:00:00Z. 
- exclude ("retweets" and/or "replies") : Comma-separated list of the types of Tweets to exclude from the response. "retweets" still returns max of 3200 tweets. If "replies" included, only the most recent 800 tweets are returned. - max_results (int) : The number of tweets to try and retrieve, up to a maximum = 100 per distinct request. Otherwise, 10 is returned per request. Minimum = 5. - pagination_token (str) : This parameter is used to move forwards or backwards through 'pages' of results, based on the value of the next_token or previous_token in the response. (E.g., after executing `response = get_tweet_timeline()`, `next_token` can be found with `response["meta"]["next_token"]`) - start_time (date (ISO 8601)) : The oldest or earliest UTC timestamp from which the Tweets will be provided. Does not override 3200 limit. Has second granularity, and is inclusive of that second. Minimum allowable time is 2010-11-06T00:00:00Z. - until_id (str) : Returns results with a tweet ID less less than (that is, older than) the specified 'until' tweet ID. Results will exclude the tweet ID provided. Does not override 3200 limit. Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ return self._timeline_lookup( user_id, "tweets", everything=everything, fields=fields, expansions=expansions, **kwargs, ) def get_mentions_timeline( self, user_id: str, *, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, **kwargs, ) -> dict: """ Returns Tweets mentioning a single user specified by the requested user ID. - Max: 800 most recent tweets (using pagination_token) - Default: 10 most recent tweets (tweet_id and text data fields only) Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/timelines/api-reference/get-users-id-mentions Parameters: ---------- - user_id (str) - Unique user ID to include in the query - everything: (bool) - if True, return all fields and expansions. 
(default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) - kwargs - for optional arguments like "max_results" and "pagination_token" Available kwargs: ---------- - end_time (date (ISO 8601)): The newest or most recent UTC timestamp from which the Tweets will be provided. Does not override 3200 limit. Has second granularity, and is inclusive of that second. Minimum allowable time is 2010-11-06T00:00:00Z. - exclude ("retweets" and/or "replies") : Comma-separated list of the types of Tweets to exclude from the response. "retweets" still returns max of 3200 tweets. If "replies" included, only the most recent 800 tweets are returned. - max_results (int) : The number of tweets to try and retrieve, up to a maximum = 100 per distinct request. Otherwise, 10 is returned per request. Minimum = 5. - pagination_token (str) : This parameter is used to move forwards or backwards through 'pages' of results, based on the value of the next_token or previous_token in the response. (E.g., after executing `response = get_tweet_timeline()`, `next_token` can be found with `response["meta"]["next_token"]`) - start_time (date (ISO 8601)) : The oldest or earliest UTC timestamp from which the Tweets will be provided. Does not override 3200 limit. Has second granularity, and is inclusive of that second. Minimum allowable time is 2010-11-06T00:00:00Z. - until_id (str) : Returns results with a tweet ID less less than (that is, older than) the specified 'until' tweet ID. Results will exclude the tweet ID provided. Does not override 3200 limit. 
Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ return self._timeline_lookup( user_id, "mentions", everything=everything, fields=fields, expansions=expansions, **kwargs, ) def _timeline_lookup( self, user_id: str, endpoint: str, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, **kwargs, ) -> dict: """ Return tweets sent by (Timeline) or mentioning (Mentions) a specific user ID. - Max (Timeline): 3200 most recent tweets (using pagination_token) - Max (Mentions): 800 most recent tweets (using pagination_token) - Default (Both): 10 most recent tweets (tweet_id and text data fields only) Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/timelines/api-reference Parameters: ---------- - user_id (str) - Unique user ID to include in the query - endpoint (str) - valid values are "followers" or "following" - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) - kwargs - for optional arguments like "max_results" and "pagination_token" Available kwargs: ---------- - See user-facing method doc-strings for available kwargs Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ # Check type of query and user_fields if not isinstance(user_id, str): raise ValueError("Invalid parameter type. `user_id` must be str") # Construct URL url = f"{self._base_url}/users/{user_id}/{endpoint}" # Create payload. 
payload = self._decorate_payload( endpoint_type="tweet", everything=everything, fields=fields, expansions=expansions, ) payload.update(kwargs) response = self._oauth.make_request("GET", url, payload, stream=False) return response.json() ######################################## ######################################## # User endpoints def get_followers( self, user_id: str, *, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, **kwargs, ) -> dict: """ Return a list of users who are followers of the specified user ID. - Max: 1000 user objects per query - Default: 100 user objects per query Ref: https://developer.twitter.com/en/docs/twitter-api/users/follows/api-reference/get-users-id-followers Parameters: ---------- - user_id (str) - Unique user ID to include in the query - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) - kwargs - for optional arguments like "max_results" and "pagination_token" Available kwargs: ---------- - max_results (int) : The maximum number of results to be returned per page. This can be a number between 1 and the 1000. By default, each page will return 100 results. - pagination_token (str) : This parameter is used to move forwards or backwards through 'pages' of results, based on the value of the next_token or previous_token in the response. 
(E.g., after executing `response = get_tweet_timeline()`, `next_token` can be found with `response["meta"]["next_token"]`) Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ return self._follows_lookup( user_id, "followers", everything=everything, fields=fields, expansions=expansions, **kwargs, ) def get_following( self, user_id: str, *, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, **kwargs, ) -> dict: """ Return a list of users the specified user ID is following. - Max: 1000 user objects per query - Default: 100 user objects per query Ref: https://developer.twitter.com/en/docs/twitter-api/users/follows/api-reference/get-users-id-following Parameters: ---------- - user_id (str) - Unique user ID to include in the query - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) - kwargs - for optional arguments like "max_results" and "pagination_token" Available kwargs: ---------- - max_results (int) : The maximum number of results to be returned per page. This can be a number between 1 and the 1000. By default, each page will return 100 results. - pagination_token (str) : This parameter is used to move forwards or backwards through 'pages' of results, based on the value of the next_token or previous_token in the response. 
(E.g., after executing `response = get_tweet_timeline()`, `next_token` can be found with `response["meta"]["next_token"]`) Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ return self._follows_lookup( user_id, "following", everything=everything, fields=fields, expansions=expansions, **kwargs, ) def _follows_lookup( self, user_id: str, endpoint: str, *, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, **kwargs, ) -> dict: """ Return a list of users who are followers of or followed by the specified user ID. - Max: 1000 user objects per query - Default: 100 user objects per query Ref: https://developer.twitter.com/en/docs/twitter-api/users/follows/api-reference/get-users-id-followers Parameters: ---------- - user_id (str) - Unique user ID to include in the query - endpoint (str) - valid values are "followers" or "following" - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) - kwargs - for optional arguments like "max_results" and "pagination_token" Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ # Check type of query and user_fields if not isinstance(user_id, str): raise ValueError("Invalid parameter type. `user_id` must be str") # Construct URL url = f"{self._base_url}/users/{user_id}/{endpoint}" # Create payload. 
payload = self._decorate_payload( endpoint_type="user", everything=everything, fields=fields, expansions=expansions, ) payload.update(kwargs) response = self._oauth.make_request("GET", url, payload, stream=False) return response.json() def user_lookup_ids( self, user_ids: Union[list, tuple], *, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, ) -> dict: """ Looks-up user account information using unique user account id numbers. User fields included by default match the default parameters returned by Twitter. Ref: https://developer.twitter.com/en/docs/twitter-api/users/lookup/api-reference/get-users Parameters: ---------- - user_ids (list, tuple) - unique user ids to include in query (max 100) - everything: (bool) - if True, return all fields and expansions. (default = False) - user_fields (list, tuple) - the user fields included in returned data. (Default = "id", "name", "username") - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ return self._user_lookup( user_ids, "id", everything=everything, fields=fields, expansions=expansions ) def user_lookup_usernames( self, usernames: Union[list, tuple], *, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, ) -> dict: """ Looks-up user account information using account usernames. User fields included by default match the default parameters returned by Twitter. Ref: https://developer.twitter.com/en/docs/twitter-api/users/lookup/api-reference/get-users-by Parameters: ---------- - usernames (list, tuple) - usernames to include in query (max 100) - user_fields (list, tuple) - the user fields included in returned data. (Default = "id", "name", "username") - everything: (bool) - if True, return all fields and expansions. 
(default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ cleaned_usernames = [] for username in usernames: if username.startswith("@"): cleaned_usernames.append(username[1:]) else: cleaned_usernames.append(username) return self._user_lookup( cleaned_usernames, "username", everything=everything, fields=fields, expansions=expansions, ) def _user_lookup( self, query: Union[list, tuple], query_type: str, *, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, ) -> dict: """ Looks-up user account information using unique user id numbers. User fields included by default match the default parameters from twitter. Ref: https://developer.twitter.com/en/docs/twitter-api/users/lookup/api-reference/get-users and https://developer.twitter.com/en/docs/twitter-api/users/lookup/api-reference/get-users-by Parameters: ---------- - query (list, tuple) - unique user ids or usernames (max 100) - query_type (str) - type of the query, can be "id" or "username" - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) Returns: ---------- - dict Raises: ---------- - Exception - ValueError """ query_specs = { "id": {"phrase": "user ids", "parameter_name": "ids", "endpoint": "users"}, "username": { "phrase": "usernames", "parameter_name": "usernames", "endpoint": "users/by", }, }.get(query_type) # Check type of query and user_fields if not isinstance(query, (list, tuple)): raise ValueError( "Invalid parameter type: `query` must be" "either a list or tuple." 
) # Make sure the query is no longer than 100 if len(query) <= 100: # create payload. payload = {query_specs["parameter_name"]: f"{','.join(query)}"} else: raise Exception( f"You passed {len(query)} {query_specs['phrase']}. \ This exceeds the maximum for a single query, 100" ) payload = self._decorate_payload( payload=payload, endpoint_type="user", everything=everything, fields=fields, expansions=expansions, ) url = f"{self._base_url}/{query_specs['endpoint']}" response = self._oauth.make_request("GET", url, payload, stream=False) return response.json() ######################################## ######################################## # Streaming endpoints def sampled_stream( self, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, ) -> Generator[bytes, None, None]: """ Streams a random 1% sample of all the tweets. User fields included by default match the default parameters from twitter. Note: Rate Limit Manager cannot be used with this endpoint. When calling osometweet.OAuth2() for authorization, make sure to set the `manage_rate_limits` parameter equal to `False`. Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/sampled-stream/introduction Parameters: ---------- - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) Returns: ---------- - Generator Raises: ---------- - Exception """ return self._sampled_stream( everything=everything, fields=fields, expansions=expansions ) def _sampled_stream( self, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, ) -> Generator[bytes, None, None]: """ Streams a filtered 1% sample of all the tweets. User fields included by default match the default parameters from twitter. 
Note: Rate Limit Manager cannot be used with this endpoint. When calling osometweet.OAuth2() for authorization, make sure to set the `manage_rate_limits` parameter equal to `False`. Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/sampled-stream/introduction Parameters: ---------- - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) Returns: ---------- - Generator Raises: ---------- - Exception """ if self._oauth._manage_rate_limits: raise Exception( "Rate Limit Manager cannot be used with streaming endpoints. " "When calling osometweet.OAuth2(), make sure to set the " "`manage_rate_limits` parameter equal to `False` and try again." ) payload = self._decorate_payload( payload=None, endpoint_type="tweet", everything=everything, fields=fields, expansions=expansions, ) url = f"{self._base_url}/tweets/sample/stream" # create a connection to the API that will be used to stream tweets response = self._oauth.make_request( method="GET", url=url, payload=payload, stream=True ) if response.status_code != 200: raise Exception( "Request returned an error: " f"{response.status_code} {response.text}" ) else: # If connection succeeded, return a generator # that is used to stream tweets return response def filtered_stream( self, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, ) -> Generator[bytes, None, None]: """ Streams tweets that match the active streaming rules set by the user. Note: Rate Limit Manager cannot be used with this endpoint. When calling osometweet.OAuth2() for authorization, make sure to set the `manage_rate_limits` parameter equal to `False`. 
Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/filtered-stream/introduction Parameters: ---------- - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) Returns: ---------- - Generator Raises: ---------- - Exception """ return self._filtered_stream( everything=everything, fields=fields, expansions=expansions ) def _filtered_stream( self, everything: bool = False, fields: ObjectFields = None, expansions: UserExpansions = None, ) -> Generator[bytes, None, None]: """ Streams tweets that match the active streaming rules set by the user. Note: Rate Limit Manager cannot be used with this endpoint. When calling osometweet.OAuth2() for authorization, make sure to set the `manage_rate_limits` parameter equal to `False`. Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/filtered-stream/introduction Parameters: ---------- - everything: (bool) - if True, return all fields and expansions. (default = False) - fields: (ObjectFields) - additional fields to return. (default = None) - expansions: (UserExpansions) - Expansions enable requests to expand an ID into a full object in the response. (default = None) Returns: ---------- - Generator Raises: ---------- - Exception """ if self._oauth._manage_rate_limits: raise Exception( "Rate Limit Manager cannot be used with streaming endpoints. " "When calling osometweet.OAuth2(), make sure to set the " "`manage_rate_limits` value equal to `False` and try again." 
) payload = self._decorate_payload( payload=None, endpoint_type="tweet", everything=everything, fields=fields, expansions=expansions, ) url = f"{self._base_url}/tweets/search/stream" # create a connection to the API that will be used to stream tweets response = self._oauth.make_request( method="GET", url=url, payload=payload, stream=True ) if response.status_code != 200: raise Exception( "Request returned an error: " f"{response.status_code} {response.text}" ) else: # If connection succeeded, return a generator # that is used to stream tweets return response def set_filtered_stream_rule(self, rules, payload={}): """ Modifies active streaming rules, be it by adding or removing them. Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/filtered-stream/api-reference/post-tweets-search-stream-rules Parameters: ---------- - rules (dict): Dictionary specifying a rule to be added or deleted - payload (dict, optional): Additional parameters used by the endpoint (default = {}). Returns: dict: API response """ return self._set_filtered_stream_rule(rules, payload) def _set_filtered_stream_rule(self, rules, payload): """ Modifies active streaming rules, be it by adding or removing them. Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/filtered-stream/api-reference/post-tweets-search-stream-rules Parameters: ---------- - rules (dict): Dictionary specifying a rule to be added or deleted - payload (dict, optional): Additional parameters used by the endpoint (default = {}). Returns: ---------- - dict: the Twitter API response """ url = f"{self._base_url}/tweets/search/stream/rules" response = self._oauth.make_request( method="POST", url=url, payload=payload, json=rules ) return response.json() def get_filtered_stream_rule(self, payload={}): """ Retrieves active streaming rules. 
Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/filtered-stream/api-reference/get-tweets-search-stream-rules Parameters: ---------- - payload (dict, optional): Additional parameters used by the endpoint (default = {}). Returns: ---------- - dict: Any active filtered streaming rules """ return self._get_filtered_stream_rule(payload) def _get_filtered_stream_rule(self, payload): """ Retrieves active streaming rules. Ref: https://developer.twitter.com/en/docs/twitter-api/tweets/filtered-stream/api-reference/get-tweets-search-stream-rules Parameters: ---------- - payload (dict, optional): Additional parameters used by the endpoint (default = {}). Returns: ---------- - dict: active rules """ url = f"{self._base_url}/tweets/search/stream/rules" response = self._oauth.make_request( method="GET", url=url, payload=payload ) return response.json()
35.809045
131
0.564856
4,525
42,756
5.267624
0.091271
0.00881
0.029913
0.022151
0.792625
0.775004
0.77018
0.756335
0.732212
0.706746
0
0.009898
0.333661
42,756
1,193
132
35.839061
0.826746
0.53244
0
0.598575
0
0
0.126562
0.029937
0
0
0
0
0
1
0.052257
false
0.007126
0.011876
0
0.114014
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
b01fa49ea4f40f2eec0dc6f25f2b2366faaa996d
178
py
Python
comments/filters.py
umatbro/movies-db
7935b9ff52b4a1da1b8a798a64bdc31e52f9698e
[ "MIT" ]
null
null
null
comments/filters.py
umatbro/movies-db
7935b9ff52b4a1da1b8a798a64bdc31e52f9698e
[ "MIT" ]
17
2019-03-16T13:30:12.000Z
2020-06-05T20:04:22.000Z
comments/filters.py
umatbro/movies-db
7935b9ff52b4a1da1b8a798a64bdc31e52f9698e
[ "MIT" ]
null
null
null
import django_filters

from comments.models import Comment


class CommentFilter(django_filters.FilterSet):
    # FilterSet for the Comment model: exposes only ``movie_id`` as a
    # filterable field, so API clients can narrow comments to one movie.
    class Meta:
        model = Comment
        fields = ('movie_id',)
19.777778
46
0.713483
20
178
6.2
0.75
0.209677
0
0
0
0
0
0
0
0
0
0
0.213483
178
8
47
22.25
0.885714
0
0
0
0
0
0.044944
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
b0440402f72a22ba8e6e61cf092d04fffa8beda7
27,846
py
Python
lib/pysnmp/smi/mibs/SNMP-VIEW-BASED-ACM-MIB.py
lowitty/sendtrap
63c194cecd9f4355f4a9edc244d80e54aa9ff0be
[ "MIT" ]
null
null
null
lib/pysnmp/smi/mibs/SNMP-VIEW-BASED-ACM-MIB.py
lowitty/sendtrap
63c194cecd9f4355f4a9edc244d80e54aa9ff0be
[ "MIT" ]
null
null
null
lib/pysnmp/smi/mibs/SNMP-VIEW-BASED-ACM-MIB.py
lowitty/sendtrap
63c194cecd9f4355f4a9edc244d80e54aa9ff0be
[ "MIT" ]
null
null
null
# PySNMP SMI module. Autogenerated from smidump -f python SNMP-VIEW-BASED-ACM-MIB # by libsmi2pysnmp-0.1.3 at Tue Apr 3 16:57:42 2012, # Python version sys.version_info(major=2, minor=7, micro=2, releaselevel='final', serial=0) # Imports ( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString") ( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ( ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint") ( SnmpAdminString, SnmpSecurityLevel, SnmpSecurityModel, ) = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString", "SnmpSecurityLevel", "SnmpSecurityModel") ( ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup") ( Bits, Integer32, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, snmpModules, ) = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Integer32", "ModuleIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "snmpModules") ( RowStatus, StorageType, TestAndIncr, ) = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "StorageType", "TestAndIncr") # Objects snmpVacmMIB = ModuleIdentity((1, 3, 6, 1, 6, 3, 16)).setRevisions(("2002-10-16 00:00","1999-01-20 00:00","1997-11-20 00:00",)) if mibBuilder.loadTexts: snmpVacmMIB.setOrganization("SNMPv3 Working Group") if mibBuilder.loadTexts: snmpVacmMIB.setContactInfo("WG-email: snmpv3@lists.tislabs.com\nSubscribe: majordomo@lists.tislabs.com\n In message body: subscribe snmpv3\n\nCo-Chair: Russ Mundy\n Network Associates Laboratories\npostal: 15204 Omega Drive, Suite 300\n Rockville, MD 20850-4601\n USA\nemail: mundy@tislabs.com\nphone: +1 
301-947-7107\n\nCo-Chair: David Harrington\n Enterasys Networks\nPostal: 35 Industrial Way\n P. O. Box 5004\n Rochester, New Hampshire 03866-5005\n USA\nEMail: dbh@enterasys.com\nPhone: +1 603-337-2614\n\nCo-editor: Bert Wijnen\n Lucent Technologies\npostal: Schagen 33\n 3461 GL Linschoten\n Netherlands\nemail: bwijnen@lucent.com\nphone: +31-348-480-685\n\nCo-editor: Randy Presuhn\n BMC Software, Inc.\n\npostal: 2141 North First Street\n San Jose, CA 95131\n USA\nemail: randy_presuhn@bmc.com\nphone: +1 408-546-1006\n\nCo-editor: Keith McCloghrie\n Cisco Systems, Inc.\npostal: 170 West Tasman Drive\n San Jose, CA 95134-1706\n USA\nemail: kzm@cisco.com\nphone: +1-408-526-5260") if mibBuilder.loadTexts: snmpVacmMIB.setDescription("The management information definitions for the\nView-based Access Control Model for SNMP.\n\nCopyright (C) The Internet Society (2002). This\nversion of this MIB module is part of RFC 3415;\nsee the RFC itself for full legal notices.") vacmMIBObjects = MibIdentifier((1, 3, 6, 1, 6, 3, 16, 1)) vacmContextTable = MibTable((1, 3, 6, 1, 6, 3, 16, 1, 1)) if mibBuilder.loadTexts: vacmContextTable.setDescription("The table of locally available contexts.\n\nThis table provides information to SNMP Command\n\nGenerator applications so that they can properly\nconfigure the vacmAccessTable to control access to\nall contexts at the SNMP entity.\n\nThis table may change dynamically if the SNMP entity\nallows that contexts are added/deleted dynamically\n(for instance when its configuration changes). Such\nchanges would happen only if the management\ninstrumentation at that SNMP entity recognizes more\n(or fewer) contexts.\n\nThe presence of entries in this table and of entries\nin the vacmAccessTable are independent. 
That is, a\ncontext identified by an entry in this table is not\nnecessarily referenced by any entries in the\nvacmAccessTable; and the context(s) referenced by an\nentry in the vacmAccessTable does not necessarily\ncurrently exist and thus need not be identified by an\nentry in this table.\n\nThis table must be made accessible via the default\ncontext so that Command Responder applications have\na standard way of retrieving the information.\n\nThis table is read-only. It cannot be configured via\nSNMP.") vacmContextEntry = MibTableRow((1, 3, 6, 1, 6, 3, 16, 1, 1, 1)).setIndexNames((0, "SNMP-VIEW-BASED-ACM-MIB", "vacmContextName")) if mibBuilder.loadTexts: vacmContextEntry.setDescription("Information about a particular context.") vacmContextName = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 1, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly") if mibBuilder.loadTexts: vacmContextName.setDescription("A human readable name identifying a particular\ncontext at a particular SNMP entity.\n\nThe empty contextName (zero length) represents the\ndefault context.") vacmSecurityToGroupTable = MibTable((1, 3, 6, 1, 6, 3, 16, 1, 2)) if mibBuilder.loadTexts: vacmSecurityToGroupTable.setDescription("This table maps a combination of securityModel and\nsecurityName into a groupName which is used to define\nan access control policy for a group of principals.") vacmSecurityToGroupEntry = MibTableRow((1, 3, 6, 1, 6, 3, 16, 1, 2, 1)).setIndexNames((0, "SNMP-VIEW-BASED-ACM-MIB", "vacmSecurityModel"), (0, "SNMP-VIEW-BASED-ACM-MIB", "vacmSecurityName")) if mibBuilder.loadTexts: vacmSecurityToGroupEntry.setDescription("An entry in this table maps the combination of a\nsecurityModel and securityName into a groupName.") vacmSecurityModel = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 2, 1, 1), SnmpSecurityModel().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("noaccess") if mibBuilder.loadTexts: 
vacmSecurityModel.setDescription("The Security Model, by which the vacmSecurityName\nreferenced by this entry is provided.\n\nNote, this object may not take the 'any' (0) value.") vacmSecurityName = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 2, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("noaccess") if mibBuilder.loadTexts: vacmSecurityName.setDescription("The securityName for the principal, represented in a\nSecurity Model independent format, which is mapped by\nthis entry to a groupName.") vacmGroupName = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 2, 1, 3), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmGroupName.setDescription("The name of the group to which this entry (e.g., the\ncombination of securityModel and securityName)\nbelongs.\n\nThis groupName is used as index into the\nvacmAccessTable to select an access control policy.\nHowever, a value in this table does not imply that an\ninstance with the value exists in table vacmAccesTable.") vacmSecurityToGroupStorageType = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 2, 1, 4), StorageType().clone('nonVolatile')).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmSecurityToGroupStorageType.setDescription("The storage type for this conceptual row.\nConceptual rows having the value 'permanent' need not\nallow write-access to any columnar objects in the row.") vacmSecurityToGroupStatus = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 2, 1, 5), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmSecurityToGroupStatus.setDescription("The status of this conceptual row.\n\nUntil instances of all corresponding columns are\nappropriately configured, the value of the\n\ncorresponding instance of the vacmSecurityToGroupStatus\ncolumn is 'notReady'.\n\nIn particular, a newly created row cannot be made\nactive until a value has been set for vacmGroupName.\n\nThe RowStatus TC [RFC2579] requires that 
this\nDESCRIPTION clause states under which circumstances\nother objects in this row can be modified:\n\nThe value of this object has no effect on whether\nother objects in this conceptual row can be modified.") vacmAccessTable = MibTable((1, 3, 6, 1, 6, 3, 16, 1, 4)) if mibBuilder.loadTexts: vacmAccessTable.setDescription("The table of access rights for groups.\n\nEach entry is indexed by a groupName, a contextPrefix,\na securityModel and a securityLevel. To determine\nwhether access is allowed, one entry from this table\nneeds to be selected and the proper viewName from that\nentry must be used for access control checking.\n\nTo select the proper entry, follow these steps:\n\n1) the set of possible matches is formed by the\n intersection of the following sets of entries:\n\n the set of entries with identical vacmGroupName\n the union of these two sets:\n - the set with identical vacmAccessContextPrefix\n - the set of entries with vacmAccessContextMatch\n value of 'prefix' and matching\n vacmAccessContextPrefix\n intersected with the union of these two sets:\n - the set of entries with identical\n vacmSecurityModel\n - the set of entries with vacmSecurityModel\n value of 'any'\n intersected with the set of entries with\n vacmAccessSecurityLevel value less than or equal\n to the requested securityLevel\n\n2) if this set has only one member, we're done\n otherwise, it comes down to deciding how to weight\n the preferences between ContextPrefixes,\n SecurityModels, and SecurityLevels as follows:\n a) if the subset of entries with securityModel\n matching the securityModel in the message is\n not empty, then discard the rest.\n b) if the subset of entries with\n vacmAccessContextPrefix matching the contextName\n in the message is not empty,\n then discard the rest\n c) discard all entries with ContextPrefixes shorter\n than the longest one remaining in the set\n d) select the entry with the highest securityLevel\n\nPlease note that for securityLevel noAuthNoPriv, 
all\ngroups are really equivalent since the assumption that\nthe securityName has been authenticated does not hold.") vacmAccessEntry = MibTableRow((1, 3, 6, 1, 6, 3, 16, 1, 4, 1)).setIndexNames((0, "SNMP-VIEW-BASED-ACM-MIB", "vacmGroupName"), (0, "SNMP-VIEW-BASED-ACM-MIB", "vacmAccessContextPrefix"), (0, "SNMP-VIEW-BASED-ACM-MIB", "vacmAccessSecurityModel"), (0, "SNMP-VIEW-BASED-ACM-MIB", "vacmAccessSecurityLevel")) if mibBuilder.loadTexts: vacmAccessEntry.setDescription("An access right configured in the Local Configuration\nDatastore (LCD) authorizing access to an SNMP context.\n\nEntries in this table can use an instance value for\nobject vacmGroupName even if no entry in table\nvacmAccessSecurityToGroupTable has a corresponding\nvalue for object vacmGroupName.") vacmAccessContextPrefix = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("noaccess") if mibBuilder.loadTexts: vacmAccessContextPrefix.setDescription("In order to gain the access rights allowed by this\nconceptual row, a contextName must match exactly\n(if the value of vacmAccessContextMatch is 'exact')\nor partially (if the value of vacmAccessContextMatch\nis 'prefix') to the value of the instance of this\nobject.") vacmAccessSecurityModel = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 2), SnmpSecurityModel()).setMaxAccess("noaccess") if mibBuilder.loadTexts: vacmAccessSecurityModel.setDescription("In order to gain the access rights allowed by this\nconceptual row, this securityModel must be in use.") vacmAccessSecurityLevel = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 3), SnmpSecurityLevel()).setMaxAccess("noaccess") if mibBuilder.loadTexts: vacmAccessSecurityLevel.setDescription("The minimum level of security required in order to\ngain the access rights allowed by this conceptual\nrow. 
A securityLevel of noAuthNoPriv is less than\nauthNoPriv which in turn is less than authPriv.\n\nIf multiple entries are equally indexed except for\nthis vacmAccessSecurityLevel index, then the entry\nwhich has the highest value for\nvacmAccessSecurityLevel is selected.") vacmAccessContextMatch = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 4), Integer().subtype(subtypeSpec=SingleValueConstraint(2,1,)).subtype(namedValues=NamedValues(("exact", 1), ("prefix", 2), )).clone(1)).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmAccessContextMatch.setDescription("If the value of this object is exact(1), then all\nrows where the contextName exactly matches\nvacmAccessContextPrefix are selected.\n\nIf the value of this object is prefix(2), then all\nrows where the contextName whose starting octets\nexactly match vacmAccessContextPrefix are selected.\nThis allows for a simple form of wildcarding.") vacmAccessReadViewName = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32)).clone('')).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmAccessReadViewName.setDescription("The value of an instance of this object identifies\nthe MIB view of the SNMP context to which this\nconceptual row authorizes read access.\n\nThe identified MIB view is that one for which the\nvacmViewTreeFamilyViewName has the same value as the\ninstance of this object; if the value is the empty\nstring or if there is no active MIB view having this\nvalue of vacmViewTreeFamilyViewName, then no access\nis granted.") vacmAccessWriteViewName = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 6), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32)).clone('')).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmAccessWriteViewName.setDescription("The value of an instance of this object identifies\nthe MIB view of the SNMP context to which this\nconceptual row authorizes write access.\n\nThe identified MIB view is that 
one for which the\nvacmViewTreeFamilyViewName has the same value as the\ninstance of this object; if the value is the empty\nstring or if there is no active MIB view having this\nvalue of vacmViewTreeFamilyViewName, then no access\nis granted.") vacmAccessNotifyViewName = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 7), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32)).clone('')).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmAccessNotifyViewName.setDescription("The value of an instance of this object identifies\nthe MIB view of the SNMP context to which this\nconceptual row authorizes access for notifications.\n\nThe identified MIB view is that one for which the\nvacmViewTreeFamilyViewName has the same value as the\ninstance of this object; if the value is the empty\nstring or if there is no active MIB view having this\nvalue of vacmViewTreeFamilyViewName, then no access\nis granted.") vacmAccessStorageType = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 8), StorageType().clone('nonVolatile')).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmAccessStorageType.setDescription("The storage type for this conceptual row.\n\nConceptual rows having the value 'permanent' need not\nallow write-access to any columnar objects in the row.") vacmAccessStatus = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 4, 1, 9), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmAccessStatus.setDescription("The status of this conceptual row.\n\nThe RowStatus TC [RFC2579] requires that this\nDESCRIPTION clause states under which circumstances\nother objects in this row can be modified:\n\nThe value of this object has no effect on whether\nother objects in this conceptual row can be modified.") vacmMIBViews = MibIdentifier((1, 3, 6, 1, 6, 3, 16, 1, 5)) vacmViewSpinLock = MibScalar((1, 3, 6, 1, 6, 3, 16, 1, 5, 1), TestAndIncr()).setMaxAccess("readwrite") if mibBuilder.loadTexts: vacmViewSpinLock.setDescription("An advisory lock used to allow 
cooperating SNMP\nCommand Generator applications to coordinate their\nuse of the Set operation in creating or modifying\nviews.\n\nWhen creating a new view or altering an existing\nview, it is important to understand the potential\ninteractions with other uses of the view. The\nvacmViewSpinLock should be retrieved. The name of\nthe view to be created should be determined to be\nunique by the SNMP Command Generator application by\nconsulting the vacmViewTreeFamilyTable. Finally,\nthe named view may be created (Set), including the\nadvisory lock.\nIf another SNMP Command Generator application has\naltered the views in the meantime, then the spin\nlock's value will have changed, and so this creation\nwill fail because it will specify the wrong value for\nthe spin lock.\n\nSince this is an advisory lock, the use of this lock\nis not enforced.") vacmViewTreeFamilyTable = MibTable((1, 3, 6, 1, 6, 3, 16, 1, 5, 2)) if mibBuilder.loadTexts: vacmViewTreeFamilyTable.setDescription("Locally held information about families of subtrees\nwithin MIB views.\n\nEach MIB view is defined by two sets of view subtrees:\n - the included view subtrees, and\n - the excluded view subtrees.\nEvery such view subtree, both the included and the\n\nexcluded ones, is defined in this table.\n\nTo determine if a particular object instance is in\na particular MIB view, compare the object instance's\nOBJECT IDENTIFIER with each of the MIB view's active\nentries in this table. If none match, then the\nobject instance is not in the MIB view. If one or\nmore match, then the object instance is included in,\nor excluded from, the MIB view according to the\nvalue of vacmViewTreeFamilyType in the entry whose\nvalue of vacmViewTreeFamilySubtree has the most\nsub-identifiers. 
If multiple entries match and have\nthe same number of sub-identifiers (when wildcarding\nis specified with the value of vacmViewTreeFamilyMask),\nthen the lexicographically greatest instance of\nvacmViewTreeFamilyType determines the inclusion or\nexclusion.\n\nAn object instance's OBJECT IDENTIFIER X matches an\nactive entry in this table when the number of\nsub-identifiers in X is at least as many as in the\nvalue of vacmViewTreeFamilySubtree for the entry,\nand each sub-identifier in the value of\nvacmViewTreeFamilySubtree matches its corresponding\nsub-identifier in X. Two sub-identifiers match\neither if the corresponding bit of the value of\nvacmViewTreeFamilyMask for the entry is zero (the\n'wild card' value), or if they are equal.\n\nA 'family' of subtrees is the set of subtrees defined\nby a particular combination of values of\nvacmViewTreeFamilySubtree and vacmViewTreeFamilyMask.\n\nIn the case where no 'wild card' is defined in the\nvacmViewTreeFamilyMask, the family of subtrees reduces\nto a single subtree.\n\nWhen creating or changing MIB views, an SNMP Command\nGenerator application should utilize the\nvacmViewSpinLock to try to avoid collisions. 
See\nDESCRIPTION clause of vacmViewSpinLock.\n\nWhen creating MIB views, it is strongly advised that\nfirst the 'excluded' vacmViewTreeFamilyEntries are\ncreated and then the 'included' entries.\n\nWhen deleting MIB views, it is strongly advised that\nfirst the 'included' vacmViewTreeFamilyEntries are\n\ndeleted and then the 'excluded' entries.\n\nIf a create for an entry for instance-level access\ncontrol is received and the implementation does not\nsupport instance-level granularity, then an\ninconsistentName error must be returned.") vacmViewTreeFamilyEntry = MibTableRow((1, 3, 6, 1, 6, 3, 16, 1, 5, 2, 1)).setIndexNames((0, "SNMP-VIEW-BASED-ACM-MIB", "vacmViewTreeFamilyViewName"), (0, "SNMP-VIEW-BASED-ACM-MIB", "vacmViewTreeFamilySubtree")) if mibBuilder.loadTexts: vacmViewTreeFamilyEntry.setDescription("Information on a particular family of view subtrees\nincluded in or excluded from a particular SNMP\ncontext's MIB view.\n\nImplementations must not restrict the number of\nfamilies of view subtrees for a given MIB view,\nexcept as dictated by resource constraints on the\noverall number of entries in the\nvacmViewTreeFamilyTable.\n\nIf no conceptual rows exist in this table for a given\nMIB view (viewName), that view may be thought of as\nconsisting of the empty set of view subtrees.") vacmViewTreeFamilyViewName = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 5, 2, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("noaccess") if mibBuilder.loadTexts: vacmViewTreeFamilyViewName.setDescription("The human readable name for a family of view subtrees.") vacmViewTreeFamilySubtree = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 5, 2, 1, 2), ObjectIdentifier()).setMaxAccess("noaccess") if mibBuilder.loadTexts: vacmViewTreeFamilySubtree.setDescription("The MIB subtree which when combined with the\ncorresponding instance of vacmViewTreeFamilyMask\ndefines a family of view subtrees.") vacmViewTreeFamilyMask = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 
1, 5, 2, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16)).clone('')).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmViewTreeFamilyMask.setDescription("The bit mask which, in combination with the\ncorresponding instance of vacmViewTreeFamilySubtree,\ndefines a family of view subtrees.\n\nEach bit of this bit mask corresponds to a\nsub-identifier of vacmViewTreeFamilySubtree, with the\nmost significant bit of the i-th octet of this octet\nstring value (extended if necessary, see below)\ncorresponding to the (8*i - 7)-th sub-identifier, and\nthe least significant bit of the i-th octet of this\noctet string corresponding to the (8*i)-th\nsub-identifier, where i is in the range 1 through 16.\n\nEach bit of this bit mask specifies whether or not\nthe corresponding sub-identifiers must match when\ndetermining if an OBJECT IDENTIFIER is in this\nfamily of view subtrees; a '1' indicates that an\nexact match must occur; a '0' indicates 'wild card',\ni.e., any sub-identifier value matches.\n\nThus, the OBJECT IDENTIFIER X of an object instance\nis contained in a family of view subtrees if, for\neach sub-identifier of the value of\nvacmViewTreeFamilySubtree, either:\n\n the i-th bit of vacmViewTreeFamilyMask is 0, or\n\n the i-th sub-identifier of X is equal to the i-th\n sub-identifier of the value of\n vacmViewTreeFamilySubtree.\n\nIf the value of this bit mask is M bits long and\n\nthere are more than M sub-identifiers in the\ncorresponding instance of vacmViewTreeFamilySubtree,\nthen the bit mask is extended with 1's to be the\nrequired length.\n\nNote that when the value of this object is the\nzero-length string, this extension rule results in\na mask of all-1's being used (i.e., no 'wild card'),\nand the family of view subtrees is the one view\nsubtree uniquely identified by the corresponding\ninstance of vacmViewTreeFamilySubtree.\n\nNote that masks of length greater than zero length\ndo not need to be supported. 
In this case this\nobject is made read-only.") vacmViewTreeFamilyType = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 5, 2, 1, 4), Integer().subtype(subtypeSpec=SingleValueConstraint(1,2,)).subtype(namedValues=NamedValues(("included", 1), ("excluded", 2), )).clone(1)).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmViewTreeFamilyType.setDescription("Indicates whether the corresponding instances of\nvacmViewTreeFamilySubtree and vacmViewTreeFamilyMask\ndefine a family of view subtrees which is included in\nor excluded from the MIB view.") vacmViewTreeFamilyStorageType = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 5, 2, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmViewTreeFamilyStorageType.setDescription("The storage type for this conceptual row.\n\nConceptual rows having the value 'permanent' need not\nallow write-access to any columnar objects in the row.") vacmViewTreeFamilyStatus = MibTableColumn((1, 3, 6, 1, 6, 3, 16, 1, 5, 2, 1, 6), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: vacmViewTreeFamilyStatus.setDescription("The status of this conceptual row.\n\nThe RowStatus TC [RFC2579] requires that this\nDESCRIPTION clause states under which circumstances\nother objects in this row can be modified:\n\nThe value of this object has no effect on whether\nother objects in this conceptual row can be modified.") vacmMIBConformance = MibIdentifier((1, 3, 6, 1, 6, 3, 16, 2)) vacmMIBCompliances = MibIdentifier((1, 3, 6, 1, 6, 3, 16, 2, 1)) vacmMIBGroups = MibIdentifier((1, 3, 6, 1, 6, 3, 16, 2, 2)) # Augmentions # Groups vacmBasicGroup = ObjectGroup((1, 3, 6, 1, 6, 3, 16, 2, 2, 1)).setObjects(*(("SNMP-VIEW-BASED-ACM-MIB", "vacmViewTreeFamilyStorageType"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmAccessContextMatch"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmAccessReadViewName"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmViewTreeFamilyType"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmGroupName"), ("SNMP-VIEW-BASED-ACM-MIB", 
"vacmSecurityToGroupStatus"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmContextName"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmAccessWriteViewName"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmAccessNotifyViewName"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmAccessStorageType"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmViewTreeFamilyStatus"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmAccessStatus"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmSecurityToGroupStorageType"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmViewTreeFamilyMask"), ("SNMP-VIEW-BASED-ACM-MIB", "vacmViewSpinLock"), ) ) if mibBuilder.loadTexts: vacmBasicGroup.setDescription("A collection of objects providing for remote\nconfiguration of an SNMP engine which implements\n\nthe SNMP View-based Access Control Model.") # Compliances vacmMIBCompliance = ModuleCompliance((1, 3, 6, 1, 6, 3, 16, 2, 1, 1)).setObjects(*(("SNMP-VIEW-BASED-ACM-MIB", "vacmBasicGroup"), ) ) if mibBuilder.loadTexts: vacmMIBCompliance.setDescription("The compliance statement for SNMP engines which\nimplement the SNMP View-based Access Control Model\nconfiguration MIB.") # Exports # Module identity mibBuilder.exportSymbols("SNMP-VIEW-BASED-ACM-MIB", PYSNMP_MODULE_ID=snmpVacmMIB) # Objects mibBuilder.exportSymbols("SNMP-VIEW-BASED-ACM-MIB", snmpVacmMIB=snmpVacmMIB, vacmMIBObjects=vacmMIBObjects, vacmContextTable=vacmContextTable, vacmContextEntry=vacmContextEntry, vacmContextName=vacmContextName, vacmSecurityToGroupTable=vacmSecurityToGroupTable, vacmSecurityToGroupEntry=vacmSecurityToGroupEntry, vacmSecurityModel=vacmSecurityModel, vacmSecurityName=vacmSecurityName, vacmGroupName=vacmGroupName, vacmSecurityToGroupStorageType=vacmSecurityToGroupStorageType, vacmSecurityToGroupStatus=vacmSecurityToGroupStatus, vacmAccessTable=vacmAccessTable, vacmAccessEntry=vacmAccessEntry, vacmAccessContextPrefix=vacmAccessContextPrefix, vacmAccessSecurityModel=vacmAccessSecurityModel, vacmAccessSecurityLevel=vacmAccessSecurityLevel, vacmAccessContextMatch=vacmAccessContextMatch, 
vacmAccessReadViewName=vacmAccessReadViewName, vacmAccessWriteViewName=vacmAccessWriteViewName, vacmAccessNotifyViewName=vacmAccessNotifyViewName, vacmAccessStorageType=vacmAccessStorageType, vacmAccessStatus=vacmAccessStatus, vacmMIBViews=vacmMIBViews, vacmViewSpinLock=vacmViewSpinLock, vacmViewTreeFamilyTable=vacmViewTreeFamilyTable, vacmViewTreeFamilyEntry=vacmViewTreeFamilyEntry, vacmViewTreeFamilyViewName=vacmViewTreeFamilyViewName, vacmViewTreeFamilySubtree=vacmViewTreeFamilySubtree, vacmViewTreeFamilyMask=vacmViewTreeFamilyMask, vacmViewTreeFamilyType=vacmViewTreeFamilyType, vacmViewTreeFamilyStorageType=vacmViewTreeFamilyStorageType, vacmViewTreeFamilyStatus=vacmViewTreeFamilyStatus, vacmMIBConformance=vacmMIBConformance, vacmMIBCompliances=vacmMIBCompliances, vacmMIBGroups=vacmMIBGroups) # Groups mibBuilder.exportSymbols("SNMP-VIEW-BASED-ACM-MIB", vacmBasicGroup=vacmBasicGroup) # Compliances mibBuilder.exportSymbols("SNMP-VIEW-BASED-ACM-MIB", vacmMIBCompliance=vacmMIBCompliance)
248.625
2,569
0.786397
3,801
27,846
5.760063
0.199947
0.003837
0.005207
0.006943
0.326939
0.280396
0.235361
0.217183
0.195853
0.161917
0
0.028897
0.120125
27,846
111
2,570
250.864865
0.864699
0.011492
0
0
1
0.369048
0.657422
0.113732
0
0
0
0
0
1
0
false
0
0.095238
0
0.095238
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
b04875b9bde1adae00640aa1096e825d9647702a
52
py
Python
reporec/app.py
dgasmith/reporec
c03fe2a32278a6cc7aac5e910c64d5df10f4ce61
[ "BSD-3-Clause" ]
3
2018-10-27T22:34:35.000Z
2019-01-19T18:11:23.000Z
reporec/app.py
dgasmith/reporec
c03fe2a32278a6cc7aac5e910c64d5df10f4ce61
[ "BSD-3-Clause" ]
4
2018-10-27T22:36:40.000Z
2019-01-19T18:20:09.000Z
reporec/app.py
dgasmith/reporec
c03fe2a32278a6cc7aac5e910c64d5df10f4ce61
[ "BSD-3-Clause" ]
2
2018-10-27T21:13:26.000Z
2019-01-11T13:34:56.000Z
# import falcon # api = application = falcon.API()
13
34
0.673077
6
52
5.833333
0.666667
0.514286
0
0
0
0
0
0
0
0
0
0
0.192308
52
3
35
17.333333
0.833333
0.884615
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
c6d4a384e1d8f49786418a12a31939099d33170d
310
py
Python
ask/qa/views.py
ztp99/pywebstepic
59ffd969cb8dc0d602941d6676757cc4e9f7858b
[ "Apache-2.0" ]
null
null
null
ask/qa/views.py
ztp99/pywebstepic
59ffd969cb8dc0d602941d6676757cc4e9f7858b
[ "Apache-2.0" ]
null
null
null
ask/qa/views.py
ztp99/pywebstepic
59ffd969cb8dc0d602941d6676757cc4e9f7858b
[ "Apache-2.0" ]
null
null
null
# ask/qa
from django.shortcuts import render
from django.utils.html import escape

# Create your views here.
from django.http import HttpResponse


def test(request, *args, **kwargs):
    """Debug view: respond with an HTML-escaped repr of the request."""
    body = escape(repr(request))
    return HttpResponse(body)


def quest(request, *args, **kwargs):
    """Smoke-test view: respond with a fixed OK message."""
    return HttpResponse('QUEST IS OK')
25.833333
46
0.73871
42
310
5.452381
0.619048
0.131004
0.148472
0.200873
0.305677
0
0
0
0
0
0
0
0.151613
310
12
47
25.833333
0.870722
0.096774
0
0
0
0
0.039568
0
0
0
0
0
0
1
0.285714
false
0
0.428571
0.285714
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
5
059ed401109d4670588d2d7a5cb6310bc534c8b7
266
py
Python
katas/beta/rammstein_needs_your_help.py
the-zebulan/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
40
2016-03-09T12:26:20.000Z
2022-03-23T08:44:51.000Z
katas/beta/rammstein_needs_your_help.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
null
null
null
katas/beta/rammstein_needs_your_help.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
36
2016-11-07T19:59:58.000Z
2022-03-31T11:18:27.000Z
def feuer_frei(concentration, barrels):
    """Report fuel status against a required 100 fuel-hours.

    ``barrels * concentration`` yields the available fuel-hours. Returns a
    German shortfall message when below 100, ``'Perfekt!'`` at exactly 100,
    and the numeric surplus when above 100.
    """
    available = barrels * concentration
    shortfall = 100 - available
    if shortfall > 0:
        return '{} Stunden mehr Benzin ben\xf6tigt.'.format(shortfall)
    if shortfall == 0:
        return 'Perfekt!'
    # Surplus: hours beyond the required 100.
    return -shortfall
33.25
77
0.672932
33
266
5.242424
0.545455
0.260116
0.208092
0.208092
0
0
0
0
0
0
0
0.063725
0.233083
266
7
78
38
0.784314
0
0
0
0
0
0.161654
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.571429
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
05c15eaeb28505feb4dc8a9fd84472afc33bb3a8
44
py
Python
setup/blender/init.py
tokejepsen/pype
8f2b2b631cc5d3ad93eeb5ad3bc6110d32466ed3
[ "MIT" ]
null
null
null
setup/blender/init.py
tokejepsen/pype
8f2b2b631cc5d3ad93eeb5ad3bc6110d32466ed3
[ "MIT" ]
null
null
null
setup/blender/init.py
tokejepsen/pype
8f2b2b631cc5d3ad93eeb5ad3bc6110d32466ed3
[ "MIT" ]
null
null
null
from pype import blender blender.install()
11
24
0.795455
6
44
5.833333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.136364
44
3
25
14.666667
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
05df36f1c84d19bc399b0a1652440997855ebcb8
91
py
Python
built-in/TensorFlow/Research/cv/image_classification/Cars_for_TensorFlow/automl/vega/search_space/networks/pytorch/utils/bbox_utils/assigner/__init__.py
Huawei-Ascend/modelzoo
df51ed9c1d6dbde1deef63f2a037a369f8554406
[ "Apache-2.0" ]
12
2020-12-13T08:34:24.000Z
2022-03-20T15:17:17.000Z
built-in/TensorFlow/Research/cv/image_classification/Cars_for_TensorFlow/automl/vega/search_space/networks/pytorch/utils/bbox_utils/assigner/__init__.py
Huawei-Ascend/modelzoo
df51ed9c1d6dbde1deef63f2a037a369f8554406
[ "Apache-2.0" ]
3
2021-03-31T20:15:40.000Z
2022-02-09T23:50:46.000Z
built-in/TensorFlow/Research/cv/image_classification/Darts_for_TensorFlow/automl/vega/search_space/networks/pytorch/utils/bbox_utils/assigner/__init__.py
Huawei-Ascend/modelzoo
df51ed9c1d6dbde1deef63f2a037a369f8554406
[ "Apache-2.0" ]
2
2021-07-10T12:40:46.000Z
2021-12-17T07:55:15.000Z
from .assign_result import AssignResult from .all_neg_assigner import MaxIoUAllNegAssigner
30.333333
50
0.89011
11
91
7.090909
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.087912
91
2
51
45.5
0.939759
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
05e5a142a0c151f0e3b13eeb576dc1fa3c336f25
126
py
Python
app/main/__init__.py
KempinGe/KempinGe_Blog
06e6c6bb68fa0a6cd424756794a0686010ce812d
[ "Apache-2.0" ]
null
null
null
app/main/__init__.py
KempinGe/KempinGe_Blog
06e6c6bb68fa0a6cd424756794a0686010ce812d
[ "Apache-2.0" ]
null
null
null
app/main/__init__.py
KempinGe/KempinGe_Blog
06e6c6bb68fa0a6cd424756794a0686010ce812d
[ "Apache-2.0" ]
null
null
null
# -*- coding:utf-8 -*- from flask import Blueprint main = Blueprint('main',__name__) from . import errors,views
6.631579
33
0.634921
15
126
5.066667
0.733333
0.342105
0
0
0
0
0
0
0
0
0
0.010309
0.230159
126
18
34
7
0.773196
0.15873
0
0
0
0
0.042553
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
5
af1267ba1946daf6b500635f15312000a103e166
24
py
Python
enemy.py
frederikstroem/new-direction
2c07dfa3cef31cf8c2c4d1ea481e85b70268d31e
[ "MIT" ]
null
null
null
enemy.py
frederikstroem/new-direction
2c07dfa3cef31cf8c2c4d1ea481e85b70268d31e
[ "MIT" ]
null
null
null
enemy.py
frederikstroem/new-direction
2c07dfa3cef31cf8c2c4d1ea481e85b70268d31e
[ "MIT" ]
null
null
null
class Enemy(): pass
8
14
0.583333
3
24
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.291667
24
2
15
12
0.823529
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
af1f4516ef55f93de6645d021e4e0d92f7fe525d
161
py
Python
reddit_backend/reddit/admin.py
cursedclock/reddit-backend
fb5989c758f5459e510f6599c9b9798424c17ba9
[ "MIT" ]
1
2022-01-30T17:27:44.000Z
2022-01-30T17:27:44.000Z
reddit_backend/reddit/admin.py
cursedclock/reddit-backend
fb5989c758f5459e510f6599c9b9798424c17ba9
[ "MIT" ]
null
null
null
reddit_backend/reddit/admin.py
cursedclock/reddit-backend
fb5989c758f5459e510f6599c9b9798424c17ba9
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import User, UserProfile # Register your models here. admin.site.register(User) admin.site.register(UserProfile)
20.125
37
0.807453
22
161
5.909091
0.545455
0.138462
0.261538
0
0
0
0
0
0
0
0
0
0.111801
161
7
38
23
0.909091
0.161491
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
af2379b7c278bc6d1a5094e29739f61d2b525c00
63
py
Python
MonocularDepthEstimation/src/train/customlossfunction/__init__.py
csharpshooter/DeepLearning
c1d20660c32076468970f7376931e1fcd0d2644e
[ "MIT" ]
null
null
null
MonocularDepthEstimation/src/train/customlossfunction/__init__.py
csharpshooter/DeepLearning
c1d20660c32076468970f7376931e1fcd0d2644e
[ "MIT" ]
null
null
null
MonocularDepthEstimation/src/train/customlossfunction/__init__.py
csharpshooter/DeepLearning
c1d20660c32076468970f7376931e1fcd0d2644e
[ "MIT" ]
null
null
null
from .diceloss import DiceLoss from .dicecoeff import DiceCoeff
31.5
32
0.857143
8
63
6.75
0.5
0
0
0
0
0
0
0
0
0
0
0
0.111111
63
2
32
31.5
0.964286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
afa27681a7ebe509ea8956268ea3493cff0f9d2e
34
py
Python
jawfish/static/script/Lib/site-packages/requests.py
NeolithEra/jawfish
22fe222e607f0ad275860c75d81ab41114a18eb3
[ "MIT" ]
52
2016-08-08T15:08:19.000Z
2022-03-23T09:48:53.000Z
jawfish/static/script/Lib/site-packages/requests.py
NeolithEra/jawfish
22fe222e607f0ad275860c75d81ab41114a18eb3
[ "MIT" ]
6
2016-10-09T19:50:49.000Z
2019-08-17T15:34:21.000Z
jawfish/static/script/Lib/site-packages/requests.py
NeolithEra/jawfish
22fe222e607f0ad275860c75d81ab41114a18eb3
[ "MIT" ]
15
2017-02-03T03:08:57.000Z
2021-08-04T06:11:15.000Z
from .requests import __init__.py
17
33
0.823529
5
34
4.8
1
0
0
0
0
0
0
0
0
0
0
0
0.117647
34
1
34
34
0.8
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
1
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
5
afabf0ce2185370a7da3cb2701d2b97943bb83db
177
py
Python
tests/test_generate.py
mariushelf/factory_boss
b9f9c99ea731e2af1c6951f734e20ffe05ba1693
[ "MIT" ]
1
2021-06-03T12:47:17.000Z
2021-06-03T12:47:17.000Z
tests/test_generate.py
mariushelf/factory_boss
b9f9c99ea731e2af1c6951f734e20ffe05ba1693
[ "MIT" ]
5
2021-05-25T12:46:00.000Z
2021-05-27T18:32:37.000Z
tests/test_generate.py
mariushelf/factory_boss
b9f9c99ea731e2af1c6951f734e20ffe05ba1693
[ "MIT" ]
null
null
null
from factory_boss.scripts.generate import main def test_generate_does_not_raise(): main() assert True, "if we reach this line then main() from generate did not raise"
25.285714
80
0.757062
28
177
4.607143
0.75
0.124031
0
0
0
0
0
0
0
0
0
0
0.175141
177
6
81
29.5
0.883562
0
0
0
1
0
0.344633
0
0
0
0
0
0.25
1
0.25
true
0
0.25
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
5
afd3f2778e5b09c6c945894e6e38c1c58540a462
229
py
Python
{{ cookiecutter.project_slug }}/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}_base.py
Chemios/cookiecutter-chemios
0f937c438615177449aefee4d7bd1992ddc8f16b
[ "BSD-3-Clause" ]
null
null
null
{{ cookiecutter.project_slug }}/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}_base.py
Chemios/cookiecutter-chemios
0f937c438615177449aefee4d7bd1992ddc8f16b
[ "BSD-3-Clause" ]
null
null
null
{{ cookiecutter.project_slug }}/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}_base.py
Chemios/cookiecutter-chemios
0f937c438615177449aefee4d7bd1992ddc8f16b
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """Base Class for {{ cookiecutter.instrument_type}} """ from abc import ABC class {{ cookiecutter.instrument_type.title().replace(' ', '').replace('-','') }}(ABC): def __init__(self): pass
20.818182
87
0.60262
25
229
5.28
0.72
0.333333
0.393939
0
0
0
0
0
0
0
0
0.005291
0.174672
229
10
88
22.9
0.693122
0.091703
0
0
0
0
0.013333
0
0
0
0
0
0
0
null
null
0.25
0.25
null
null
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
5
bb782d5ebe421abe4dbccd675c68b6da870b7cdb
70
py
Python
granule_ingester/granule_ingester/consumer/__init__.py
kevinmarlis/incubator-sdap-ingester
7ee17fdf16201c499f7bd35cf398844f2c70f046
[ "Apache-2.0" ]
null
null
null
granule_ingester/granule_ingester/consumer/__init__.py
kevinmarlis/incubator-sdap-ingester
7ee17fdf16201c499f7bd35cf398844f2c70f046
[ "Apache-2.0" ]
1
2021-05-03T22:13:11.000Z
2021-05-03T22:13:11.000Z
granule_ingester/granule_ingester/consumer/__init__.py
kevinmarlis/incubator-sdap-ingester
7ee17fdf16201c499f7bd35cf398844f2c70f046
[ "Apache-2.0" ]
null
null
null
from granule_ingester.consumer.MessageConsumer import MessageConsumer
35
69
0.914286
7
70
9
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.057143
70
1
70
70
0.954545
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
bb7fa5bc494cf138b975c984dc3821302bfde18f
86
py
Python
examples/django_app/api/admin.py
nbxorg/webservices
e66e426b6eb7c91228667f2813a4a8bf70d5571d
[ "BSD-3-Clause" ]
14
2015-11-24T08:56:32.000Z
2022-03-13T22:52:42.000Z
examples/django_app/api/admin.py
nbxorg/webservices
e66e426b6eb7c91228667f2813a4a8bf70d5571d
[ "BSD-3-Clause" ]
2
2015-11-25T06:29:13.000Z
2016-05-10T15:00:06.000Z
examples/django_app/api/admin.py
nbxorg/webservices
e66e426b6eb7c91228667f2813a4a8bf70d5571d
[ "BSD-3-Clause" ]
4
2016-06-08T22:41:05.000Z
2018-07-25T18:09:06.000Z
from django.contrib import admin from api.models import Key admin.site.register(Key)
17.2
32
0.813953
14
86
5
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.116279
86
4
33
21.5
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
bb95203ad6a90afd88ebecb43122665067f20470
39,647
py
Python
tests/integration/test_api_networks_create.py
shildenbrand/GloboNetworkAPI-client-python
728ea9d13e3004e62586f5eb6ae2eae2bc41a50e
[ "Apache-2.0" ]
16
2015-05-09T16:33:01.000Z
2019-10-24T19:06:03.000Z
tests/integration/test_api_networks_create.py
shildenbrand/GloboNetworkAPI-client-python
728ea9d13e3004e62586f5eb6ae2eae2bc41a50e
[ "Apache-2.0" ]
3
2019-08-09T20:18:12.000Z
2019-11-11T17:23:48.000Z
tests/integration/test_api_networks_create.py
shildenbrand/GloboNetworkAPI-client-python
728ea9d13e3004e62586f5eb6ae2eae2bc41a50e
[ "Apache-2.0" ]
15
2015-02-03T17:10:59.000Z
2021-05-14T21:01:37.000Z
# -*- coding: utf-8 -*- import logging import os import sys from itertools import izip from time import time from unittest import TestCase from networkapiclient.ClientFactory import ClientFactory log = logging.getLogger() log.level = logging.DEBUG stream_handler = logging.StreamHandler(sys.stdout) log.addHandler(stream_handler) class ApiNetworksTestCase(TestCase): def setUp(self): self.networkapi_url = os.getenv( 'NETWORKAPI_URL', 'http://10.0.0.2:8000/') self.networkapi_user = os.getenv( 'NETWORKAPI_USER', 'networkapi') self.networkapi_pwd = os.getenv('NETWORKAPI_PWD', 'networkapi') self.client_api = ClientFactory( self.networkapi_url, self.networkapi_user, self.networkapi_pwd) self.sufix = time() self.configs() self.objects = dict() def tearDown(self): try: # Removes Vlans of Racks self.remove_all_vlans([self.objects['id_envrk_A']]) self.remove_all_vlans([self.objects['id_envrk_B']]) self.remove_all_vlans([self.objects['id_envrk_C']]) self.remove_all_vlans([self.objects['id_envrk_D']]) except: pass try: # Removes environment of load balancing self.client_api.create_api_environment()\ .delete(self.objects['id_envlb']) # Removes environment of racks self.client_api.create_api_environment()\ .delete(self.objects['id_envrk_A']) self.client_api.create_api_environment()\ .delete(self.objects['id_envrk_B']) self.client_api.create_api_environment()\ .delete(self.objects['id_envrk_C']) self.client_api.create_api_environment()\ .delete(self.objects['id_envrk_D']) except: pass # Removes vrfs try: self.client_api.create_api_vrf().delete(self.objects['id_vrf']) except: pass # Removes Environment DC for id_envdc in self.objects['id_envdc']: try: self.client_api.create_divisao_dc().remover(id_envdc) except: pass # Removes Environment Logic for id_envlog in self.objects['id_envlog']: try: self.client_api.create_ambiente_logico().remover(id_envlog) except: pass # Removes Environment layer 3 for load balancing for id_envl3 in self.objects['id_envl3']: try: 
self.client_api.create_grupo_l3().remover(id_envl3) except: pass # Removes eqpts try: self.client_api.create_api_create().delete(self.objects['id_eqpt']) except: pass def test_create_networkv4_by_zero(self): """ Test of integration for create environment, vlan, eqpt networks v4. ################## Starting test: - environment A: - eqpt 1, 2, 3 - vrf 1 - starting vlans 1,2,3 - environment B: - eqpt 2, 4, 5 - vrf 1 - starting vlans 4, 5, 6, 7, 8, 9 - environment C: - EQpt 5, 6 - vrf 2 - startinG vlans 10, 11 - environment D: - eqpt 7 - vrf 1 - starting vlans 1 ################## ################## Inserting new vlans without numbers: - environment A: Expected 10 - environment B: Expected 12 - environment C: Expected 1 - environment C: Expected 2 - environment C: Expected 3 - environment C: Expected 13 - environment B: Expected 14 - environment B: Expected 15 - environment B: Expected 16 - environment A: Expected 11 - environment A: Expected 13 - environment A: Expected 17 - environment A: Expected 18 - environment B: Expected 19 - environment C: Expected 17 - environment D: Expected 1 ################## ################## Starting networks: environment A: Nothing environment B: 10.0.1.0/24 10.0.2.0/25 10.0.3.0/24 environment C: Nothing environment D: Nothing ################## ################## Inserting networks without octs: - environment B: Expected 10.0.0.0/24 - environment C: Expected 10.0.0.0/25 - environment C using prefix 24: Expected 10.0.1.0/24 - environment A: Expected 10.0.2.128/25 - environment A: Expected 10.0.4.0/25 - environment B: Expected 10.0.5.0/24 - environment A: Expected 10.0.4.128/25 - environment D: Expected 10.0.0.0/24 ################## """ # Creates VRF 1 id_vrf_a = self.create_vrf('BeTeste-1') self.objects['id_vrf'] = [id_vrf_a] # Creates VRF 2 id_vrf_b = self.create_vrf('BeTeste-2') self.objects['id_vrf'].append(id_vrf_b) # Creates Environment DC and Logic id_envdc = self.create_envdc('BE-TESTE') id_envlog = self.create_envlog('TESTE') 
self.objects['id_envdc'] = [id_envdc] self.objects['id_envlog'] = [id_envlog] # Creates environment layer 3 for load balancing id_envl3 = self.create_envl3('BALANCEAMENTO-POOL') self.objects['id_envl3'] = [id_envl3] # Creates environment of load balancing id_env = self.create_env(id_envl3, id_envlog, id_envdc, id_vrf_a, self.configs['env_lb']) self.objects['id_envlb'] = [id_env] env_list = {'A': id_vrf_a, 'B': id_vrf_a, 'C': id_vrf_b, 'D': id_vrf_a} # Creates environments of racks for i in env_list: # Creates environment layer 3 for racks id_envl3 = self.create_envl3('RACK-%s' % i) self.objects['id_envl3'].append(id_envl3) id_env = self.create_env(id_envl3, id_envlog, id_envdc, env_list[i], self.configs[i]) self.objects['id_envrk_' + i] = id_env # Creates equipments with relationship environments self.create_equipments() # Creates vlans with numbers self.create_vlans_with_number_envs() # Creates vlans with auto numbers self.create_vlans_without_number() # Creates networks v4 with octs self.create_netv4_with_octs() # Creates networks with auto octs and prefix self.create_netv4_without_octs() def test_create_networkv6_by_zero(self): """ Test of integration for create environment, vlan, eqpt networks v6. 
################## Starting test: - environment A: - eqpt 1, 2, 3 - vrf 1 - starting vlans 1,2,3 - environment B: - eqpt 2, 4, 5 - vrf 1 - starting vLANS 4, 5, 6, 7, 8, 9 - environment C: - EQpt 5, 6 - vrf 2 - startinG VLANS 10, 11 - environment D: - eqpt 7 - vrf 1 - starting vlans 1 ################## ################## Inserting new vlans: - environment A: 10 - environment B: 12 - environment C: 1 - environment C: 2 - environment C: 3 - environment C: 13 - environment B: 14 - environment B: 15 - environment B: 16 - environment A: 11 - environment A: 13 - environment A: 17 - environment A: 18 - environment B: 19 - environment C: 17 - environment D: 1 ################## ################## Starting networks: environment A: Nothing environment B: fdbe:bebe:bebe:1201:0000:0000:0000:0000/64 fdbe:bebe:bebe:1202:0000:0000:0000:0000/65 fdbe:bebe:bebe:1203:0000:0000:0000:0000/64 environment C: Nothing environment D: Nothing ################## ################## Inserting networks: - environment B:fdbe:bebe:bebe:1200:0000:0000:0000:0000/64 - environment C:fdbe:bebe:bebe:1200:0000:0000:0000:0000/65 - environment C using prefix 24: fdbe:bebe:bebe:1201:0000:0000:0000:0000/64 - environment A:fdbe:bebe:bebe:1202:8000:0000:0000:0000/65 - environment A:fdbe:bebe:bebe:1204:0000:0000:0000:0000/65 - environment B:fdbe:bebe:bebe:1205:0000:0000:0000:0000/64 - environment A:fdbe:bebe:bebe:1204:8000:0000:0000:0000/65 - environment D:fdbe:bebe:bebe:1200:0000:0000:0000:0000/64 ################## """ # Creates VRF 1 id_vrf_a = self.create_vrf('BeTeste-1') self.objects['id_vrf'] = [id_vrf_a] # Creates VRF 2 id_vrf_b = self.create_vrf('BeTeste-2') self.objects['id_vrf'].append(id_vrf_b) # Creates Environment DC and Logic id_envdc = self.create_envdc('BE-TESTE') id_envlog = self.create_envlog('TESTE') self.objects['id_envdc'] = [id_envdc] self.objects['id_envlog'] = [id_envlog] # Creates environment layer 3 for load balancing id_envl3 = self.create_envl3('BALANCEAMENTO-POOL') 
self.objects['id_envl3'] = [id_envl3] # Creates environment of load balancing id_env = self.create_env(id_envl3, id_envlog, id_envdc, id_vrf_a, self.configs['env_lb']) self.objects['id_envlb'] = [id_env] env_list = {'A': id_vrf_a, 'B': id_vrf_a, 'C': id_vrf_b, 'D': id_vrf_a} # Creates environments of racks for i in env_list: # Creates environment layer 3 for racks id_envl3 = self.create_envl3('RACK-%s' % i) self.objects['id_envl3'].append(id_envl3) id_env = self.create_env(id_envl3, id_envlog, id_envdc, env_list[i], self.configs[i]) self.objects['id_envrk_' + i] = id_env # Creates equipments with relationship environments self.create_equipments() # Creates vlans with numbers self.create_vlans_with_number_envs() # Creates vlans with auto numbers self.create_vlans_without_number() # Creates networks with octs self.create_netv6_with_octs() # Creates networks with auto octs and prefix self.create_netv6_without_octs() def configs(self): self.configs = { 'env_lb': [{ 'subnet': 'febe:bebe:bebe:8200:0:0:0:0/57', 'new_prefix': '64', 'type': 'v6', 'network_type': 8 }, { 'subnet': '10.10.0.0/16', 'new_prefix': '24', 'type': 'v4', 'network_type': 8 }], 'A': [{ 'subnet': 'fdbe:bebe:bebe:1200:0000:0000:0000:0000/57', 'new_prefix': '65', 'type': 'v6', 'network_type': 8 }, { 'subnet': '10.0.0.0/16', 'new_prefix': '25', 'type': 'v4', 'network_type': 8 }], 'B': [{ 'subnet': 'fdbe:bebe:bebe:1200:0000:0000:0000:0000/57', 'new_prefix': '64', 'type': 'v6', 'network_type': 8 }, { 'subnet': '10.0.0.0/16', 'new_prefix': '24', 'type': 'v4', 'network_type': 8 }], 'C': [{ 'subnet': 'fdbe:bebe:bebe:1200:0000:0000:0000:0000/57', 'new_prefix': '65', 'type': 'v6', 'network_type': 8 }, { 'subnet': '10.0.0.0/16', 'new_prefix': '25', 'type': 'v4', 'network_type': 8 }], 'D': [{ 'subnet': 'fdbe:bebe:bebe:1200:0000:0000:0000:0000/57', 'new_prefix': '64', 'type': 'v6', 'network_type': 8 }, { 'subnet': '10.0.0.0/16', 'new_prefix': '24', 'type': 'v4', 'network_type': 8 }] } def create_envdc(self, name): 
"""Creates Environment DC""" id_envdc = self.client_api.create_divisao_dc()\ .inserir('%s-%s' % (name, self.sufix))['division_dc']['id'] return id_envdc def create_envlog(self, name): """Creates Environment Logic""" id_envlog = self.client_api.create_ambiente_logico()\ .inserir('%s-%s' % (name, self.sufix))['logical_environment']['id'] return id_envlog def create_envl3(self, name): """Creates environment layer 3 for load balancing.""" id_envl3 = self.client_api.create_grupo_l3()\ .inserir('%s-%s' % (name, self.sufix))['group_l3']['id'] return id_envl3 def create_vrf(self, name): """Creates VRF.""" vrf_dict = [{ 'internal_name': '%s-%s' % (name, self.sufix), 'vrf': '%s-%s' % (name, self.sufix) }] id_vrf = self.client_api.create_api_vrf()\ .create(vrf_dict)[0]['id'] return id_vrf def create_env(self, id_envl3, id_envlog, id_envdc, id_vrf, configs): """Creates environment.""" env_dict = [{ 'grupo_l3': int(id_envl3), 'ambiente_logico': int(id_envlog), 'divisao_dc': int(id_envdc), 'filter': 1, 'default_vrf': id_vrf, 'min_num_vlan_1': 1, 'max_num_vlan_1': 500, 'min_num_vlan_2': 1000, 'max_num_vlan_2': 1500, 'configs': configs }] id_env = self.client_api.create_api_environment()\ .create(env_dict)[0]['id'] return id_env def create_equipments(self): """Creates equipments.""" eqpt_dict = [{ 'name': 'TESTE-EQUIP-1%s' % self.sufix, 'maintenance': False, 'equipment_type': 3, 'model': 1, 'environments': [{ 'is_router': False, 'environment': self.objects['id_envrk_A'] }] }, { 'name': 'TESTE-EQUIP-2%s' % self.sufix, 'maintenance': False, 'equipment_type': 3, 'model': 1, 'environments': [{ 'is_router': False, 'environment': self.objects['id_envrk_A'] }] }, { 'name': 'TESTE-EQUIP-3%s' % self.sufix, 'maintenance': False, 'equipment_type': 3, 'model': 1, 'environments': [{ 'is_router': False, 'environment': self.objects['id_envrk_A'] }, { 'is_router': False, 'environment': self.objects['id_envrk_B'] }] }, { 'name': 'TESTE-EQUIP-4%s' % self.sufix, 'maintenance': False, 
'equipment_type': 3, 'model': 1, 'environments': [{ 'is_router': False, 'environment': self.objects['id_envrk_B'] }] }, { 'name': 'TESTE-EQUIP-5%s' % self.sufix, 'maintenance': False, 'equipment_type': 3, 'model': 1, 'environments': [{ 'is_router': False, 'environment': self.objects['id_envrk_B'] }, { 'is_router': False, 'environment': self.objects['id_envrk_C'] }] }, { 'name': 'TESTE-EQUIP-6%s' % self.sufix, 'maintenance': False, 'equipment_type': 3, 'model': 1, 'environments': [{ 'is_router': False, 'environment': self.objects['id_envrk_C'] }] }, { 'name': 'TESTE-EQUIP-7%s' % self.sufix, 'maintenance': False, 'equipment_type': 3, 'model': 1, 'environments': [{ 'is_router': False, 'environment': self.objects['id_envrk_D'] }] }] ids = self.client_api.create_api_equipment().create(eqpt_dict) self.objects['id_eqpt'] = ids def create_vlans_without_number(self): """Creates vlans without number.""" id_env_a = self.objects['id_envrk_A'] id_env_b = self.objects['id_envrk_B'] id_env_c = self.objects['id_envrk_C'] id_env_d = self.objects['id_envrk_D'] list_envs_alocate_vlans = [ id_env_a, id_env_b, id_env_c, id_env_c, id_env_c, id_env_c, id_env_b, id_env_b, id_env_b, id_env_a, id_env_a, id_env_a, id_env_a, id_env_b, id_env_c, id_env_d ] list_expected_num_vlans = [ 10, 12, 1, 2, 3, 13, 14, 15, 16, 11, 13, 17, 18, 19, 17, 1 ] vlans = [] for i, id_env in enumerate(list_envs_alocate_vlans): vlan = [{ 'name': 'Vlan auto %s - %s' % (i, self.sufix), 'environment': id_env }] id_vlan = [self.client_api.create_api_vlan().create(vlan)[0]['id']] vlan_obj = self.client_api.create_api_vlan().get(id_vlan)[ 'vlans'][0] vlans.append(vlan_obj) self.verify_num_vlan(vlans, list_expected_num_vlans) def create_vlans_with_number(self, nums_vlan, id_env): """Creates vlans with number.""" vlans = [] for num_vlan in nums_vlan: vlan = [{ 'name': 'Vlan %s' % (num_vlan), 'environment': id_env, 'num_vlan': num_vlan }] id_vlan = [self.client_api.create_api_vlan().create(vlan)[0]['id']] vlan_obj = 
self.client_api.create_api_vlan().get(id_vlan)[ 'vlans'][0] vlans.append(vlan_obj) return vlans def create_vlans_with_number_envs(self): """Creates vlans with number for environments A, B and C.""" id_env_a = self.objects['id_envrk_A'] id_env_b = self.objects['id_envrk_B'] id_env_c = self.objects['id_envrk_C'] # Environment A # [1, 2, 3] nums_vlan = range(1, 4) # Creates Vlans id_vlans = self.create_vlans_with_number(nums_vlan, id_env_a) ids = [id_vlan['id'] for id_vlan in id_vlans] # Get Vlans vlans = self.client_api.create_api_vlan().get(ids)['vlans'] # Verify num vlans was created self.verify_num_vlan(vlans, nums_vlan) # Environment B # [4, 5, 6, 7, 8, 9] nums_vlan = range(4, 10) # Creates Vlans id_vlans = self.create_vlans_with_number(nums_vlan, id_env_b) ids = [id_vlan['id'] for id_vlan in id_vlans] # Get Vlans vlans = self.client_api.create_api_vlan().get(ids)['vlans'] # Verify num vlans was created self.verify_num_vlan(vlans, nums_vlan) # Environment C # [10, 11] nums_vlan = range(10, 12) # Creates Vlans id_vlans = self.create_vlans_with_number(nums_vlan, id_env_c) ids = [id_vlan['id'] for id_vlan in id_vlans] # Get Vlans vlans = self.client_api.create_api_vlan().get(ids)['vlans'] # Verify num vlans was created self.verify_num_vlan(vlans, nums_vlan) def verify_num_vlan(self, objs, nums_vlan): for obj, num_vlan in izip(objs, nums_vlan): self.assertEqual( num_vlan, obj.get('num_vlan'), 'Num vlan should be %s was %s' % ( num_vlan, obj.get('num_vlan')) ) def search_all_vlans(self, ids_env): search_vlan = { 'start_record': 0, 'end_record': 100, 'asorting_cols': [], 'searchable_columns': [], 'extends_search': [ {'ambiente': id_env} for id_env in ids_env ] } vlans = self.client_api.create_api_vlan()\ .search(search=search_vlan, fields=['id'])['vlans'] ids_vlans = [id_vlan['id'] for id_vlan in vlans] return ids_vlans def remove_all_vlans(self, ids_env): ids_vlans = self.search_all_vlans(ids_env) if ids_vlans: search_net = { 'start_record': 0, 'end_record': 100, 
'asorting_cols': [], 'searchable_columns': [], 'extends_search': [{'vlan': id_vlan} for id_vlan in ids_vlans] } networks = self.client_api.create_api_network_ipv4()\ .search(search=search_net, fields=['id'])['networks'] ids_networks = [id['id'] for id in networks] if ids_networks: self.client_api.create_api_network_ipv4().delete(ids_networks) networks = self.client_api.create_api_network_ipv6()\ .search(search=search_net, fields=['id'])['networks'] ids_networks = [id['id'] for id in networks] if ids_networks: self.client_api.create_api_network_ipv6().delete(ids_networks) self.client_api.create_api_vlan().delete(ids_vlans) def create_netv4_with_octs(self): """Creates networks v4 using first vlan.""" networks = [{ 'oct1': 10, 'oct2': 0, 'oct3': 1, 'oct4': 0, 'prefix': 24, 'env': self.objects['id_envrk_B'] }, { 'oct1': 10, 'oct2': 0, 'oct3': 2, 'oct4': 0, 'prefix': 25, 'env': self.objects['id_envrk_B'] }, { 'oct1': 10, 'oct2': 0, 'oct3': 3, 'oct4': 0, 'prefix': 24, 'env': self.objects['id_envrk_B'] }] fields = [ 'oct1', 'oct2', 'oct3', 'oct4', 'prefix', 'vlan' ] for network_send in networks: # Get all vlans of environment ids_vlans = self.search_all_vlans([network_send.get('env')]) del network_send['env'] # Creates networks v4 network_send['vlan'] = ids_vlans[0] network = [{ 'oct1': network_send.get('oct1'), 'oct2': network_send.get('oct2'), 'oct3': network_send.get('oct3'), 'oct4': network_send.get('oct4'), 'prefix': network_send.get('prefix'), 'vlan': network_send.get('vlan'), 'network_type': 6, 'environmentvip': None }] id_network = self.client_api.create_api_network_ipv4()\ .create(network)[0]['id'] # Get object created network_rec = self.client_api.create_api_network_ipv4()\ .get([id_network], fields=fields)['networks'][0] # Verify if object is right self.assertDictEqual( network_send, network_rec, 'Network should be %s and was %s' % (network_send, network_rec) ) def create_netv4_without_octs(self): networks = [ { 'prefix': None, 'env': 
self.objects['id_envrk_B'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_C'], 'network_type': 6, 'environmentvip': None }, { 'prefix': 24, 'env': self.objects['id_envrk_C'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_A'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_A'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_B'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_A'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_D'], 'network_type': 6, 'environmentvip': None } ] expected_networks = [ { 'oct1': 10, 'oct2': 0, 'oct3': 0, 'oct4': 0, 'prefix': 24, 'mask_oct1': 255, 'mask_oct2': 255, 'mask_oct3': 255, 'mask_oct4': 0, }, { 'oct1': 10, 'oct2': 0, 'oct3': 0, 'oct4': 0, 'prefix': 25, 'mask_oct1': 255, 'mask_oct2': 255, 'mask_oct3': 255, 'mask_oct4': 128, }, { 'oct1': 10, 'oct2': 0, 'oct3': 1, 'oct4': 0, 'prefix': 24, 'mask_oct1': 255, 'mask_oct2': 255, 'mask_oct3': 255, 'mask_oct4': 0, }, { 'oct1': 10, 'oct2': 0, 'oct3': 2, 'oct4': 128, 'prefix': 25, 'mask_oct1': 255, 'mask_oct2': 255, 'mask_oct3': 255, 'mask_oct4': 128 }, { 'oct1': 10, 'oct2': 0, 'oct3': 4, 'oct4': 0, 'prefix': 25, 'mask_oct1': 255, 'mask_oct2': 255, 'mask_oct3': 255, 'mask_oct4': 128 }, { 'oct1': 10, 'oct2': 0, 'oct3': 5, 'oct4': 0, 'prefix': 24, 'mask_oct1': 255, 'mask_oct2': 255, 'mask_oct3': 255, 'mask_oct4': 0 }, { 'oct1': 10, 'oct2': 0, 'oct3': 4, 'oct4': 128, 'prefix': 25, 'mask_oct1': 255, 'mask_oct2': 255, 'mask_oct3': 255, 'mask_oct4': 128 }, { 'oct1': 10, 'oct2': 0, 'oct3': 0, 'oct4': 0, 'prefix': 24, 'mask_oct1': 255, 'mask_oct2': 255, 'mask_oct3': 255, 'mask_oct4': 0 } ] fields = [ 'oct1', 'oct2', 'oct3', 'oct4', 'prefix', 'mask_oct1', 'mask_oct2', 'mask_oct3', 'mask_oct4', ] for network_send, 
expected_network in izip(networks, expected_networks): # Get all vlans of environment ids_vlans = self.search_all_vlans([network_send.get('env')]) # Creates networks v4 network_send['vlan'] = ids_vlans[0] id_network = self.client_api.create_api_network_ipv4()\ .create([network_send])[0]['id'] network_rec = self.client_api.create_api_network_ipv4()\ .get([id_network], fields=fields)['networks'][0] self.assertDictEqual( expected_network, network_rec, 'Network should be %s and was %s' % ( expected_network, network_rec) ) def create_netv6_with_octs(self): """Creates networks v6 using first vlan.""" networks = [{ 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1201', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 64, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '0000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000', 'env': self.objects['id_envrk_B'] }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1202', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 65, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '8000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000', 'env': self.objects['id_envrk_B'] }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1203', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 64, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '0000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000', 'env': self.objects['id_envrk_B'] }] fields = [ 'block1', 'block2', 'block3', 'block4', 'block5', 'block6', 'block7', 'block8', 'prefix', 'mask1', 'mask2', 'mask3', 'mask4', 'mask5', 'mask6', 'mask7', 'mask8', 'vlan' ] for network_send in networks: # Get all vlans of environment ids_vlans = self.search_all_vlans([network_send.get('env')]) del network_send['env'] # Creates networks v4 network_send['vlan'] = ids_vlans[0] 
network = [{ 'block1': network_send.get('block1'), 'block2': network_send.get('block2'), 'block3': network_send.get('block3'), 'block4': network_send.get('block4'), 'block5': network_send.get('block5'), 'block6': network_send.get('block6'), 'block7': network_send.get('block7'), 'block8': network_send.get('block8'), 'prefix': network_send.get('prefix'), 'vlan': network_send.get('vlan'), 'network_type': 6, 'environmentvip': None }] id_network = self.client_api.create_api_network_ipv6()\ .create(network)[0]['id'] # Get object created network_rec = self.client_api.create_api_network_ipv6()\ .get([id_network], fields=fields)['networks'][0] # Verify if object is right self.assertDictEqual( network_send, network_rec, 'Network should be %s and was %s' % (network_send, network_rec) ) def create_netv6_without_octs(self): networks = [ { 'prefix': None, 'env': self.objects['id_envrk_B'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_C'], 'network_type': 6, 'environmentvip': None }, { 'prefix': 64, 'env': self.objects['id_envrk_C'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_A'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_A'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_B'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_A'], 'network_type': 6, 'environmentvip': None }, { 'prefix': None, 'env': self.objects['id_envrk_D'], 'network_type': 6, 'environmentvip': None } ] expected_networks = [{ 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1200', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 64, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '0000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000' }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 
'block4': '1200', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 65, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '8000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000' }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1201', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 64, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '0000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000' }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1202', 'block5': '8000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 65, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '8000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000' }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1204', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 65, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '8000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000' }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1205', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 64, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '0000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000' }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1204', 'block5': '8000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 65, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '8000', 'mask6': '0000', 'mask7': '0000', 'mask8': '0000' }, { 'block1': 'fdbe', 'block2': 'bebe', 'block3': 'bebe', 'block4': '1200', 'block5': '0000', 'block6': '0000', 'block7': '0000', 'block8': '0000', 'prefix': 64, 'mask1': 'ffff', 'mask2': 'ffff', 'mask3': 'ffff', 'mask4': 'ffff', 'mask5': '0000', 'mask6': '0000', 
'mask7': '0000', 'mask8': '0000' }] fields = [ 'block1', 'block2', 'block3', 'block4', 'block5', 'block6', 'block7', 'block8', 'prefix', 'mask1', 'mask2', 'mask3', 'mask4', 'mask5', 'mask6', 'mask7', 'mask8' ] for network_send, expected_network in izip(networks, expected_networks): # Get all vlans of environment ids_vlans = self.search_all_vlans([network_send.get('env')]) # Creates networks v4 network_send['vlan'] = ids_vlans[0] id_network = self.client_api.create_api_network_ipv6()\ .create([network_send])[0]['id'] network_rec = self.client_api.create_api_network_ipv6()\ .get([id_network], fields=fields)['networks'][0] self.assertDictEqual( expected_network, network_rec, 'Network should be %s and was %s' % ( expected_network, network_rec) )
30.877726
87
0.446465
3,894
39,647
4.336415
0.063431
0.0456
0.053121
0.051167
0.783075
0.760156
0.715504
0.703482
0.672687
0.665166
0
0.075473
0.418165
39,647
1,283
88
30.901793
0.656537
0.138295
0
0.736674
0
0
0.186192
0.005998
0
0
0
0
0.00533
1
0.022388
false
0.007463
0.007463
0
0.03838
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
bba15f3003caa41222e3b892647e4f127a81af54
209
py
Python
ddrr/templatetags/ddrr.py
denizdogan/django-debug-requests-responses
f4e76f167a9f2144849469a894f8bee65b6355d9
[ "MIT" ]
18
2019-05-18T01:51:59.000Z
2021-09-28T17:00:56.000Z
ddrr/templatetags/ddrr.py
denizdogan/django-debug-requests-responses
f4e76f167a9f2144849469a894f8bee65b6355d9
[ "MIT" ]
6
2019-05-27T09:32:28.000Z
2021-11-24T13:01:19.000Z
ddrr/templatetags/ddrr.py
denizdogan/django-debug-requests-responses
f4e76f167a9f2144849469a894f8bee65b6355d9
[ "MIT" ]
1
2021-12-01T10:43:25.000Z
2021-12-01T10:43:25.000Z
from django import template
from django.utils.termcolors import colorize as dj_colorize

register = template.Library()


@register.filter
def colorize(value, fg):
    """Template filter: wrap *value* in ANSI color codes for foreground *fg*.

    Delegates to ``django.utils.termcolors.colorize`` with no display
    options, passing only the foreground color.
    """
    return dj_colorize(value, (), fg=fg)
20.9
59
0.77512
29
209
5.517241
0.586207
0.125
0
0
0
0
0
0
0
0
0
0
0.133971
209
9
60
23.222222
0.883978
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
bba2a21d55bfd7b513cbf079e63cad77a989712b
3,472
py
Python
test/test_mets_header.py
DILCISBoard/py-ip-validator
1a00f7205d3676cf24c993076614fcbeb50cf8d7
[ "Apache-2.0" ]
2
2018-11-20T12:17:57.000Z
2019-09-28T21:01:38.000Z
test/test_mets_header.py
DILCISBoard/py-ip-validator
1a00f7205d3676cf24c993076614fcbeb50cf8d7
[ "Apache-2.0" ]
2
2020-06-15T09:28:44.000Z
2020-06-18T10:30:26.000Z
test/test_mets_header.py
DILCISBoard/py-ip-validator
1a00f7205d3676cf24c993076614fcbeb50cf8d7
[ "Apache-2.0" ]
null
null
null
from base import Base
from metsvalidator.mets_validator_impl import validate


# In this class we test first the correct mets header element and then
# different error samples. If a validation error was detected, the
# validation result should be False.
# NOTE(review): despite the comment above, every test below asserts that
# the overall result equals True, even for the error samples — confirm
# whether these header defects are expected to leave overall validation
# passing (with details only in the report).
class MetsHeaderTestCase(Base):
    """ Tests for METS header element """

    # Directory (under SOURCES_PATH) holding the header sample files.
    _SAMPLES_DIR = "mets_header_element/"

    def _assert_validates(self, sample):
        """Validate *sample* from the header samples dir and assert the
        overall result equals True (same truth condition as the original
        ``assertTrue(result == True)``)."""
        validationResult, report = validate(
            self.rules, self.SOURCES_PATH + self._SAMPLES_DIR + sample)
        self.assertEqual(validationResult, True)

    def test_load_rules(self):
        # Known-good sample: must validate cleanly.
        self._assert_validates("mets_header_element_ok.xml")

    def test_csip7_check_header_element(self):
        self._assert_validates("mets_header_element_not_exists.xml")

    def test_csip9_check_package_creation_date(self):
        self._assert_validates(
            "mets_header_package_creation_date_not_exists.xml")

    def test_csip10_check_package_last_modification_date(self):
        self._assert_validates(
            "mets_header_package_last_modification_date_not_exists.xml")

    def test_csip11_check_oais_package_type(self):
        # Two defect variants: the element is missing, or its value is bad.
        self._assert_validates("mets_header_oais_package_type_not_exists.xml")
        self._assert_validates(
            "mets_header_oais_package_type_value_error.xml")

    def test_csip12_check_header_agent_element(self):
        self._assert_validates("mets_header_agent_element_not_exists.xml")

    def test_csip13_check_header_agent_role_element(self):
        self._assert_validates(
            "mets_header_element_agent_role_not_exists.xml")

    def test_csip15_check_header_agent_type_element(self):
        self._assert_validates(
            "mets_header_element_agent_type_not_exists.xml")

    def test_csip16_check_header_agent_other_type_element(self):
        self._assert_validates(
            "mets_header_element_agent_othertype_not_exists.xml")

    def test_csip17_check_header_agent_name_element(self):
        self._assert_validates(
            "mets_header_element_agent_name_not_exists.xml")

    def test_csip18_check_header_agent_note_element(self):
        self._assert_validates(
            "mets_header_element_agent_note_not_exists.xml")

    def test_csip19_check_header_agent_note_type_element(self):
        self._assert_validates(
            "mets_header_element_agent_note_type_not_exists.xml")
58.847458
154
0.799827
441
3,472
5.888889
0.1678
0.107817
0.150558
0.170196
0.768194
0.768194
0.768194
0.734309
0.695803
0.593762
0
0.00656
0.121832
3,472
58
155
59.862069
0.845195
0.056452
0
0.317073
0
0
0.25528
0.25528
0
0
0
0
0.317073
1
0.292683
false
0
0.04878
0
0.365854
0
0
0
0
null
0
0
1
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
bbca0296dd3dfc0866b4e452ed8724267e5ba873
40
py
Python
build/lib/TopDownCrawl/__main__.py
bhcooper/TopDownCrawl
b02a5102d25810a4875b9bdf9a62a07e02715202
[ "MIT" ]
null
null
null
build/lib/TopDownCrawl/__main__.py
bhcooper/TopDownCrawl
b02a5102d25810a4875b9bdf9a62a07e02715202
[ "MIT" ]
null
null
null
build/lib/TopDownCrawl/__main__.py
bhcooper/TopDownCrawl
b02a5102d25810a4875b9bdf9a62a07e02715202
[ "MIT" ]
null
null
null
from .TopDownCrawl import main main()
13.333333
31
0.75
5
40
6
0.8
0
0
0
0
0
0
0
0
0
0
0
0.175
40
3
32
13.333333
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
bbce7724994cb0f64a0a8e7ce81df29db33a46db
45,206
py
Python
examples/regcoilPaper_figure10d_constArclengthAngle/tests.py
landreman/regcoil
99f9abf8b0b0c6ec7bb6e7975dbee5e438808162
[ "BSD-2-Clause" ]
5
2017-05-26T14:08:43.000Z
2020-12-30T10:22:26.000Z
examples/regcoilPaper_figure10d_constArclengthAngle/tests.py
landreman/regcoil
99f9abf8b0b0c6ec7bb6e7975dbee5e438808162
[ "BSD-2-Clause" ]
8
2018-02-17T07:44:41.000Z
2020-06-30T20:34:25.000Z
examples/regcoilPaper_figure10d_constArclengthAngle/tests.py
landreman/regcoil
99f9abf8b0b0c6ec7bb6e7975dbee5e438808162
[ "BSD-2-Clause" ]
4
2016-12-13T18:15:05.000Z
2019-06-07T20:58:01.000Z
#!/usr/bin/env python # This python script checks the output file for an example to # see if the results are close to expected values. This script may be # run directly, and it is also called when "make test" is run from the # main REGCOIL directory. execfile('../testsCommon.py') absoluteTolerance = 1e-100 numFailures = 0 f = readOutputFile() variableName = 'lambda' data = f.variables[variableName][()] relativeTolerance = 1e-12 numFailures += arrayShouldBe(data, [0, 1e-15, 1.33352143216332e-15, 1.77827941003892e-15,\ 2.37137370566166e-15, 3.16227766016838e-15, 4.21696503428582e-15,\ 5.62341325190349e-15, 7.49894209332456e-15, 1e-14],relativeTolerance,absoluteTolerance) # The values here for chi2_B, chi2_K, max_Bnormal, and max_K are copied from the results for the original poloidal angle, to verify the results are unchanged under the transformation. We need a more generous tolerance than usual in order for the tests to pass. variableName = 'chi2_B' data = f.variables[variableName][()] relativeTolerance = 3e-3 # Note: we exclude the first element (the unregularized case lambda=0) since it can be overwhelmed by roundoff error. 
numFailures += arrayShouldBe(data[1:], [0.174519878306313, 0.233148551834137, \ 0.310446213587783, 0.411565852326815, 0.542616084841207, \ 0.710568315783369, 0.922890554110523, 1.18673864681126, 1.50770781013225],relativeTolerance,absoluteTolerance) variableName = 'chi2_K' data = f.variables[variableName][()] relativeTolerance = 1e-4 numFailures += arrayShouldBe(data[1:], [1.74957088182873e+15, 1.6989658516062e+15,\ 1.64892520707378e+15, 1.59982527129183e+15, 1.55209554042516e+15,\ 1.50621123410416e+15, 1.46269693947801e+15, 1.42212837760326e+15,\ 1.38509938973785e+15], relativeTolerance,absoluteTolerance) variableName = 'max_Bnormal' data = f.variables[variableName][()] relativeTolerance = 1e-3 numFailures += arrayShouldBe(data[1:], [ 0.117555544291034, 0.134561224876993,\ 0.153923792979921, 0.176347144447289, 0.201603899107085,\ 0.229977435728317, 0.261839981656698, 0.296625161466877, 0.333988791941713], relativeTolerance,absoluteTolerance) variableName = 'max_K' data = f.variables[variableName][()] relativeTolerance = 1e-3 numFailures += arrayShouldBe(data[1:], [8824481.60102707, 8355450.23653953,\ 7884212.39105115, 7412969.17310808, 6945281.57578389, 6485916.75531679,\ 6040435.92861007, 5614638.75966927, 5214018.48872153], relativeTolerance,absoluteTolerance) variableName = 'single_valued_current_potential_mn' data = f.variables[variableName][()] #print data.shape relativeTolerance = 1e-4 absoluteTolerance = 1e-6 numFailures += arrayShouldBe(data[1,:], \ [-503975.8014005262, -32679.790309051416, -7710.5535328207525, -3725.9646511380906, -396.6522222653387, -217.26377944412482, -44.592130544106354, -23.807281613508163, -9.129030581733126, -1.4130668502492885, -5.4745237459469065, -25.916485196938858, -18.274364776467422, 3.9106936664980996, 17.232118294125836, 12.968725492953528, -3.005864007241751, -6.184391250948466, 3.5308708635549615, -0.18800120260726377, -2.2710633271623673, 10.990813979078734, 4.921502466162304, 4.984703271058151, 11.14326786417461, 
1.495198145135585, 10.865692648571198, 8.939442575018877, -5.188368478645114, 2.3233929594001754, 1.5823992054138791, -1.1250136508101258, 2.4496168002989185, -1.7773897227780657, 3.487566972104818, 1.7284810389478298, -6.009643677324005, 13.832741034295365, 6.750103196813988, 6.468058930371427, 10.728211716975332, 2.5262162575373006, 6.360638778956442, 8.031591205888025, -0.7809746925178334, 3.4748359914609717, 0.8602658926423421, -7.966311329886132, -1.101773184314164, 11.016900973348028, 17.269654386630346, -0.35364550872306655, -18.568673055850716, -11.27557367719841, -60.287282568261354, 119.87715110394346, -35.49716560213837, 148.7434629020592, 45.151105343138425, -704.4176051577743, 977.5670830363711, -10864.278874544423, 9660.336851320033, -57339.6113642928, 64621.12164622918, 612991.9041148119, 187505.9868119284, 50434.279539317766, -2767.012152521357, 6284.829812760196, -1412.151339124097, 1594.503557406429, -552.4332041475582, 283.99052994340434, -175.64183926721958, -28.57934921699459, -9.616467393314434, 33.855647905638044, 13.390364490376543, -3.8676110460084296, -19.131678517543516, -12.664865086183992, 2.203409544917647, 6.409321806018301, -2.128768525828878, 0.03276406635705102, -5.469790243148401, -6.273505735650294, -5.910579489551271, -10.033789505164602, -9.362608753380202, -0.856473113913965, -13.15569557605815, -5.144443371447201, 3.212897472272088, -2.340062192617178, -0.6181031252069388, -7.868339963365808, -2.603647391982794, 3.019653901353064, -5.881661981540501, 0.53338848667999, 4.078504114901498, -18.021099176995424, -2.028334485547257, -11.029201867000452, -12.311106984380618, 3.1130654091704755, -5.507862353311225, -8.537749095909641, -3.869552218203573, -4.6691583411094015, 4.754414007962112, 5.144699245216917, 3.334142969965204, -11.624781282297631, -19.93657705984092, 30.736785283106087, -67.66727126703363, 179.20396095149403, -125.32222622868117, -110.98660196536851, 407.4739976638304, -1655.551985062861, 3690.0203954213553, 
-7418.042742120173, 14632.818938190629, -13192.04734517501, -1846.0915557840885, 184961.25751794744, -381287.7024373155, -240585.3925098467, -101432.19263086248, -2584.175103594869, -7108.501180622156, 1604.2838855332254, -2668.359370920409, 683.7507687202257, -477.78579923730734, 228.3430650345384, 33.02470218613948, 76.58681943104722, -24.419046541049838, -21.942325010872462, -18.08173050182792, 8.674549580689721, 18.132376726224756, 13.535506128973356, -3.9780970187466407, -6.663188954786002, 1.1536462859285317, 5.448049675423869, 7.27621741132922, -0.4038582699410821, 10.375873263876269, 10.42469736060461, 6.671048285923659, 1.5787972633759966, 12.336946795560195, 2.6844297020050343, -1.6823587789703485, 2.013000220704357, 8.912021192521117, 9.459669197193753, 2.0591072585226495, -3.804198915616872, 7.912008108063563, -5.0886946265896, 1.332249466214986, 20.01090218269759, -1.5754823780550973, 14.9863445643114, 14.95470778318641, -10.702826289806437, 5.743447241215866, 8.674676887291588, 6.568521457266182, 5.508967203036374, -8.268214197173464, -1.7530560446098475, -8.551314664313107, 16.596228944986333, 15.248574930437133, -48.40512601139466, 134.41502408435232, -306.6467954108724, 355.2915733069564, -124.0462792342669, -213.37985884482234, 1835.3069544780992, -5107.374613285021, 10988.839311992106, -21566.313715707154, 22465.28918805052, 11945.549463547697, -140413.3668138652, 361578.24576193985, 126153.7521982199, 46607.81298780822, -21462.00654992069, 9794.453007559203, -4862.295072732947, 4317.174932565249, -1919.1550682908241, 1059.479957553506, -600.9930955002953, 138.7661214493991, -153.9716335242488, 91.42080927762463, -4.768869465936051, 29.555826716062764, -17.522525741705962, -14.816807411080866, -13.448771593802903, 6.402798119713411, 4.533235357228403, 0.4994392658823652, -10.025121187634925, -7.0203911875246, 2.040404525648247, -9.554524494135451, -11.543469141432864, -4.20648522814738, -2.944432485068831, -9.943131482467544, -1.2794637635371147, 
0.6954165485694124, -5.721244601652935, -11.094280050574811, -11.400685732325895, -0.4850785318701988, 2.529459960643355, -9.213430442892912, 11.034246324224329, -11.484170395747567, -18.1920845878533, 6.364765082200885, -21.797289615476632, -18.806586725042393, 17.284741022885836, -4.225387773815186, -9.346936412051814, -8.056664392953621, -4.246316694455664, 5.125342893647058, 4.397206013702736, 11.64511053448349, -33.23794556338927, 36.718759494989804, -43.03279848814265, -17.625754044545015, 203.05050245600816, -415.5059799762903, 622.7168369258256, -1344.493874685469, 1661.8313909647109, -924.0304700151753, -4315.2647840578875, 23723.74021211669, -44755.70384021121, 31851.829329600914, 61560.231835303806, -263313.4249885421, -93469.42405319988, -32003.547932080925, 17955.727933101316, -7671.371650671455, 4372.171913387279, -3809.587161654735, 1862.344530628991, -1091.7597747033187, 673.1330234634314, -198.13596564448864, 193.04908145678405, -121.64351905196251, 21.62404363635152, -36.09126319802957, 24.583159189445873, 11.727243137717121, 12.006567435627346, -7.510410577867197, -2.4651204778383375, -1.6009903304826538, 16.575266134934946, 3.2164175230430727, -0.09398434021231857, 8.719726274823605, 11.633542590250567, 2.6212304261338613, 4.300056896809554, 7.205349216641313, 0.584595871256945, 6.585754612165129, 7.784793137583577, 12.682475845314276, 13.052847906866225, -3.000449941765387, 2.0747914894355177, 8.496214480144962, -16.624259237886434, 18.535700177678173, 14.865802986471113, -13.851400096632652, 28.04303928093106, 21.834591656767504, -20.513214067307466, -0.3625480871907622, 9.909178416517726, 6.644846885030062, 5.088632080323351, -2.6057090608091795, -6.944492541807445, -8.50926807108921, 24.028258720068823, -31.921661472421327, 53.93518313675965, -41.180799944349786, -86.454257570802, 240.6609843607849, -434.87181805954725, 1091.2143656719636, -1351.5091103968641, 1245.1257419959966, 889.9445144764535, -10999.958800600925, 16405.295758145567, 
-35605.90041329255, -10018.488082573098, 138780.2273610639, 63815.68051566069, 20469.44650190747, -8023.179924364537, 3406.961886993917, -1959.132249057874, 2019.859395187722, -942.3910042095473, 615.0530456936328, -401.90638843989746, 113.30832263117883, -134.71728466238426, 91.14213800594916, -10.0570630408302, 27.212237571950567, -23.904701692720835, -12.102565431512144, -6.232090186945281, 5.24097784143611, 0.9565736174195393, 3.0277305057362565, -17.96575902603674, 0.33312989934304044, -1.4364160250371292, -7.1636762311535955, -9.896723329462217, -1.573839112507143, -4.828072711466066, -4.647669185287998, -4.430730570910832, -8.35385781041577, -9.745439109678076, -14.397254974893908, -12.690077522223925, 7.541001398674941, -8.308699677346661, -4.623193727291135, 16.265305916315093, -20.117745672601593, -15.850073025272483, 22.32258514062219, -28.674314981907163, -28.125662222438606, 19.62865894930349, 8.508072893118467, -8.930166829706238, -3.407453495652062, -6.524529235248619, -0.28562395036885696, 14.68466004050764, -7.845496848059763, 10.614649954724873, -6.213632192780611, -40.55187461667538, 104.58690278510515, -151.82560494968124, 238.67908442405798, -339.87802096750585, -105.01524944669171, 941.6729844315269, -2777.693138738224, 5172.401895275977, -2976.059802725074, -1468.57216024634, 30768.5362334554, -27544.47414484147, -70788.88529604005, -38117.02915808569, -8184.124502234804, 1318.6769924899625, -474.52896152303964, -46.56626797685095, -487.969747949035, 128.43936692538682, -128.61749141298222, 111.04636886713062, 6.424459443124898, 40.08412291304369, -41.826481125025325, -11.738791753876288, -10.896057182315165, 18.325766985113734, 12.652746905914556, -1.6658545596991328, 0.9543126252064096, -3.331133224955805, -0.8859028871809206, 13.308346711876808, -0.4790673767949258, 2.5256492472104766, 6.411039051105144, 7.480395015805166, 0.9803379180810881, 4.262956887134013, -3.034318324456222, 6.355312499298475, 11.306813963946283, 10.823542837818861, 
16.886430535155974, 9.318957888992168, -10.249351525703373, 15.54763984921547, -1.154744787199667, -11.653819874272157, 20.321912025168366, 20.85206478899527, -30.137421750554093, 22.37641579572903, 37.99898038044883, -14.082112740668268, -14.87415372131766, 3.7084880212973417, 0.734909435098249, 7.844137192300999, -0.2508201865901279, -15.944867143448763, 9.996838388674027, -23.64202986285083, 20.669747364410604, 30.72530823223844, -99.20896931611861, 185.48912517235019, -288.44139899446515, 434.57033557944266, -196.57800934776986, -475.4272718555742, 2009.4162947278585, -6118.834233421411, 6464.46306175662, -1259.479460741468, -21314.779660860717, 33749.12484011551, 41128.86814870678, 23474.050183661187, 1980.5267084971292, 816.0452742063798, -596.9376699263928, 790.2480194961213, -171.1986431119572, 219.09405717312956, -99.92285990272372, 39.37906778349911, -73.23070458350354, 19.40130214251695, 10.911342099646628, 24.11604087205898, -1.5153375527443893, -13.764299429794661, -9.308710857525156, 6.813805458380852, -10.004418121340658, 7.701289637145624, -3.8344271105972987, -9.116525122501754, -0.8576922153534339, -2.4957623342588717, -6.313095569371136, -5.04991073196104, -0.6734664221841001, 5.513788885613441, 1.914235666970995, -8.75519581437849, -14.635469481000758, -11.840612506771174, -19.492964784743137, -4.327669234308769, 7.489464986040454, -13.172740969285657, 2.9118861071850906, 8.585410376285186, -16.986593293363573, -28.6328420305762, 33.6898901010703, -6.553063426773693, -45.98826249743056, 3.6724269373081397, 15.052483253416248, 4.177997677893307, 0.6671500651802562, -10.051110539850972, 5.9327139366037365, 9.076034032025632, -0.6563310337512038, 24.93207983343973, -38.84917439192064, 19.727291129506185, -2.6839458854790097, -56.01838935610275, 190.81086553671364, -481.27268792167155, 590.1621582924001, -252.22937726148942, -829.5934126075741, 4271.458706334015, -4528.921719094196, 4025.5638174544083, 11148.478585267425, -24575.394593650497, 
-24845.008069925847, -14018.861172381501, -501.20652940176944, -831.352200536736, 570.5906627747521, -683.2799138893245, 241.1950888266117, -234.80973497001546, 116.46053678502332, -68.56556638271284, 75.94776172899938, -34.90389103616516, -2.2584556969402305, -23.336042120882688, 6.816958286534252, 12.047273083751168, 2.3826016383487305, -5.5212410905199105, 11.458960194277955, -8.198415439718893, 6.253680637924972, 5.585080103670781, 1.886526432526786, 1.8103931141566367, 5.80683839878849, 2.8266615780521622, 2.9310728903021186, -7.368074547876151, -1.2699749134462281, 12.065000577449734, 17.353572387063437, 13.834977231822428, 22.10411177889955, -0.4392046969761402, 0.1078477655119226, 2.902203441578998, 0.5103758193403826, -6.026148329513843, 7.98793792042057, 40.072408610008026, -30.28494228534813, -17.348026215353094, 44.00735541693616, 11.136510156649669, -8.11166818481998, -12.147472482713448, 1.3687474343758737, 7.980058997312755, -7.196613013926, -4.402843091418322, -11.579479372071647, -10.575547353167256, 25.49045747687096, -14.55594344441375, 42.22596133740276, -44.260752517569884, -20.285037992935873, 264.4714093160598, -537.2833413365867, 606.1336392481264, -103.42638624908355, -1717.9327711100411, 2761.8489851977583, -5001.834769004003, -3142.537105977507, 15826.898180528488, 15081.899480062586, 7614.881335398265, 396.69266224456, 393.6208623126861, -244.14129482980428, 364.52501667574916, -127.47717229601663, 139.4042223134961, -57.85983620078037, 45.090469225367094, -49.51973498350021, 29.355931011003015, 4.363607976935871, 15.909744991719434, -8.298938291744548, -12.519889722422384, 4.954497320818088, -0.6921578865156798, -6.198098717283995, 6.0977765748646675, -6.963676742284577, -4.001381760705748, -2.124693604342804, -1.3307247530472772, -4.567891748176616, -6.49480661259391, -1.9357210143217312, 8.798805461592085, -0.00029917319458240365, -15.924129068411533, -18.341199359675954, -18.196030064635618, -25.80624617793958, -1.6757037760031497, 
-6.19649455288626, 7.630812948295591, -7.837155039276845, 3.934639147877704, 4.499672496754712, -51.43632073269869, 18.838704611943193, 41.69615979922138, -28.82580210333566, -27.641507652468377, -3.7903093585056364, 15.206865670610345, -3.8579102586268608, -4.456415030153767, 3.8993096024475835, 8.326929263177409, 9.6702691092764, 12.387663189389908, -19.96473830651457, -21.092975651938815, 23.858895241574402, -23.19636847133641, -27.94913992571503, 33.05014322094764, 97.10042106057483, -526.4605292076149, 555.4387408801574, -85.83311674125105, -1378.3093285135815, 4730.579094469043, -845.9465673431097, -10267.738783115845, -9361.103052130202, -3844.29730984451, -398.172555790451, -69.01638949378452, -0.04433233556823378, -125.97127653440496, 22.34361164779212, -54.586281794793805, 2.803915942336055, -14.774008072656741, 21.517392201301572, -19.6640272312062, -7.4380151061124415, -8.420664312120946, 10.322513143612019, 15.846656311937055, -4.958186704166782, 5.795614257181984, 1.6447496440543308, -2.9228345281694335, 5.6117884518380725, 3.8208813161295323, 1.552983777236481, 1.089075704001421, 0.2608149464853083, 8.91097769102829, 0.5259271586413679, -8.796009456786688, 2.4098670074711195, 20.137272708709194, 17.5530240804308, 27.01262855435923, 27.281418838961294, 2.8039411904881257, 10.052570579119525, -19.899298687786896, 14.771831565539475, 3.602421494838801, -22.76445230153882, 55.434567581302694, 2.3482763298247216, -56.52180061254498, 2.4156077807588954, 38.35015410371494, 14.152791899618784, -12.336390513277234, 6.262277981332902, 0.9382952736418847, -2.1596620541298317, -8.867572154552017, -12.573962080761435, -9.91036128682474, 23.178608735173533, 20.015126073865332, -27.897546541926854, 38.04050429683899, 40.74571437108814, -143.45446305216896, 176.3510561229884, 499.65575005457987, -667.9538607835633, 858.117991907281, 326.0217722889555, -3773.3744493652216, 2016.6771971211615, 6879.53092214473, 5931.463425814544, 1901.6257147010767, 311.8834932712269, 
-74.35814982156556, 99.52661889240866, 7.692530900433519, 31.374157092612826, 9.215441330581626, 27.05966992909854, -5.469454904360637, -2.1679172647988665, 11.504048459674893, 8.272670331884452, 2.8105011259677477, -16.650001236601167, -19.264958431336208, 3.308525826334527, -7.662800871957086, 1.6021912186871055, 0.7107003298815783, -3.8775860946725946, -3.4098226484174683, -0.6352517595556807, 6.0934701729968666, -1.5508528746674821, -12.663719196146594, 2.03337382757011, 6.992919032766611, -6.560583620662686, -23.2858102733598, -17.244677486308948, -34.862650354888686, -17.36423434407276, -3.2060118571234915, -12.122632772414923, 31.43926472997648, -21.018613712496354, -15.575285813399695, 45.1953427295479, -50.89507970190987, -32.44404398915065, 56.25469526153102, 29.163223716103282, -35.6312157621931, -17.24163424758684, 6.499577608578568, -9.05015815088685, 3.9356887830548675, 2.483136007077483, 0.2870827992906317, 34.08889970155017, -17.441981920769194, -14.849076623231625, 11.211024407328969, -49.689298683973284, 31.260086797522952, -45.20544730681791, 97.32324234163818, -338.19432142085407, -363.85047441475945, 586.3103893382687, -936.0051606671358, 309.66381506630177, 2547.0295906602646, -2020.4025351629575, -4702.715749371677, -3757.8262648676255, -978.0817724028258, -196.9051289733559, 96.27599351735653, -104.91042771673807, 29.506831391469532, -44.42433785352457, 4.260770200200234, -33.6855822961914, 13.674844505994647, -7.901967476302044, -4.5175625560986115, -8.166440831431812, 1.8909758433590165, 23.211227336056744, 11.868166695761996, -0.2598223771357907, 7.199223211040882, -2.3314926372748697, 0.3011647382892016, 2.659336065206717, 2.585018828950795, -10.176793711115288, -5.4362196984150435, 3.700120529372998, 16.304638561985584, -3.820195270629421, -4.619816108411887, 12.087587316831772, 24.98158203600499, 13.265017059434847, 34.92587105775968, 8.20848906354708, 0.3420119920956047, 14.517223147132361, -39.233095792090154, 22.838420096687567, 
28.277292945485083, -62.47255437420702, 37.80983564579775, 63.71221791283831, -40.60705426045664, -52.27630087090922, 22.23809586279663, 8.936852841526353, 5.198740562063181, 5.856733890688079, -7.420825559639744, 7.348372440689603, -19.038001026588947, -15.236543843743132, 10.40667362836928, 0.2581217901183676, 40.011168387080275, -14.228529651816052, 12.122051837010117, 58.57751168994682, -116.38709531448986, 307.67655844992953, 82.0409876584679, -427.48363820471366, 769.694307108181, -572.8496056773689, -1481.9670198603942, 1692.3891575038604, 3224.1946994106966, 2334.5628532823243, 539.2578823250078, 99.79851966287671, -66.07620500229206, 72.5057806481612, -27.723058093259592, 37.361845034811, -0.5495750837760123, 24.89399585763631, -11.98267352510717, 9.511231581810003, -0.39298379246627735, 7.893748453132626, -3.205869475056354, -20.9296511044314, -5.823375354165367, -2.548310878982423, -5.253949673463076, 1.3007452021059187, -0.37658188090582867, -1.9426897442542788, 6.714175108481619, 10.254755861355862, 5.665716921656567, -6.878839875702287, -19.55340330721278, 5.1061583635224075, 1.981928619931099, -17.08875468585146, -18.76190101420737, -14.739660421129818, -32.79006635211661, 1.766882582784564, 5.299698873724218, -18.008637947007085, 43.849440002530045, -18.01552010337252, -40.45800452477133, 69.47662518872161, -18.053017406990826, -82.39065510464845, 16.386823921405952, 53.51571386733829, -2.4748657769407734, 1.4563945986151277, -15.367961369423863, 1.7038214676211998, 0.7960879451963411, -6.320203922522891, 29.531789458815872, -0.08367118115868265, -4.397574338179268, 7.6045125532747475, -57.72865654709315, 6.476626226892316, 18.522051665363172, -28.761859654288983, 26.292438064634954, -193.98083041147532, 232.26554224447835, 299.6080718430081, -589.9422365900305, 602.921216565758, 753.441136324568, -1330.3843972056773, -2208.8063768611846, -1413.9603685547368, -313.1760430362791, -31.07840383537456, 28.1949020097251, -35.75276830641553, 
13.254297386191375, -24.778387996472805, -7.94414863589118, -10.390470084515256, 5.695028503529315, -6.943701848733911, 2.7706065838930667, -11.367605775774106, 7.269296640331096, 16.294492849582376, 1.5471114222855673, 3.2815471703768138, 3.5324256956790614, -0.500253930378529, 0.07742208351482312, -4.11735665463095, -7.719229015392803, -10.15930403864997, -6.0580072843519766, 13.686440010618227, 18.149862960287603, -4.4801421586333845, -1.5832035296180311, 17.816266870811436, 16.33161439148444, 16.940521586663937, 26.367594842504992, -8.48529979952396, -11.470371276969644, 19.720836162679948, -45.23912356345339, 12.08858425955641, 49.00588255336856, -65.68964379780563, -4.921218419017294, 77.86815331739786, 8.611236327272298, -32.64021309247393, -14.90094266530778, -5.112016754176886, 11.94854296771108, 0.5967597743918138, 5.433484130218841, -10.723207680989628, -7.324411151885928, -15.766020689245545, 2.492283385477231, 17.888202943099998, 46.52812792005593, -29.278923576741594, -15.263060248937727, 51.565806570022296, -119.42935746071022, 4.533627089397239, -337.9048069880758, -27.44540161347557, 473.19362205994327, -501.71200903974545, -345.94148128040297, 1014.4203504136821, 1493.6938452983827, 842.395498494954, 183.73674783564482, -4.901216008748269, -3.6828763067659565, 9.582836905525578, 1.0096431188220623, 14.419886803614848, 12.167592611664357, -0.1365914770359279, 0.20716272594191423, 5.918868339872109, -0.755433172544053, 11.485934776780406, -10.359126154961189, -11.012095558438599, 0.03864722043451558, -2.678189898539363, -2.2469355182325703, 0.19315143509905872, 2.2952188983030974, 4.273447247831362, 8.962184605244312, 9.036715627501874, 4.85795275643341, -23.28453384237947, -13.51371358864443, 3.1386385564100547, 2.839553613647385, -16.79842898433705, -19.220923242770375, -16.760537217439836, -18.387618040511864, 12.273140378562132, 17.435978932988913, -17.384641868579447, 38.39768767393157, -9.240549272016315, -45.97564296384611, 51.54356601868342, 
23.83567661550767, -53.50513350933633, -24.331791467748697, 4.2322395170882885, 18.870049805026554, 10.285228729305008, -10.833115429111105, -4.7394607779128, 3.332716610647175, -1.848544001367395, 14.071538238073305, 21.789878235265792, -20.446236266015323, -23.725595789596667, -14.056310474926239, 31.904824872881232, -43.3053556203782, 96.23428121325114, 166.2757704000497, -11.259741912012938, 220.13069437738991, -107.33720178928658, -308.27256728209363, 365.32167638430735, 74.65516704160491, -787.7184324101835, -1001.6444173466263, -492.4132518935566, -105.53622114923117, 17.763380086364563, -8.097343586226032, 3.4922186834101994, -10.132416017132943, -8.622619313779612, -8.540139011610844, 4.8477190962988725, -5.966051744566336, -7.7129652834219655, -3.379886761681517, -6.788570207697708, 12.01289209650742, 6.909325018324946, -0.3401094465377929, 1.8877079566450021, 1.437554109942319, -0.4965001027353081, -2.4631202800227086, -5.503195155073648, -9.025006721329644, -7.360226849981247, -0.9212537078430828, 31.1747699290737, 6.101947364966319, -4.036782037995184, 0.681083697085381, 17.684566543563022, 22.769354759039665, 14.874658909401926, 9.271789525717931, -15.8586067556233, -20.631676258499233, 12.790112373337672, -24.005405255468045, 7.7539002835977335, 31.190074899154666, -28.643016922695818, -29.53947137916169, 21.34557659983619, 23.716965111443, 22.641974716817266, -20.584708852540206, -6.799810510100486, 12.408831877713654, -10.824870494441987, 15.201087103142074, 0.42226509097584797, -40.04710395073722, 23.458195820074128, 1.972626715478636, 1.5078517265585665, 37.592792246427074, -32.831232012237706, -24.442224060658543, -123.05651884255013, -66.1547496731675, 77.13964432524861, -103.59636365366681, 169.94635487469375, 141.4451133461647, -276.72482111638635, 53.20724721556502, 606.8402246567389, 662.3521731889306, 283.82427530624136, 54.965113472177556, -18.003832906220996, 10.286974038730285, -5.296049708593128, 12.126471722012212, 7.03773935577442, 
0.45169846835747957, -1.7038292309124248, 11.940963751749402, 5.124545080058316, 4.387966457690086, 0.9062600577358529, -11.310862970089225, -4.234879050718306, 0.30543480019395436, -1.1792710971066493, -0.02576985602816561, 1.0687035362301025, 2.4986445672523483, 7.177283186275085, 6.9489759019492885, 6.526181896479224, -5.5567929045716555, -32.09180929336649, 1.9922375141874467, 2.176628649264937, -5.709378822781728, -14.684210737010321, -28.916034988461213, -10.298214394574456, 3.783483833170449, 15.274074057692982, 18.725813353556486, -5.0291650017142, 7.224723608598208, -8.73569679638485, -6.403636143788354, 3.55130388794907, 24.848342494107392, 9.668909318310508, -17.871283680473102, -26.323919820108813, 15.683448296211445, -10.145167035650418, 16.649228254132964, -13.326265921937429, -9.804755547593352, 23.851720951195475, -5.589346818136235, 8.114507871077887, -1.1569161918521378, -17.918962105780068, -23.858875059810632, 51.655489903199076, 83.1639048940294, 34.71772099006614, -68.1542721510801, -86.35114498825384, 71.8672979640062, -172.91201019629673, -30.38141418805809, 211.23809094244302, -86.34086423645881, -448.3851075692035, -429.0513396771671, -163.33105203329666, -21.46788919300507, 11.922855266431677, -7.722077866216764, 2.854948430491167, -11.451121182502602, -5.010779426852556, 4.612285970211029, -5.253976552255057, -13.020340685660099, -3.8346171809482086, -3.507088744619868, 2.847061316071247, 9.461190771933703, 2.3896122744393393, -0.26445871920378294, -0.5045214049491964, 0.3778055050519453, -1.9476021500303662, -2.8934739437942074, -8.873693232165222, -3.398893860492872, -10.819815833398291, 13.719721168152866, 18.902544362635656, -2.756360770909542, -1.285917571293601, 8.604198368598434, 13.243667345347362, 31.264685678272016, 0.8116904487319282, -15.883489165434781, -11.292048309169765, -16.483621561072795, -5.6955957006797036, 9.217917692909941, 7.9200766187835745, -20.416925598027447, 13.32270571807068, -23.04362629213774, 
-21.695523239036447, 3.3703096574526925, 16.461461296825398, 0.6102008183284692, 1.4217663357407517, -15.898040227871896, 27.814203348060836, -23.487783832156495, 7.228851645419186, 8.58958698635195, -51.79156644293619, 34.22613784343034, 34.531680106149835, -47.21607705372475, -47.44320522809219, -24.215800021645855, 44.1267371495303, 135.23492099846715, 27.99903638790975, -56.91965694322242, 137.1303461266998, -3.1721089369636237, -149.26178317987697, 87.36546811198032, 314.74856557282453, 271.75176940395403, 89.62815752081009, 3.825933115504533, -3.059726371790037, 2.5015338789030683, -0.05374854752256976, 14.904930425581142, -0.622993928340379, 0.4344255444029725, 7.412330707239071, 11.123678333217375, 2.8779052533739162, 2.076336901723982, -4.523709324943444, -7.132079039295296, -1.1231313255006614, -0.1251208323645899, 0.5765600092015335, -0.47936378848697797, 2.6278198131783244, 3.642993761833407, 10.402059811333162, 2.1884540410454605, 25.562798929861867, -17.37330718727552, -7.362865994462266, 3.8482129221688157, 2.500498688325507, -9.746491558430124, -9.66852361484661, -30.40716845411883, 9.977106611582752, 25.375741558395426, 10.955829674271977, 13.294143759446273, 12.601361247181407, -15.102487053677551, -0.5146972588457762, 36.07629564802083, -14.979207115170247, 20.586571428255855, 18.827397095231643, 10.927815550189596, -17.32469481672379, 5.5916110512632695, -3.3485701760822817, -1.5226564548565098, 6.550108046014285, -4.592128843863917, -13.841583604926903, 33.98455944678156, 8.961285590546911, -19.040801827731524, -2.2520039689358637, 67.37600032292121, -10.416213983739679, -37.8905320007319, -68.82303244457943, -139.97759709818052, 17.01535278806709, 33.05560884900648, -77.57730870297138, 13.315421354614406, 97.62727754917276, -86.46398475841093, -214.82610186213276, -165.87551786301287, -40.745385276072085, -3.992729500302654, 0.2679283178008053, 0.04693371909790078, -7.412531754498781, -23.030163570324685, 1.387044777824523, -4.141463199811873, 
-8.39654568113704, -8.456869713419927, -2.0904414881346693, -0.9430034146603861, 4.746745584793083, 4.58549924776611, -0.1795843021965147, 0.03373093814930345, -0.4782400976070612, 0.20420627327789623, -2.574246791746843, -4.7349546049635185, -10.695026510048798, -5.6977536031791605, -29.62682430509215, 20.783761143306496, -3.0513956094483032, 5.429873261327201, -2.0037902575377764, 6.397839535886921, 11.465263464215353, 28.756680999177746, -17.861804526021636, -26.150200611373705, -5.98125930560783, -7.588449154608537, -17.637599801494535, 1.961137367704897, -2.9565160201892784, -33.56142962681558, 17.49835927589781, -16.432080507913486, -10.984794923849195, -7.473126366392642, 7.187756853942487, -5.618425038805205, 14.245664475431761, -13.745767381555739, 1.8577446993217368, 13.817173139888748, -5.267359865773609, -23.09345244105987, 17.3037120776519, -18.387013217423686, -24.666258375955476, -23.349007047551137, 42.95795959993935, 66.3241141511344, 51.66179790145246, 84.95912815922745, -44.87020147709551, -4.854789312106823, 17.516997576587404, -38.42085762341263, -63.55959441372204, 86.60684042192514, 137.54622290964076, 95.15662219151392, 15.818541625405327, 5.685779552308521, -1.9470372839573378, 5.42742555172259, 14.177931562029146, 27.205639869143138, -0.15327114587388044, 4.431609766887573, 7.796868730826877, 6.101681421742523, 1.4763247585956802, 0.29401066502636375, -3.671297564554302, 0.0905302993711896, 0.017933886491825684, 0.10298139909256616, 0.41478410861847476, 0.2923201818973283, 2.374215959778924, 5.99398985721284, 12.408406020932542, 5.937812254449268, 12.631333511055422, -15.698862946476241, 0.574113762271562, -18.114443699473657, -6.515544687537279, -6.046466715354102, -17.157447052769665, -26.57667355757033, 15.14168041636882, 12.610999878938257, -3.62698223276953, 5.789596531164619, 7.782084969754447, -3.960070252470326, -5.632016708039493, 12.746517408856313, -17.083773419332225, 13.66051478461051, -0.3247359582603285, 8.552194365102638, 
-3.035725868639953, -3.6323610257056735, -2.9594006423360497, 11.70712499682571, -14.814899648051718, 3.67266950147507, 5.098992364023819, -4.52195020917574, -14.042569920320084, 42.40307086268145, 5.334964328999939, -27.071559749809328, -31.52673058956584, -43.53380707360568, -32.54966766320806, -7.603166634942414, 50.734186968694, -13.570265032028335, 27.129388035079074, 66.43708499128108, 39.30429073385438, -71.60794971234657, -74.59145359447193, -52.101361488831074, -4.592789266391059, 2.2263817829012535, -4.549368315109204, -10.046853277748454, -18.231126808781035, -19.399915818336563, 0.3897790890750356, -2.776823724917455, -6.413567283552903, -4.026146154934572, -1.2129386069676318, -0.11163184654855957, -0.11976477851510649, -0.11843885643691651, 0.07160132632866083, -0.07258650991058237, -0.683399059598544, -0.20322006671980525, -2.527371270586654, -9.096643671957121, -2.187325007558372, -2.059760468384689, 2.1793185305014244, 13.219370018854901, 3.1020073298700064, 38.474439842783624, 14.87346440143968, 4.921848296249132, 28.80146074079056, 32.803913444518614, -16.747721850851235, 15.019133365825114, 16.231569950597027, -3.0938099736350644, 10.143089619521758, 6.1976601466193735, 9.580824234577719, 2.93083179930875, 11.110710394162348, -4.850859770116536, -0.5147887120616356, -10.917378850057778, 11.311018453655164, -1.9444148001855934, -2.0386402458493826, 0.9451610469786095, 5.4177049118076175, -6.795337771462089, 5.747295235889573, 15.853943837738214, 11.10457986557486, -28.832970525998523, 20.59384583323181, 51.88111565840217, -11.26616097670742, -1.2983465208099034, 23.401785367590783, -43.87829599547721, -39.71546202843616, 5.378514081854062, -49.24826487775263, -70.92371469307702, -18.0089683028175, 41.02889996682417, 38.45514624712432, 21.101060949963603, -7.1436041096120535, 0.8818428436430374, 7.256485766504508, 6.239030781462592, 20.967409064059144, 12.861759211246364, -1.3717207886303562, 1.8681796696261197, 4.873003671136534, 
2.6000392856188705, 0.8519122838731135, 0.08558831403462121, 0.06635074476062182, 0.2255225703505195, -0.23574017608178824, 0.18569926818517404, 0.4319674834489657, -0.5933292880536043, 4.694080307150631, -2.48657884909921, -3.1823111502533568, -8.021456161673878, -18.852227685332565, -13.312930296698731, -11.424750558693063, -67.39844663827617, -27.368961371403852, 4.194821703240318, -49.517584950039335, -51.91799688464874, 12.946941843648958, -45.34804233056817, -27.50183715657261, 1.0247051293848417, -23.21228336023889, 10.410972680441661, 9.458786511505608, 7.7209395645381, -11.76237357087279, -1.6742350657351066, 4.811677456965436, 7.04050906173431, -11.698059586368972, 4.867874046977962, 1.0966184698711516, -5.619951238304733, -1.1468816766715382, 5.072748769042872, -20.4787950030432, -18.15769519789359, -18.38143484539935, 9.60585727743181, -27.625070005762996, -42.92538625946215, 64.84874325899624, 37.102799504834614, -8.435044496313921, 63.502924023379656, 32.79436363967929, 12.845620117877042, 69.08547256155984, 61.46958464946772, 6.683856471056261, -20.233163828805473, -11.625932128531142, 6.487264798561586, 9.799250484626953, -3.2843190629641583, 1.9200126897613092, -7.082907562441113, -20.500202978936706, -7.186418806770071, 2.4320246245382977, -1.2229310446202621, -3.5666229943637036, -1.325119879956255, 0.005354364927453017, -0.024257501820973998, -0.057193848972378045, -0.2803476743592225, 0.31013307132383167, -0.12964828217935004, -0.029824825758178474, 1.18670845050969, 0.3337855705110962, 6.385337088912856, 3.0485999787473816, 18.782297633096512, 15.057706740508072, 10.891226889954265, 32.62190466024071, 75.1428778014713, 12.21932089101654, 6.5799131471956995, 70.91500721900341, 23.20454512225439, 12.26185594698913, 70.3748157619489, 5.677083586412147, 5.771883243463467, 19.455932997798396, -21.356458579590576, -29.958349905781024, -18.895096823719268, 15.962296496583264, -0.2718468092197373, -3.0722498195652546, -4.24578599842663, 
8.254768830436738, -5.416944099270743, 1.0509789697833831, 5.904688123072435, -1.70152751966742, 0.06868221072279158, 20.851379326231477, 17.615870178987887, 24.190309966417324, -0.47804405210445267, 26.582942848786, -7.040793098944479, -71.02013708221654, -12.886724095938801, -25.573340777373943, -51.145187772380446, -21.212979325089254, -27.939741989469773, -69.41979832745939, -38.2319158150882, -0.004931584304217473, 9.014649360258172, -9.311570620165586, -9.516596670426283, -7.749148672046782, -0.951562065041518, -0.9956339527975748, 10.677301781373385, 18.059675262608728, 4.224717932424484, -3.042320901226516, 0.8035402883156827, 2.19557106530364, 0.04049190247033609, 0.02286454045728037, -0.010877662208590858, 0.0489095308994753, 0.2716740101350238, -0.3603248626216616, 0.2667092835386554, -0.15907369566858925, -2.07615299679157, 2.2730711319585644, -4.024672479107777, 0.8121302312852532, -14.420658506221766, -16.07537896987571, -11.319423174918029, -30.909644930960877, -57.59098904900156, -7.944201324392173, -12.9948549942833, -62.54683303648092, -10.928189093209513, -10.545899718987574, -57.65322807660734, 6.667985841030606, 5.639372600832703, -17.623137806357445, 15.076453982485924, 28.63538922352347, 21.57431850425368, -10.948894976437034, -1.13693735420699, -0.16764828758816294, 4.072571519812789, -6.825158815204328, 5.685061463550555, -2.323037753796875, -3.8224914587940693, -3.3975168626430614, 0.7183176117205056, -10.677665247757524, -13.326681539945948, -17.154038784391734, -1.941953539895895, -17.599439642882746, 16.15824287952732, 45.09250906925689, 1.5088255518908567, 21.876272796501063, 36.18864321549387, 10.277239748918909, 21.44846587325475, 52.23626461879992, 19.081606086438637, -2.596102338455226, -8.285959941729448, 10.979023278489015, 11.268795917216018, 5.64312216972617, -0.2289314379528635, -0.04958145511190648, -13.888099386836586, -14.242690640404007, -2.841581020729466, 2.9683097913138785, -0.6813355559660257, 0.016813405991878357, 
-0.017346650488843, -0.07724486441427034, 0.034173070698672536, 0.0011650243450179642, -0.3597954945977072, 0.5670109481193658, -0.9726422280388296, 1.5165837275900464, 0.6459381475589211, -5.473977920189655, 5.1110117928840575, -4.883274311726506, 14.543196605166598, 13.350628525567963, 14.148062098737817, 24.230709978967898, 44.45219313005558, 3.625902321504051, 20.576974960434278, 48.52083206677356, 4.588444975431604, 7.7553978475772025, 43.62289126264694, -11.55616013293395, -10.970738963097212, 18.020311232364914, -7.570562011919711, -16.527052228297084, -22.848759958368166, 4.898999424891567, 0.8370037824764976, 2.7231090309508597, -3.370508342247554, 5.319983895348784, -5.220157963743307, 2.3732368436266236, 0.8991424360068495, 8.393816123355881, -5.4345082947660766, 6.978287980745702, 8.277706665094179, 8.924845759633898, 3.8077058486546114, 17.84969545903999, -17.59429017042158, -28.047954154081253, 9.415573921617247, -19.4603770190042, -24.914318748991725, -9.142504882828181, -9.667054310847142, -37.80958002529997, -8.81417134459468, -1.1321362882551727, 9.11286294637905, -14.559241407334188, -10.565467259006736, -3.804672106306951, 1.8185582369279583, 0.9048382148764617, 14.161828235068219, 10.539170897560243, 1.6326267386325326, -2.409932603312094, 0.04991480176152612, -0.034573060429635774, -0.02166443972934407, 0.177264754794361, -0.23817043250287007, 0.3378171373501911, 0.011180245424542911, -0.6272914629688655, 2.259230176266318, -3.471044240263195, 3.275095237421534, 5.1133511765975355, -4.289491722375411, 4.763945491874505, -15.159581241323643, -7.162315232730538, -15.013565688472646, -20.06100911863866, -31.684120206518998, 0.0464801999273427, -26.473933399794426, -32.724220230597666, -1.1301978171641784, -5.070903737475513, -29.49888587710778, 10.322619280959321, 7.142707956300583, -15.559479603484048, -1.0258051479724914, 3.30914272169984, 18.213394996735683, -0.6529990030500564, 1.2590999788778066, -4.1020667481342485, 1.798800173794537, 
-3.0837997755437634, 3.226315417356413, -1.0295001891847577, 0.080455095587876, -9.054645696616975, 7.969915433932785, -8.557810334755503, -2.280248263407136, -1.1163540852094196, -9.415847502071237, -18.707542495784455, 18.68392084446524, 13.379413771446691, -15.141268121408343, 18.55296276229393, 18.88242123748057, 9.81802714190775, 5.106369862171715, 25.57587252265896, 4.516989585299445, 3.1071733598905387, -5.298008646859562, 17.35471127985603, 7.555180072596727, 2.756895553914314, -2.732299289810646, -2.199118239788917, -12.546563927857498, -7.322987581259652, -0.8268789451966525, -0.03858113319911793, -0.03486228294176184, 0.04753403279518575, -0.00907131411200853, -0.06997866368531919, 0.15041914332916648, -0.3825896206022692, 0.3125414986504806, -0.2515103635979308, -0.9618778978179389, 2.750708882875605, -4.2034121099195705, -1.5330664175768893, 2.4219931024349473, 1.4837994590661645, 14.493676176718152, 2.499419942646848, 14.192727613342644, 25.125736637539884, 20.125611129376185, -1.7323125507274755, 31.12694404644846, 21.03991579416043, 2.589435209533911, 9.299795879342104, 16.196375687920636, -5.457905922178489, -3.8701242752703457, 7.271920333335557, 2.5189395247230513, 1.2424160282293126, -9.130763075830952, -1.3203641668708062, -2.621491178531651, 3.629030893080633, -0.07509168048620045, 1.1909052201501011, -1.6304564697078938, 0.37087447533541107, 0.20363524429342408, 6.600207665974487, -4.2460436013339145, 13.520212077824038, -0.48059343407073696, -7.164509620960035, 16.81637516177685, 11.245804011809094, -23.229688937363395, -0.7502736865837628, 8.060802372431098, -18.958832909799085, -13.371341784697174, -11.31571937869141, -12.581508229805959, -18.74118429712635, -3.3537795757080815, -3.447976589050072, -4.572167323418702, -17.288012637779886, -5.635099184247143, -1.955799928166252, 2.4011420599684627, 3.3964864198943103, 10.00156857530094, 4.413358249239557, -0.030286195762556287, 0.046383266150491646, 0.016376866068601, -0.046406762218775224, 
0.04174168285036566, -0.032305030406261655, -0.11286001810426766, 0.3596473093159082, -0.2136233426723067, 0.17693701635254624, 0.21862508969887012, -1.465493075339751, 2.0319942677428413, 0.2973182490674972, -2.186650956252865, -7.999392746104902, -14.036425078174862, 2.5279217938305245, -15.994821927572614, -30.374623882506434, -10.518639456851327, -2.8085347017478592, -31.295703426645723, -11.90215583342295, -8.87502990157224, -14.799188385749227, -9.321160062039134, -0.44878753846765185, 2.2618244199877604, -0.7096764318604154, 1.1812987102324128, 1.1302789320430244, 3.4130321524004743, 1.4947041116936446, 3.1257023926917253, -2.28680132320396, -1.6025377731512673, 0.35328944121362477, 1.298886415314449, -0.8176556097682344, -0.8539847389501469, -2.601928858276188, -0.7836785347196001, -18.778647247190744, 3.675246126538612, 7.9575575276625505, -22.31041873666751, 2.5383615974912757, 21.34924400909555, -4.958086405040554, 4.009520236012346, 20.08950846132849, 7.776803463600429, 12.803363887163226, 21.29268729898465, 16.11078979018867, 3.9946257871024806, 5.058796428781574, 12.359850971714637, 16.126817927003124, 5.406764350672354, 0.9797420650844809, -1.5281960254200992, -3.713323603178262, -6.522856270948019, -0.041436752219606986, 0.03620409333691191, -0.06780925154312355, 0.021331111989612354, 0.016338493835206264, -0.055626757645283145, 0.09509797579426053, 0.08841749604846214, -0.14412720554970146, -0.01298187303729655, 0.12958624074459307, -0.005675850662051798, 0.1429847615428003, -0.5705508186420732, -0.31776956443743326, 2.2938965797136914, 11.898864392687296, 11.804582597700938, -5.009104613717343, 17.792936223908658, 30.238615334108292, 4.972401265799359, 8.146082176285766, 26.723555474190455, 7.082912105555089, 11.773339753426978, 15.737501304937318, 7.909672956276368, 4.365154395636344, -0.15586305437914708, -1.1283720067953182, -3.027782741326523, -3.2725231090569284, -1.2333315489178787, -1.875683751420496, -2.6066697154149767, 1.652552381944648, 
1.997679770684074, -0.9657723917626836, -0.9955371475098961, 0.901707863651162, 1.1256807571722494, -1.3153891534546271, 4.50831619807381, 17.83709119310369, -6.838186473659379, -2.139282554381409, 21.66838832051606, -10.064190439225106, -14.498098544310256, 5.857911811582486, -10.357884597364604, -17.638122497464728, -4.488622687156108, -11.771329663128254, -22.10781619550278, -13.204428927739476, -3.945071790026082, -5.968381461127442, -14.5182093121086, -13.041374838460719, -5.2210343109761705, 0.06243544294956803, 0.4707504791451749, 2.8132638519830873, -0.02212921996705791, -0.003827449436169664, -0.01628579664388745, 0.07877874834922015, -0.053735793997437845, -0.0030266268025146885, 0.09718290600571376, -0.11991719303067105, -0.18574129734087075, 0.06915872913180078, -0.16626656073861737, -0.15949374669594202, 0.14284471624284392, 0.22169770837238081, 0.05858672963375552, 0.5800711270855471, -3.773974476762496, -14.202438365998127, -6.505426669413802, 4.223210762029528, -21.572786124367397, -24.30261066833195, -1.8726136756612222, -13.132920323105099, -19.37393314650117, -5.180721355610899, -12.496525572432281, -12.883714154920668, -7.323529793358086, -4.990865457257191, -0.7949836518172736, 1.6891056670197806, 3.1798928896829173, 2.8157833461175024, 0.5135279135988319, 2.289934193202004, 1.51953549919644, -1.5518022896674357, -1.3730759534467671, 0.9663881768865372, 0.27005551339903966, -0.5959433552280815, -0.890605316801292, 3.1408779491729737, -6.814438465479008, -12.410006407332874, 8.069605178713745, -4.089822292630435, -15.553095893225537, 11.409850141807999, 7.341842595929254, -3.9680895533331357, 11.053923662694963, 12.658106276340819, 2.7719820918571854, 10.148201409936611, 17.519788333362097, 9.359182438918843, 2.946988011939604, 6.0684040168911215, 12.21698856954677, 9.59231603897828, 3.8343054544289785, -0.08394915533451769, 0.04673825031323043] \ ,relativeTolerance,absoluteTolerance,requireSameLength=False) del data f.close() print 
"numFailures:",numFailures exit(numFailures > 0)
664.794118
42,315
0.820732
4,567
45,206
8.121305
0.5739
0.000809
0.002265
0.004206
0.00852
0.006525
0.005311
0.004044
0.004044
0.004044
0
0.834684
0.055302
45,206
67
42,316
674.716418
0.033813
0.014025
0
0.217391
0
0
0.002177
0.000763
0
0
0
0
0
0
null
null
0
0
null
null
0.021739
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
bbe8c803a83dffd2b3a2f1bab9d9019db789e702
346
py
Python
model/__init__.py
Jueast/VLAE_Pytorch
8373390008d611909997e4a3de8396f617d53a49
[ "MIT" ]
null
null
null
model/__init__.py
Jueast/VLAE_Pytorch
8373390008d611909997e4a3de8396f617d53a49
[ "MIT" ]
null
null
null
model/__init__.py
Jueast/VLAE_Pytorch
8373390008d611909997e4a3de8396f617d53a49
[ "MIT" ]
null
null
null
try: # Works for python 3 from model.abstract_VAE import * from model.flat_VAE import NaiveVAE, BetaVAE, MMDVAE from model.VLAE import VLAE, MMDVLAE, CNNVLAE from model.VAEGAN import VAEGAN except: # Works for python 2 from abstract_VAE import * from flat_VAE import NaiveVAE, BetaVAE from VLAE import VLAE, CNNVLAE
38.444444
56
0.734104
50
346
5
0.4
0.144
0.112
0.168
0.224
0
0
0
0
0
0
0.007407
0.219653
346
9
57
38.444444
0.918519
0.106936
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.777778
0
0.777778
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
bbf143f66e157fac98547bafc00821b89194b690
223
py
Python
homebot/formatter/__init__.py
HazardDede/homebot
7a44f5470bdd84c1e7660cf48955d44a9e4c317a
[ "MIT" ]
null
null
null
homebot/formatter/__init__.py
HazardDede/homebot
7a44f5470bdd84c1e7660cf48955d44a9e4c317a
[ "MIT" ]
null
null
null
homebot/formatter/__init__.py
HazardDede/homebot
7a44f5470bdd84c1e7660cf48955d44a9e4c317a
[ "MIT" ]
null
null
null
"""Formatter package.""" from homebot.formatter.base import Formatter, StringFormat from homebot.formatter import help, slack # pylint: disable=redefined-builtin __all__ = ['help', 'slack', 'Formatter', 'StringFormat']
27.875
78
0.753363
24
223
6.833333
0.583333
0.134146
0.243902
0
0
0
0
0
0
0
0
0
0.112108
223
7
79
31.857143
0.828283
0.237668
0
0
0
0
0.182927
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
a52bc5e80de5124ed23e93e1f9d1aeceba1a301e
237
py
Python
fima/viz/__init__.py
gpiantoni/fima
52bba27409f99dc22f3495e3adc907201f69387e
[ "MIT" ]
null
null
null
fima/viz/__init__.py
gpiantoni/fima
52bba27409f99dc22f3495e3adc907201f69387e
[ "MIT" ]
null
null
null
fima/viz/__init__.py
gpiantoni/fima
52bba27409f99dc22f3495e3adc907201f69387e
[ "MIT" ]
null
null
null
"""Functions to plot the results of the analyses""" from .tfr_chan import plot_tfr from .tfr_time import plot_tfr_time from .surf import plot_surf from .utils import to_div, to_html, to_png from .freqtime import plot_conditions_per_chan
33.857143
51
0.818565
42
237
4.333333
0.47619
0.21978
0.142857
0
0
0
0
0
0
0
0
0
0.126582
237
6
52
39.5
0.879227
0.189873
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a5323eafd7866ac3b6e89b60864f4f1936c8acd2
908
py
Python
tests/conftest.py
dlce-eva/python-nexus
44cd8eeece1522b2839fbf4d586913ccdef7fbfc
[ "BSD-2-Clause" ]
7
2020-12-16T14:17:15.000Z
2021-12-11T00:26:04.000Z
tests/conftest.py
dlce-eva/python-nexus
44cd8eeece1522b2839fbf4d586913ccdef7fbfc
[ "BSD-2-Clause" ]
13
2021-01-19T11:58:47.000Z
2022-03-28T20:52:28.000Z
tests/conftest.py
dlce-eva/python-nexus
44cd8eeece1522b2839fbf4d586913ccdef7fbfc
[ "BSD-2-Clause" ]
null
null
null
import pathlib import pytest from nexus import NexusReader @pytest.fixture def regression(): return pathlib.Path(__file__).parent / 'regression' @pytest.fixture def examples(): return pathlib.Path(__file__).parent / 'examples' @pytest.fixture def make_reader(examples): def _make(fname): return NexusReader.from_file(examples / fname) return _make @pytest.fixture def nex(make_reader): return make_reader('example.nex') @pytest.fixture def nex2(make_reader): return make_reader('example2.nex') @pytest.fixture def nexc(make_reader): return make_reader('example-characters.nex') @pytest.fixture def trees(make_reader): return make_reader('example.trees') @pytest.fixture def trees_translated(make_reader): return make_reader('example-translated.trees') @pytest.fixture def trees_beast(make_reader): return make_reader('example-beast.trees')
17.132075
55
0.746696
116
908
5.62069
0.224138
0.199387
0.220859
0.184049
0.455521
0.253067
0
0
0
0
0
0.002574
0.144273
908
52
56
17.461538
0.836551
0
0
0.28125
0
0
0.131057
0.050661
0
0
0
0
0
1
0.3125
false
0
0.09375
0.28125
0.71875
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
a57b37f5374bf5a218628f6acd4b14bde5c12f49
83
py
Python
graphite_feeder/handler/event/appliance/light/lux_balancing/__init__.py
majamassarini/automate-graphite-feeder
0f17f99bbdaab86e10e0b7d424d055ff44fc4ca0
[ "MIT" ]
null
null
null
graphite_feeder/handler/event/appliance/light/lux_balancing/__init__.py
majamassarini/automate-graphite-feeder
0f17f99bbdaab86e10e0b7d424d055ff44fc4ca0
[ "MIT" ]
null
null
null
graphite_feeder/handler/event/appliance/light/lux_balancing/__init__.py
majamassarini/automate-graphite-feeder
0f17f99bbdaab86e10e0b7d424d055ff44fc4ca0
[ "MIT" ]
null
null
null
from graphite_feeder.handler.event.appliance.light.lux_balancing import brightness
41.5
82
0.891566
11
83
6.545455
1
0
0
0
0
0
0
0
0
0
0
0
0.048193
83
1
83
83
0.911392
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a5a762e11b57e4879fd377c407e2c50e04d1ac0a
38,784
py
Python
multibeam_em712/decode.py
sustain-lab/multibeam-em712
90b2999f9ac2c605fd516d1e7317b343819a6fca
[ "MIT" ]
null
null
null
multibeam_em712/decode.py
sustain-lab/multibeam-em712
90b2999f9ac2c605fd516d1e7317b343819a6fca
[ "MIT" ]
null
null
null
multibeam_em712/decode.py
sustain-lab/multibeam-em712
90b2999f9ac2c605fd516d1e7317b343819a6fca
[ "MIT" ]
null
null
null
import glob import os import sys import struct import time import datetime import utm import xml.etree.ElementTree as ET import requests import dateutil.parser times = [] tides = [] noTides = -1 min_e = min_n = 99.0 max_e = max_n = 0.0 start_t = stop_t = -1 lastidt = 0 outfile = 0 # Version 2 of MRZ can contain seabed image from pings of different frequencies. # This is one way to separate them. freqs = [] # Files used to store different seabed images from different frequencies. freqfilewrites = [] activeOnOff=0 stdstr = "" #Receive an array of all tokens from a seabedimage-file #Return a string in csv-format def makeStringFromSeabedImage(tokval): no = len(tokval) # Interpolate between positions. First I skip the samples outside center # sample on each side prevX = "" prevY = "" thisX = "" thisY = "" prevIdx = -1 retval = "" for i in range(no): found = tokval[i].find('(') if (found >= 0): nxt = tokval[i + 1].find(')') if (nxt < 0): return "error" if (len(thisY) < 1): prevY = tokval[i] prevX = tokval[i + 1] prevIdx = i + 2 #First valid value thisX = prevX thisY = prevY else: thisY = tokval[i] thisX = tokval[i + 1] #Then interpolate the values between prevIdx and i; i-1 being the last #value #First convert string to float, then to utm px = float(prevX[:-1]) py = float(prevY[1:]) tx = float(thisX[:-1]) ty = float(thisY[1:]) u = utm.from_latlon(py, px) x1 = u[0] y1 = u[1] u = utm.from_latlon(ty, tx) x2 = u[0] y2 = u[1] noSamp = i - prevIdx xd = float(x2 - x1) / float(noSamp) yd = float(y2 - y1) / float(noSamp) for s in range(noSamp): samp = float(tokval[prevIdx + s]) * 0.1 x = x1 + xd * s y = y1 + yd * s str = "%.2f %.2f 0.0 %.1f\n" % (x,y,samp) retval += str break_me_here = 0 #debug purposes prevX = thisX prevY = thisY prevIdx = i + 2 #Then add the outer samples. 
Mirror the second (last) position to find the #direction and interpolation distance #You can change the output order if necessary to get a nice output in the #csv-file first = -1 for i in range(no): a = tokval[i].find('(') if (a >= 0): first = i break second = -1 for i in range(no): a = tokval[i].find('(') if (a >= 0): if (i != first): second = i break last = -1 for i in range(no): a = tokval[no - 1 - i].find('(') if (a >= 0): last = i break second_last = -1 for i in range(no): a = tokval[no - 1 - i].find('(') if (a >= 0): if (i != last): second_last = i break last = no - last - 1 second_last = no - second_last - 1 px = float(tokval[first + 1][:-1]) py = float(tokval[first][1:]) firstu = utm.from_latlon(py, px) firstx = firstu[0] firsty = firstu[1] px = float(tokval[second + 1][:-1]) py = float(tokval[second][1:]) secondu = utm.from_latlon(py, px) secondx = secondu[0] secondy = secondu[1] fxd = (secondx - firstx) / (second - first - 2) fyd = (secondy - firsty) / (second - first - 2) px = float(tokval[last + 1][:-1]) py = float(tokval[last][1:]) lastu = utm.from_latlon(py, px) lastx = lastu[0] lasty = lastu[1] px = float(tokval[second_last + 1][:-1]) py = float(tokval[second_last][1:]) second_lastu = utm.from_latlon(py, px) second_lastx = second_lastu[0] second_lasty = second_lastu[1] lxd = (second_lastx - lastx) / (second_last - last - 2) lyd = (second_lasty - lasty) / (second_last - last - 2) for i in range(no): if (i == first): break samp = float(tokval[i]) * 0.1 x = firstx - i * fxd # use - as we are moving outwards from second through first and beyond y = firsty - i * fyd str = "%.2f %.2f 0.0 %.1f\n" % (x,y,samp) retval += str for i in range(no): k = no - 1 - i if (k == last + 1): break samp = float(tokval[last + i + 2]) * 0.1 x = lastx + i * lxd y = lasty + i * lyd str = "%.2f %.2f 0.0 %.1f\n" % (x,y,samp) retval += str return retval def openTidefile(minn, mine, maxn, maxe, mint, maxt): global times global tides global noTides middle_n = minn + ((maxn - minn) 
/ 2.0) middle_e = mine + ((maxe - mine) / 2.0) fromtime = datetime.datetime.fromtimestamp(mint) totime = datetime.datetime.fromtimestamp(maxt) rfrom = fromtime.isoformat() ttime = totime.isoformat() strs = 'http://api.sehavniva.no/tideapi.php?tide_request=locationdata&lat=%.8f&lon=%.8f&datatype=OBS&lang=nl&tzone=0&refcode=CD&fromtime=%s&totime=%s&interval=10' % (middle_n, middle_e, rfrom, ttime) try: r = requests.get(strs) root = ET.fromstring(r.text) for tidelevel in root.iter('waterlevel'): tide = tidelevel.attrib.get('value') time = tidelevel.attrib.get('time') yourdate = dateutil.parser.parse(time) # Tide in SIS is negative; ADD tide to get to correct level. tides.append(float(tide) * -0.01) times.append(int(yourdate.timestamp())) except: print(strs) print("Failed.\n\n") noTides = len(times) def getTide(secSinceEpoch): global lastidt global times global tides global noTides if (noTides <= 0): print("Cannot read tidefile") sys.exit(2) maxtime = times[noTides - 1] if (secSinceEpoch < times[0] or secSinceEpoch > maxtime): return 999999 p = lastidt while (times[p] >= secSinceEpoch): p = p - 1 while (times[p] <= secSinceEpoch): p = p + 1 n = p - 1 if (times[n] <= secSinceEpoch and times[p] >= secSinceEpoch): atime = float(times[n]) btime = float(times[p]) atide = float(tides[n]) btide = float(tides[p]) ntide = atide + (secSinceEpoch - atime) * (btide - atide) / (btime - atime) lastidt = n return ntide return 999999 # Process one depth datagram, #MRZ # lengtha and chunk are from processDatagram, see below # millisec is decoded from the header, so I send it in as a parameter here def processDepthDatagram2(millisec, lengtha, chunk): global outfile global min_e global min_n global max_e global max_n global start_t global stop_t global stdstr global freqs # Frequencies used in MF-mode global freqfiles global activeOnOff # Headersize is 4 bytes smaller than in the headerfile, remember that the 4 # bytes with the length has been dropped headersize = 1 + 1 + 1 + 1 + 1 + 1 + 
2 + 4 + 4 partitionsize = 2 + 2 commonsize = 2 + 2 + 8 common = struct.Struct('HHBBBBBBBB') numBytesCmnPart, pingCnt, rxFansPerPing, rxFanIndex, swathsPerPing, swathAlongPosition, \ txTransducerInd, rxTransducerInd, numRxTransducers, algorithmType = common.unpack_from(chunk, headersize + partitionsize) pinginfo_size = 2 + 2 + 4 + 1 + 1 + 1 + 1 + 1 + 1 + 2 + 11 * 4 + 2 + 2 + 1 + 1 + 2 + 4 + 4 + 4 + 4 + 2 + 2 + 4 + 2 + 2 + 6 * 4 + 1 + 1 + 1 + 1 + 8 + 8 + 4 + 8 pinginfo = struct.Struct('HHfBBBBBBHfffffffffffhhBBHIfffHHfHHffffffBBBBddf') numBytesInfoData, padding0, pingRate_Hz, beamSpacing, depthMode,\ subDepthMode, distanceBtwSwath, detectionMode, pulseForm, \ padding01, frequencyMode_Hz, freqRangeLowLim_Hz, \ freqRangeHighLim_Hz, maxTotalTxPulseLength_sec, \ maxEffTxPulseLength_sec, maxEffTxBandWidth_Hz, \ absCoeff_dBPerkm, portSectorEdge_deg, \ starbSectorEdge_deg, portMeanCov_deg, \ starbMeanCov_deg, portMeanCov_m, \ starbMeanCov_m, modeAndStabilisation, \ runtimeFilter1, runtimeFilter2,\ pipeTrackingStatus, transmitArraySizeUsed_deg,\ receiveArraySizeUsed_deg, transmitPower_dB,\ SLrampUpTimeRemaining, padding1,\ yawAngle_deg, numTxSectors, numBytesPerTxSector,\ headingVessel_deg, soundSpeedAtTxDepth_mPerSec,\ txTransducerDepth_m, z_waterLevelReRefPoint_m, \ x_txTransducerArm_SCS_m, y_txTransducerArm_SCS_m,\ latLongInfo, posSensorStatus, attitudeSensorStatus,\ padding2, latitude_deg, longitude_deg,\ ellipsoidHeightReRefPoint_m = pinginfo.unpack_from(chunk, headersize + partitionsize + commonsize) # Bug in Python, fix it (binary alignments not correct) latlon = struct.Struct("d") klat = latlon.unpack_from(chunk, headersize + partitionsize + commonsize + 124) klon = latlon.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8) ellheight = struct.Struct("f") ellipsheight = ellheight.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8) latitude_deg = klat[0] longitude_deg = klon[0] ellipsoidHeightReRefPoint_m = ellipsheight[0] # 
Changed in Version 1 bsCorrectionOffset_dB = ellheight.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8 + 4)[0] byterec = struct.Struct("B") lambertsLawApplied = byterec.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8 + 4 + 4)[0] iceWindow = byterec.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8 + 4 + 4 + 1)[0] shortrec = struct.Struct("H") # The paddig is in version 2 used for activeModes. # Remember that this is just an indication that MF is in use; we must look at the # actual frequencyMode_Hz used and the pulsetype to determine which seabed image to use activeModes = shortrec.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8 + 4 + 4 + 1 + 1)[0] sec = int(millisec / 1000) # Pointer offset to sectorInfo sectorInfo_offset = headersize + partitionsize + commonsize + pinginfo_size # Changed from version 0 sectorInfo = struct.Struct('BBBBfffffffBBHfff') sectorInfo_size = 1 + 1 + 1 + 1 + 7 * 4 + 1 + 1 + 2 + 4 + 4 + 4 i = 0 while (i < numTxSectors): txSectorNumb, txArrNumber, txSubArray, padding0,\ sectorTransmitDelay_sec, tiltAngleReTx_deg,\ txNominalSourceLevel_dB, txFocusRange_m,\ centreFreq_Hz, signalBandWidth_Hz, \ totalSignalLength_sec, pulseShading, signalWaveForm,\ padding1, highVoltageLevel_dB, sectorTrackingCorr_dB, effectiveSignalLength_sec = sectorInfo.unpack_from(chunk, sectorInfo_offset + i * sectorInfo_size) i+=1 rxInfo_offset = sectorInfo_offset + numTxSectors * sectorInfo_size rxInfo = struct.Struct('HHHHffffHHHH') rxInfo_size = 2 + 2 + 2 + 2 + 4 + 4 + 4 + 4 + 2 + 2 + 2 + 2 numBytesRxInfo, numSoundingsMaxMain, numSoundingsValidMain, numBytesPerSounding, \ WCSampleRate, seabedImageSampleRate, BSnormal_dB, BSoblique_dB, \ extraDetectionAlarmFlag, numExtraDetections, numExtraDetectionClasses, \ numBytesPerClass = rxInfo.unpack_from(chunk, rxInfo_offset) extraDetClassInfo_offset = rxInfo_offset + rxInfo_size extraDetectionSize = 2 + 1 + 1 extraDetectionStruct 
= struct.Struct('HBB') sounding_offset = extraDetClassInfo_offset + numExtraDetectionClasses * extraDetectionSize soundingStruct = struct.Struct('HBBBBBBBBHffffffHHffffffffffffffffffHHHH') sounding_size = 2 + 8 + 2 + 6 * 4 + 2 + 2 + 18 * 4 + 4 * 2 #Offset to seabed image seabedImageStart = sounding_offset + (sounding_size * (numSoundingsMaxMain + numExtraDetections)) seabedStruct = struct.Struct('h') sbed_len = lengtha + 4 - seabedImageStart - 4 tot_no_sbed = sbed_len / 2 verify_length = tot_no_sbed * 2 lenStruct = struct.Struct('I') dgmlenver = seabedImageStart + sbed_len dgmlen = lenStruct.unpack_from(chunk,dgmlenver - 4)[0] # should be 4 more then lengtha outputstr = "\n%.8f %.8f %.2f %.2f %d\n" % (latitude_deg, longitude_deg, ellipsoidHeightReRefPoint_m, z_waterLevelReRefPoint_m, millisec) outfile.write(outputstr) sbed_start = seabedImageStart # This is the pointer to the start of the seabed image for current beam no_sbed_found = 0 i = 0 stdstr = "" while(i < numSoundingsMaxMain): soundingIndex, txSectorNumb, detectionType, \ detectionMethod, rejectionInfo1, rejectionInfo2, \ postProcessingInfo, detectionClass, detectionConfidenceLevel, \ padding, rangeFactor, qualityFactor, \ detectionUncertaintyVer_m, detectionUncertaintyHor_m, \ detectionWindowLength_sec, echoLength_sec, \ WCBeamNumb, WCrange_samples, WCNomBeamAngleAcross_deg, \ meanAbsCoeff_dBPerkm, reflectivity1_dB, reflectivity2_dB, \ receiverSensitivityApplied_dB, sourceLevelApplied_dB, \ BScalibration_dB, TVG_dB, beamAngleReRx_deg, \ beamAngleCorrection_deg, twoWayTravelTime_sec, \ twoWayTravelTimeCorrection_sec, deltaLatitude_deg, \ deltaLongitude_deg, z_reRefPoint_m, y_reRefPoint_m, \ x_reRefPoint_m, beamIncAngleAdj_deg, realTimeCleanInfo, \ SIstartRange_samples, SIcentreSample, \ SInumSamples = soundingStruct.unpack_from(chunk, sounding_offset + i * sounding_size) i+=1 # THIS IS IT. This is where we output xyz-points # Depths are referred to the reference point. 
To get it to the waterline, # SUBSTRACT the distance from # Error estimates are also available: detectionUncertaintyVer_m and # detectionUncertaintyHor_m waterlevel = z_reRefPoint_m - z_waterLevelReRefPoint_m plat = latitude_deg + deltaLatitude_deg plon = longitude_deg + deltaLongitude_deg outputstr = " %.8f %.8f %.2f %.2f %.2f" % (deltaLatitude_deg, deltaLongitude_deg, z_reRefPoint_m, detectionUncertaintyVer_m, detectionUncertaintyHor_m) outfile.write(outputstr) n = float(latitude_deg) e = float(longitude_deg) t = int(millisec) if (start_t < 0 or t < start_t): start_t = t if (t > stop_t): stop_t = t if (min_e > e): min_e = e if (min_n > n): min_n = n if (e > max_e): max_e = e if (n > max_n): max_n = n next_sbd_start = sbed_start + (2 * SInumSamples) if (y_reRefPoint_m < 0): # Reverse the output of the samples, see documentation sbed_start = next_sbd_start - 2 center_samp = SInumSamples - SIcentreSample else: center_samp = SIcentreSample for n in range(0, SInumSamples): no_sbed_found += 1 if (n == center_samp): # Put in position of center sample outputstr = " (%.8f %.8f) " % (plat, plon) str11 = stdstr + outputstr stdstr = str11 sbed_sample = seabedStruct.unpack_from(chunk, sbed_start)[0] outputstr = " %d" % (sbed_sample) str11 = stdstr + outputstr stdstr = str11 if (y_reRefPoint_m < 0): sbed_start -= 2 else: sbed_start += 2 # jump 2 bytes (short) forwards sbed_start = next_sbd_start # There are 9 samples per extra detection, and there may be 2 bytes padding # at the end if (i > 398): break_me_here = 0 # for debugging purposes snstr = makeStringFromSeabedImage(stdstr.split()) outw = outfileSBD if (activeModes == 1): middlefreq = int(frequencyMode_Hz)#Get rid of commas # Select file to open and write to if (activeOnOff == 0): activeOnOff = 1 print("Active mode found") found = 0 fcnt = 0 for fmd in freqs: fcnt = fcnt + 1 if (fmd == middlefreq): found = 1 outw = freqfilewrites[fcnt - 1] if (found == 0): # New entry fname = file + "_"+str(middlefreq)+".seabed.csv" 
freqs.append(middlefreq) freqfilewrites.append(open(fname,'w', encoding='utf-8')) outw = freqfilewrites[len(freqfilewrites)-1] outw.write(snstr) #outfileSBD.write(snstr) # Process one depth datagram, #MRZ # lengtha and chunk are from processDatagram, see below # millisec is decoded from the header, so I send it in as a parameter here def processDepthDatagram1(millisec, lengtha, chunk): global outfile global min_e global min_n global max_e global max_n global start_t global stop_t global stdstr # Headersize is 4 bytes smaller than in the headerfile, remember that the 4 # bytes with the length has been dropped headersize = 1 + 1 + 1 + 1 + 1 + 1 + 2 + 4 + 4 partitionsize = 2 + 2 commonsize = 2 + 2 + 8 common = struct.Struct('HHBBBBBBBB') numBytesCmnPart, pingCnt, rxFansPerPing, rxFanIndex, swathsPerPing, swathAlongPosition, \ txTransducerInd, rxTransducerInd, numRxTransducers, algorithmType = common.unpack_from(chunk, headersize + partitionsize) pinginfo_size = 2 + 2 + 4 + 1 + 1 + 1 + 1 + 1 + 1 + 2 + 11 * 4 + 2 + 2 + 1 + 1 + 2 + 4 + 4 + 4 + 4 + 2 + 2 + 4 + 2 + 2 + 6 * 4 + 1 + 1 + 1 + 1 + 8 + 8 + 4 + 8 pinginfo = struct.Struct('HHfBBBBBBHfffffffffffhhBBHIfffHHfHHffffffBBBBddf') numBytesInfoData, padding0, pingRate_Hz, beamSpacing, depthMode,\ subDepthMode, distanceBtwSwath, detectionMode, pulseForm, \ padding01, frequencyMode_Hz, freqRangeLowLim_Hz, \ freqRangeHighLim_Hz, maxTotalTxPulseLength_sec, \ maxEffTxPulseLength_sec, maxEffTxBandWidth_Hz, \ absCoeff_dBPerkm, portSectorEdge_deg, \ starbSectorEdge_deg, portMeanCov_deg, \ starbMeanCov_deg, portMeanCov_m, \ starbMeanCov_m, modeAndStabilisation, \ runtimeFilter1, runtimeFilter2,\ pipeTrackingStatus, transmitArraySizeUsed_deg,\ receiveArraySizeUsed_deg, transmitPower_dB,\ SLrampUpTimeRemaining, padding1,\ yawAngle_deg, numTxSectors, numBytesPerTxSector,\ headingVessel_deg, soundSpeedAtTxDepth_mPerSec,\ txTransducerDepth_m, z_waterLevelReRefPoint_m, \ x_txTransducerArm_SCS_m, y_txTransducerArm_SCS_m,\ latLongInfo, 
posSensorStatus, attitudeSensorStatus,\ padding2, latitude_deg, longitude_deg,\ ellipsoidHeightReRefPoint_m = pinginfo.unpack_from(chunk, headersize + partitionsize + commonsize) # Bug in Python, fix it (binary alignments not correct) latlon = struct.Struct("d") klat = latlon.unpack_from(chunk, headersize + partitionsize + commonsize + 124) klon = latlon.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8) ellheight = struct.Struct("f") ellipsheight = ellheight.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8) latitude_deg = klat[0] longitude_deg = klon[0] ellipsoidHeightReRefPoint_m = ellipsheight[0] # Changed in Version 1 bsCorrectionOffset_dB = ellheight.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8 + 4)[0] byterec = struct.Struct("B") lambertsLawApplied = byterec.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8 + 4 + 4)[0] iceWindow = byterec.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8 + 4 + 4 + 1)[0] shortrec = struct.Struct("H") padding4 = shortrec.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8 + 4 + 4 + 1 + 1)[0] sec = int(millisec / 1000) # Pointer offset to sectorInfo sectorInfo_offset = headersize + partitionsize + commonsize + pinginfo_size # Changed from version 0 sectorInfo = struct.Struct('BBBBfffffffBBHfff') sectorInfo_size = 1 + 1 + 1 + 1 + 7 * 4 + 1 + 1 + 2 + 4 + 4 + 4 i = 0 while (i < numTxSectors): txSectorNumb, txArrNumber, txSubArray, padding0,\ sectorTransmitDelay_sec, tiltAngleReTx_deg,\ txNominalSourceLevel_dB, txFocusRange_m,\ centreFreq_Hz, signalBandWidth_Hz, \ totalSignalLength_sec, pulseShading, signalWaveForm,\ padding1, highVoltageLevel_dB, sectorTrackingCorr_dB, effectiveSignalLength_sec = sectorInfo.unpack_from(chunk, sectorInfo_offset + i * sectorInfo_size) i+=1 rxInfo_offset = sectorInfo_offset + numTxSectors * sectorInfo_size rxInfo = struct.Struct('HHHHffffHHHH') rxInfo_size = 2 + 2 + 
2 + 2 + 4 + 4 + 4 + 4 + 2 + 2 + 2 + 2 numBytesRxInfo, numSoundingsMaxMain, numSoundingsValidMain, numBytesPerSounding, \ WCSampleRate, seabedImageSampleRate, BSnormal_dB, BSoblique_dB, \ extraDetectionAlarmFlag, numExtraDetections, numExtraDetectionClasses, \ numBytesPerClass = rxInfo.unpack_from(chunk, rxInfo_offset) extraDetClassInfo_offset = rxInfo_offset + rxInfo_size extraDetectionSize = 2 + 1 + 1 extraDetectionStruct = struct.Struct('HBB') sounding_offset = extraDetClassInfo_offset + numExtraDetectionClasses * extraDetectionSize soundingStruct = struct.Struct('HBBBBBBBBHffffffHHffffffffffffffffffHHHH') sounding_size = 2 + 8 + 2 + 6 * 4 + 2 + 2 + 18 * 4 + 4 * 2 #Offset to seabed image seabedImageStart = sounding_offset + (sounding_size * (numSoundingsMaxMain + numExtraDetections)) seabedStruct = struct.Struct('h') sbed_len = lengtha + 4 - seabedImageStart - 4 tot_no_sbed = sbed_len / 2 verify_length = tot_no_sbed * 2 lenStruct = struct.Struct('I') dgmlenver = seabedImageStart + sbed_len dgmlen = lenStruct.unpack_from(chunk,dgmlenver - 4)[0] # should be 4 more then lengtha outputstr = "\n%.8f %.8f %.2f %.2f %d\n" % (latitude_deg, longitude_deg, ellipsoidHeightReRefPoint_m, z_waterLevelReRefPoint_m, millisec) outfile.write(outputstr) sbed_start = seabedImageStart # This is the pointer to the start of the seabed image for current beam no_sbed_found = 0 i = 0 stdstr = "" while(i < numSoundingsMaxMain): soundingIndex, txSectorNumb, detectionType, \ detectionMethod, rejectionInfo1, rejectionInfo2, \ postProcessingInfo, detectionClass, detectionConfidenceLevel, \ padding, rangeFactor, qualityFactor, \ detectionUncertaintyVer_m, detectionUncertaintyHor_m, \ detectionWindowLength_sec, echoLength_sec, \ WCBeamNumb, WCrange_samples, WCNomBeamAngleAcross_deg, \ meanAbsCoeff_dBPerkm, reflectivity1_dB, reflectivity2_dB, \ receiverSensitivityApplied_dB, sourceLevelApplied_dB, \ BScalibration_dB, TVG_dB, beamAngleReRx_deg, \ beamAngleCorrection_deg, twoWayTravelTime_sec, \ 
twoWayTravelTimeCorrection_sec, deltaLatitude_deg, \ deltaLongitude_deg, z_reRefPoint_m, y_reRefPoint_m, \ x_reRefPoint_m, beamIncAngleAdj_deg, realTimeCleanInfo, \ SIstartRange_samples, SIcentreSample, \ SInumSamples = soundingStruct.unpack_from(chunk, sounding_offset + i * sounding_size) i+=1 # THIS IS IT. This is where we output xyz-points # Depths are referred to the reference point. To get it to the waterline, # SUBSTRACT the distance from # Error estimates are also available: detectionUncertaintyVer_m and # detectionUncertaintyHor_m waterlevel = z_reRefPoint_m - z_waterLevelReRefPoint_m plat = latitude_deg + deltaLatitude_deg plon = longitude_deg + deltaLongitude_deg outputstr = " %.8f %.8f %.2f %.2f %.2f" % (deltaLatitude_deg, deltaLongitude_deg, z_reRefPoint_m, detectionUncertaintyVer_m, detectionUncertaintyHor_m) outfile.write(outputstr) n = float(latitude_deg) e = float(longitude_deg) t = int(millisec) if (start_t < 0 or t < start_t): start_t = t if (t > stop_t): stop_t = t if (min_e > e): min_e = e if (min_n > n): min_n = n if (e > max_e): max_e = e if (n > max_n): max_n = n next_sbd_start = sbed_start + (2 * SInumSamples) if (y_reRefPoint_m < 0): # Reverse the output of the samples, see documentation sbed_start = next_sbd_start - 2 center_samp = SInumSamples - SIcentreSample else: center_samp = SIcentreSample for n in range(0, SInumSamples): no_sbed_found += 1 if (n == center_samp): # Put in position of center sample outputstr = " (%.8f %.8f) " % (plat, plon) str11 = stdstr + outputstr stdstr = str11 sbed_sample = seabedStruct.unpack_from(chunk, sbed_start)[0] outputstr = " %d" % (sbed_sample) str11 = stdstr + outputstr stdstr = str11 if (y_reRefPoint_m < 0): sbed_start -= 2 else: sbed_start += 2 # jump 2 bytes (short) forwards sbed_start = next_sbd_start # There are 9 samples per extra detection, and there may be 2 bytes padding # at the end if (i > 398): break_me_here = 0 # for debugging purposes snstr = makeStringFromSeabedImage(stdstr.split()) 
outfileSBD.write(snstr) # Process one depth datagram, #MRZ # lengtha and chunk are from processDatagram, see below # millisec is decoded from the header, so I send it in as a parameter here def processDepthDatagram(millisec, lengtha, chunk): global outfile global min_e global min_n global max_e global max_n global start_t global stop_t global stdstr # Headersize is 4 bytes smaller than in the headerfile, remember that the 4 # bytes with the length has been dropped headersize = 1 + 1 + 1 + 1 + 1 + 1 + 2 + 4 + 4 partitionsize = 2 + 2 commonsize = 2 + 2 + 8 common = struct.Struct('HHBBBBBBBB') numBytesCmnPart, pingCnt, rxFansPerPing, rxFanIndex, swathsPerPing, swathAlongPosition, \ txTransducerInd, rxTransducerInd, numRxTransducers, algorithmType = common.unpack_from(chunk, headersize + partitionsize) pinginfo_size = 2 + 2 + 4 + 1 + 1 + 1 + 1 + 1 + 1 + 2 + 11 * 4 + 2 + 2 + 1 + 1 + 2 + 4 + 4 + 4 + 4 + 2 + 2 + 4 + 2 + 2 + 6 * 4 + 1 + 1 + 1 + 1 + 8 + 8 + 4 pinginfo = struct.Struct('HHfBBBBBBHfffffffffffhhBBHIfffHHfHHffffffBBBBddf') numBytesInfoData, padding0, pingRate_Hz, beamSpacing, depthMode,\ subDepthMode, distanceBtwSwath, detectionMode, pulseForm, \ padding01, frequencyMode_Hz, freqRangeLowLim_Hz, \ freqRangeHighLim_Hz, maxTotalTxPulseLength_sec, \ maxEffTxPulseLength_sec, maxEffTxBandWidth_Hz, \ absCoeff_dBPerkm, portSectorEdge_deg, \ starbSectorEdge_deg, portMeanCov_deg, \ starbMeanCov_deg, portMeanCov_m, \ starbMeanCov_m, modeAndStabilisation, \ runtimeFilter1, runtimeFilter2,\ pipeTrackingStatus, transmitArraySizeUsed_deg,\ receiveArraySizeUsed_deg, transmitPower_dB,\ SLrampUpTimeRemaining, padding1,\ yawAngle_deg, numTxSectors, numBytesPerTxSector,\ headingVessel_deg, soundSpeedAtTxDepth_mPerSec,\ txTransducerDepth_m, z_waterLevelReRefPoint_m, \ x_txTransducerArm_SCS_m, y_txTransducerArm_SCS_m,\ latLongInfo, posSensorStatus, attitudeSensorStatus,\ padding2, latitude_deg, longitude_deg,\ ellipsoidHeightReRefPoint_m = pinginfo.unpack_from(chunk, headersize + 
partitionsize + commonsize) # Bug in Python, fix it (binary alignments not correct) latlon = struct.Struct("d") klat = latlon.unpack_from(chunk, headersize + partitionsize + commonsize + 124) klon = latlon.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8) ellheight = struct.Struct("f") ellipsheight = ellheight.unpack_from(chunk, headersize + partitionsize + commonsize + 124 + 8 + 8) latitude_deg = klat[0] longitude_deg = klon[0] ellipsoidHeightReRefPoint_m = ellipsheight[0] sec = int(millisec / 1000) # Pointer offset to sectorInfo sectorInfo_offset = headersize + partitionsize + commonsize + pinginfo_size sectorInfo = struct.Struct('BBBBfffffffBBH') sectorInfo_size = 1 + 1 + 1 + 1 + 7 * 4 + 1 + 1 + 2 i = 0 while (i < numTxSectors): txSectorNumb, txArrNumber, txSubArray, padding0,\ sectorTransmitDelay_sec, tiltAngleReTx_deg,\ txNominalSourceLevel_dB, txFocusRange_m,\ centreFreq_Hz, signalBandWidth_Hz, \ totalSignalLength_sec, pulseShading, signalWaveForm,\ padding1 = sectorInfo.unpack_from(chunk, sectorInfo_offset + i * sectorInfo_size) i+=1 rxInfo_offset = sectorInfo_offset + numTxSectors * sectorInfo_size rxInfo = struct.Struct('HHHHffffHHHH') rxInfo_size = 2 + 2 + 2 + 2 + 4 + 4 + 4 + 4 + 2 + 2 + 2 + 2 numBytesRxInfo, numSoundingsMaxMain, numSoundingsValidMain, numBytesPerSounding, \ WCSampleRate, seabedImageSampleRate, BSnormal_dB, BSoblique_dB, \ extraDetectionAlarmFlag, numExtraDetections, numExtraDetectionClasses, \ numBytesPerClass = rxInfo.unpack_from(chunk, rxInfo_offset) extraDetClassInfo_offset = rxInfo_offset + rxInfo_size extraDetectionSize = 2 + 1 + 1 extraDetectionStruct = struct.Struct('HBB') sounding_offset = extraDetClassInfo_offset + numExtraDetectionClasses * extraDetectionSize soundingStruct = struct.Struct('HBBBBBBBBHffffffHHffffffffffffffffffHHHH') sounding_size = 2 + 8 + 2 + 6 * 4 + 2 + 2 + 18 * 4 + 4 * 2 #Offset to seabed image seabedImageStart = sounding_offset + (sounding_size * (numSoundingsMaxMain + 
numExtraDetections)) seabedStruct = struct.Struct('h') sbed_len = lengtha + 4 - seabedImageStart - 4 tot_no_sbed = sbed_len / 2 verify_length = tot_no_sbed * 2 lenStruct = struct.Struct('I') dgmlenver = seabedImageStart + sbed_len dgmlen = lenStruct.unpack_from(chunk,dgmlenver - 4)[0] # should be 4 more then lengtha outputstr = "\n%.8f %.8f %.2f %.2f %d\n" % (latitude_deg, longitude_deg, ellipsoidHeightReRefPoint_m, z_waterLevelReRefPoint_m, millisec) outfile.write(outputstr) sbed_start = seabedImageStart # This is the pointer to the start of the seabed image for current beam no_sbed_found = 0 i = 0 stdstr = "" while(i < numSoundingsMaxMain): soundingIndex, txSectorNumb, detectionType, \ detectionMethod, rejectionInfo1, rejectionInfo2, \ postProcessingInfo, detectionClass, detectionConfidenceLevel, \ padding, rangeFactor, qualityFactor, \ detectionUncertaintyVer_m, detectionUncertaintyHor_m, \ detectionWindowLength_sec, echoLength_sec, \ WCBeamNumb, WCrange_samples, WCNomBeamAngleAcross_deg, \ meanAbsCoeff_dBPerkm, reflectivity1_dB, reflectivity2_dB, \ receiverSensitivityApplied_dB, sourceLevelApplied_dB, \ BScalibration_dB, TVG_dB, beamAngleReRx_deg, \ beamAngleCorrection_deg, twoWayTravelTime_sec, \ twoWayTravelTimeCorrection_sec, deltaLatitude_deg, \ deltaLongitude_deg, z_reRefPoint_m, y_reRefPoint_m, \ x_reRefPoint_m, beamIncAngleAdj_deg, realTimeCleanInfo, \ SIstartRange_samples, SIcentreSample, \ SInumSamples = soundingStruct.unpack_from(chunk, sounding_offset + i * sounding_size) i+=1 # THIS IS IT. This is where we output xyz-points # Depths are referred to the reference point. 
To get it to the waterline, # SUBSTRACT the distance from # Error estimates are also available: detectionUncertaintyVer_m and # detectionUncertaintyHor_m waterlevel = z_reRefPoint_m - z_waterLevelReRefPoint_m plat = latitude_deg + deltaLatitude_deg plon = longitude_deg + deltaLongitude_deg outputstr = " %.8f %.8f %.2f %.2f %.2f" % (deltaLatitude_deg, deltaLongitude_deg, z_reRefPoint_m, detectionUncertaintyVer_m, detectionUncertaintyHor_m) outfile.write(outputstr) n = float(latitude_deg) e = float(longitude_deg) t = int(millisec) if (start_t < 0 or t < start_t): start_t = t if (t > stop_t): stop_t = t if (min_e > e): min_e = e if (min_n > n): min_n = n if (e > max_e): max_e = e if (n > max_n): max_n = n next_sbd_start = sbed_start + (2 * SInumSamples) if (y_reRefPoint_m < 0): # Reverse the output of the samples, see documentation sbed_start = next_sbd_start - 2 center_samp = SInumSamples - SIcentreSample else: center_samp = SIcentreSample for n in range(0, SInumSamples): no_sbed_found += 1 if (n == center_samp): # Put in position of center sample outputstr = " (%.8f %.8f) " % (plat, plon) str11 = stdstr + outputstr stdstr = str11 sbed_sample = seabedStruct.unpack_from(chunk, sbed_start)[0] outputstr = " %d" % (sbed_sample) str11 = stdstr + outputstr stdstr = str11 if (y_reRefPoint_m < 0): sbed_start -= 2 else: sbed_start += 2 # jump 2 bytes (short) forwards sbed_start = next_sbd_start # There are 9 samples per extra detection, and there may be 2 bytes padding # at the end if (i > 398): break_me_here = 0 # for debugging purposes snstr = makeStringFromSeabedImage(stdstr.split()) outfileSBD.write(snstr) # What happens in processDatagram? Read the documentation of the kmall-format. # This is the processing of the datagram to find out what datagram type this # is. 
# The processing of each datagram type takes place in specific routines def processDatagram(lengtha, chunk): header_without_length = struct.Struct('ccccBBHII') dgm_type0,dgm_type1,dgm_type2,dgm_type3,dgm_version,sysid,emid,sec,nsec = header_without_length.unpack_from(chunk,0) dgm_type = dgm_type0 + dgm_type1 + dgm_type2 + dgm_type3 # Decode time nanosec = sec nanosec *= 1E9 nanosec += nsec millisec = nanosec millisec /= 1E6 strk = dgm_type.decode() if (strk == '#MRZ'): if (dgm_version == 0): processDepthDatagram(millisec, lengtha, chunk) if (dgm_version == 1): processDepthDatagram1(millisec, lengtha, chunk) if (dgm_version == 2): processDepthDatagram2(millisec, lengtha, chunk) # I shall not humiliate any developer by documenting this main program. # The processing of the datagram takes place in the routine processDatagram. files = glob.glob('*.kmall') for file in files: try: activeOnOff = 0 f = open(file, 'rb') nfile = file + ".pings" outfile = open(nfile,'w', encoding='utf-8') nfile = file + ".seabed.csv" outfileSBD = open(nfile,'w', encoding='utf-8') freqs = [] for g in freqfilewrites: g.close() freqfilewrites = [] except Exception: print('File',file,'not opened.') sys.exit(0) print(file) # Process the file: f.seek(0, 2) file_size = f.tell() f.seek(0, 0) remaining = file_size # Read all datagrams and process each of them while (remaining > 0): # First read 4 bytes that contains the length of the chunk lengthb = struct.unpack("I",f.read(4)) remaining -= 4 # Then read the chunk. Note that the length read includes the 4 bytes in the # integer. 
dgmsize = lengthb[0] - 4 chunk = f.read(dgmsize) remaining -= dgmsize # Then process this chunk try: processDatagram(dgmsize, chunk) except Exception as e: print('Exception encountered', str(e)) f.close() outfile.close() outfileSBD.close() # Remove empty seabed image files filelist = glob.glob('*.seabed.csv') for file in filelist: if (os.stat(file).st_size == 0): os.remove(file) aatime = int(start_t / 1e3) - (24 * 60 * 60) bbtime = int(stop_t / 1e3) + (24 * 60 * 60) openTidefile(min_n, min_e, max_n, max_e, aatime, bbtime) filelist = glob.glob('*.pings') for file in filelist: try: f = open(file) nfile = file + ".tidecorrected.utm.csv" outfile = open(nfile,'w', encoding='utf-8') except Exception: print('File',file,'not opened.') sys.exit(0) print(file) if (len(times) <= 0): print("no tides, depths not corrected for tide") line = f.readline() while (line): toks = line.split() if (len(toks) == 5): n = float(toks[0]) e = float(toks[1]) tm = int(int(toks[4]) / 1e3) tide = getTide(tm) if (tide > 9999): tide = 0 toWlev = float(toks[3]) + tide else: cnt = 0 while(cnt < len(toks)): lat = n + float(toks[cnt]) lon = e + float(toks[cnt + 1]) dpt = float(toks[cnt + 2]) + toWlev u = utm.from_latlon(lat, lon) outputstr = "%.2f %.2f %.2f\n" % (u[0], u[1], dpt * -1.0) outfile.write(outputstr) cnt += 5 line = f.readline() f.close()
40.782334
204
0.621261
4,357
38,784
5.387423
0.139775
0.005368
0.024922
0.00409
0.773144
0.761811
0.750522
0.745495
0.742768
0.742768
0
0.029219
0.288753
38,784
950
205
40.825263
0.821715
0.121545
0
0.680628
0
0.001309
0.03387
0.008664
0
0
0
0
0
1
0.009162
false
0
0.013089
0
0.028796
0.013089
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
a5b7581b7b30a01fc9e62df612d303ae57d22d2e
255
py
Python
backend/exceptions.py
jacklee1792/spiggy
8ebb8d3d5465cce1dc45691a914e31922fb5b3cc
[ "MIT" ]
9
2021-05-17T01:53:46.000Z
2021-12-29T04:05:32.000Z
backend/exceptions.py
jacklee1792/spiggy
8ebb8d3d5465cce1dc45691a914e31922fb5b3cc
[ "MIT" ]
null
null
null
backend/exceptions.py
jacklee1792/spiggy
8ebb8d3d5465cce1dc45691a914e31922fb5b3cc
[ "MIT" ]
4
2021-05-22T15:13:42.000Z
2021-08-22T16:08:18.000Z
class ResponseCodeError(Exception): """ Called when the Skyblock API returns an unexpected response code. """ pass class UnexpectedUpdateError(Exception): """ Called when the Skyblock API updates during a cache. """ pass
19.615385
69
0.67451
27
255
6.37037
0.703704
0.174419
0.22093
0.255814
0.383721
0.383721
0
0
0
0
0
0
0.247059
255
12
70
21.25
0.895833
0.462745
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
a5ba932544970f9ce5e419f6c1317abd2ad2ec15
2,418
py
Python
project/lib/decorators.py
feilaoda/FlickBoard
21e6364117e336f4eb60d83f496d9fc1cb2784ae
[ "MIT" ]
2
2016-07-21T08:52:30.000Z
2017-06-15T06:31:30.000Z
project/lib/decorators.py
feilaoda/FlickBoard
21e6364117e336f4eb60d83f496d9fc1cb2784ae
[ "MIT" ]
null
null
null
project/lib/decorators.py
feilaoda/FlickBoard
21e6364117e336f4eb60d83f496d9fc1cb2784ae
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- import functools import urllib from tornado.web import HTTPError from tornado.options import options def admin(method): """Decorate with this method to restrict to site admins.""" @functools.wraps(method) def wrapper(self, *args, **kwargs): if not self.current_user: if self.request.method == "GET": url = self.get_login_url() if "?" not in url: url += "?" + urllib.urlencode(dict(next=self.request.full_url())) self.redirect(url) return raise HTTPError(403) elif not self.is_admin: if self.request.method == "GET": self.redirect(options.home_url) return raise HTTPError(403) else: return method(self, *args, **kwargs) return wrapper def editor(method): """Decorate with this method to restrict to site editor.""" @functools.wraps(method) def wrapper(self, *args, **kwargs): if not self.current_user: if self.request.method == "GET": url = self.get_login_url() if "?" not in url: url += "?" + urllib.urlencode(dict(next=self.request.full_url())) self.redirect(url) return raise HTTPError(403) elif not self.is_editor: if self.request.method == "GET": self.redirect(options.home_url) return raise HTTPError(403) else: return method(self, *args, **kwargs) return wrapper def authenticated(method): """Decorate methods with this to require that the user be logged in. Fix the redirect url with full_url. Tornado use uri by default. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): user = self.current_user if not user: if self.request.method == "GET": url = self.get_login_url() if "?" not in url: url += "?" + urllib.urlencode(dict(next=self.request.full_url())) self.redirect(url) return raise HTTPError(403) #self._current_user = user return method(self, *args, **kwargs) return wrapper
32.675676
86
0.528122
264
2,418
4.765152
0.227273
0.069952
0.066773
0.075517
0.762321
0.762321
0.762321
0.73132
0.696343
0.626391
0
0.010478
0.368486
2,418
73
87
33.123288
0.81336
0.118693
0
0.803571
0
0
0.010401
0
0
0
0
0
0
1
0.107143
false
0
0.071429
0
0.375
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
3c18277e5812880a771803b7a7fd0aad9a5386c9
36
py
Python
ndscheduler/core/scheduler/__init__.py
hairbeartoe/ndscheduler
a5a6b1fc84ffa3d6765f3d63152198d4cffab4e9
[ "BSD-2-Clause" ]
3
2019-06-28T08:30:10.000Z
2021-09-10T20:55:41.000Z
ndscheduler/core/scheduler/__init__.py
hairbeartoe/ndscheduler
a5a6b1fc84ffa3d6765f3d63152198d4cffab4e9
[ "BSD-2-Clause" ]
1
2017-11-16T23:49:21.000Z
2017-11-16T23:49:21.000Z
ndscheduler/core/scheduler/__init__.py
hairbeartoe/ndscheduler
a5a6b1fc84ffa3d6765f3d63152198d4cffab4e9
[ "BSD-2-Clause" ]
3
2017-02-09T19:32:31.000Z
2017-05-04T05:43:13.000Z
"""Customize the core scheduler."""
18
35
0.694444
4
36
6.25
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
36
1
36
36
0.78125
0.805556
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
3c3e0f15f06a0c26582ddd69a84fda03cc532bab
172
py
Python
mealie/routes/groups/groups.py
BryceStevenWilley/mealie
20c3873980f6d5d01e86ce6ef7915d95a06d8f6a
[ "MIT" ]
1,927
2021-01-02T20:01:15.000Z
2022-03-31T22:49:18.000Z
mealie/routes/groups/groups.py
BryceStevenWilley/mealie
20c3873980f6d5d01e86ce6ef7915d95a06d8f6a
[ "MIT" ]
545
2021-01-02T20:32:28.000Z
2022-03-31T23:37:08.000Z
mealie/routes/groups/groups.py
BryceStevenWilley/mealie
20c3873980f6d5d01e86ce6ef7915d95a06d8f6a
[ "MIT" ]
254
2021-01-03T00:20:07.000Z
2022-03-23T22:37:21.000Z
from fastapi import APIRouter from mealie.routes.groups import crud router = APIRouter() router.include_router(crud.admin_router) router.include_router(crud.user_router)
21.5
40
0.837209
24
172
5.833333
0.5
0.185714
0.271429
0.328571
0
0
0
0
0
0
0
0
0.087209
172
7
41
24.571429
0.89172
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
3c44b927bd9a26aa5bca72bcff60d5b394aa30b8
30
py
Python
voxelbotutils/__init__.py
6days9weeks/Novus
a21157f15d7a07669cb75b3f023bd9eedf40e40e
[ "MIT" ]
2
2022-01-22T16:05:42.000Z
2022-01-22T16:06:07.000Z
voxelbotutils/__init__.py
6days9weeks/Novus
a21157f15d7a07669cb75b3f023bd9eedf40e40e
[ "MIT" ]
null
null
null
voxelbotutils/__init__.py
6days9weeks/Novus
a21157f15d7a07669cb75b3f023bd9eedf40e40e
[ "MIT" ]
null
null
null
from discord.ext.vbu import *
15
29
0.766667
5
30
4.6
1
0
0
0
0
0
0
0
0
0
0
0
0.133333
30
1
30
30
0.884615
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
b1db715b8bd221b0d4a24e82568870f2a297c12b
49
py
Python
ssds/abc/__init__.py
claytsay/ssds-python
7396c6b7271710a82001ab91d92ef8bd33108a0e
[ "Apache-2.0" ]
1
2020-07-04T23:11:56.000Z
2020-07-04T23:11:56.000Z
ssds/abc/__init__.py
claytsay/ssds-python
7396c6b7271710a82001ab91d92ef8bd33108a0e
[ "Apache-2.0" ]
null
null
null
ssds/abc/__init__.py
claytsay/ssds-python
7396c6b7271710a82001ab91d92ef8bd33108a0e
[ "Apache-2.0" ]
null
null
null
from ssds.abc.PriorityQueue import PriorityQueue
24.5
48
0.877551
6
49
7.166667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.081633
49
1
49
49
0.955556
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
5901d1fe8bbc5cf85f31d69745fef7d43d2146f9
18,360
py
Python
tests/aws/test_s3.py
kislerdm/cloud-connectors
1a6ab99df7dbbac333fe69873fda1090e0d0b0b1
[ "MIT" ]
null
null
null
tests/aws/test_s3.py
kislerdm/cloud-connectors
1a6ab99df7dbbac333fe69873fda1090e0d0b0b1
[ "MIT" ]
null
null
null
tests/aws/test_s3.py
kislerdm/cloud-connectors
1a6ab99df7dbbac333fe69873fda1090e0d0b0b1
[ "MIT" ]
1
2020-06-29T02:12:56.000Z
2020-06-29T02:12:56.000Z
# pylint: disable=missing-function-docstring import os import sys import json import inspect import warnings import logging from moto import mock_s3 # type: ignore import boto3 # type: ignore from cloud_connectors.aws import s3 as module logging.basicConfig(level=logging.ERROR, format="[line: %(lineno)s] %(message)s") LOGGER = logging.getLogger(__name__) warnings.simplefilter(action="ignore", category=FutureWarning) CLASSES = {"Client"} CLASS_METHODS = { "CLIENT_CONFIG_SCHEMA", "S3_TRANSFER_SCHEMA", "list_buckets", "list_objects", "list_objects_size", "read", "write", "upload", "download", "copy", "move", "delete_object", "delete_objects", } def test_module_miss_classes() -> None: missing = CLASSES.difference(set(module.__dir__())) if missing: LOGGER.error(f"""Class(es) '{"', '".join(missing)}' is(are) missing.""") sys.exit(1) def test_class_client_miss_methods() -> None: model_members = inspect.getmembers(module.Client) missing = CLASS_METHODS.difference({i[0] for i in model_members}) if missing: LOGGER.error(f"""Class 'Client' Method(s) '{"', '".join(missing)}' is(are) missing.""") sys.exit(1) def test_connection() -> None: tests = [ { "type": "valid", "config": { "aws_access_key_id": "AKIAAAAAAAAAAAAA1111", "aws_secret_access_key": "aaaaaaaaxxxxxxxx02330128skjjhasdg7723s!!", }, }, { "type": "failty_config_key", "config": { "aws_access_key_id": "AKIAAAAAAAAAAAAA11", "aws_secret_access_key": "aaaaaaaaxxxxxxxx02330128skjjhasdg7723s!!", }, }, { "type": "failty_config_secret", "config": { "aws_access_key_id": "AKIAAAAAAAAAAAAA1111", "aws_secret_access_key": "aaaaaaaaxxxxxxxx02330128skjjhasdg7723s", }, }, ] for test in tests: try: _ = module.Client(test["config"]) except Exception as ex: if test["type"] == "valid": LOGGER.error(f"Failed client instance: {ex}") sys.exit(1) elif test["type"] == "failty_config_key": if type(ex).__name__ != "ConfigurationError": LOGGER.error("Wrong error type to handle config error") sys.exit(1) if "data.aws_access_key_id" not in str(ex): 
LOGGER.error(f"Configuration validator error - key: {ex}") sys.exit(1) elif test["type"] == "failty_config_secret": if type(ex).__name__ != "ConfigurationError": LOGGER.error("Wrong error type to handle config error") sys.exit(1) if "data.aws_secret_access_key" not in str(ex): LOGGER.error(f"Configuration validator error - secret: {ex}") sys.exit(1) @mock_s3 def test_list_buckets() -> None: tests = [ {"create": None, "want": [],}, {"create": "test", "want": ["test"],}, {"create": "test1", "want": ["test", "test1"]}, ] def _create_dummy_buckets(bucket: str) -> None: boto3.client("s3").create_bucket(Bucket=bucket) client = module.Client() for test in tests: if test["create"]: _create_dummy_buckets(test["create"]) buckets = client.list_buckets() if buckets != test["want"]: LOGGER.error(f"Error listing buckets. got: {buckets}, want: {test['want']}") sys.exit(1) BUCKET = "test_bucket" OBJ_CONTENT = {"products": [{"a": 1}, {"b": "foo"},]} def put_object(mock_client: boto3.client, obj_key: str) -> None: mock_client.put_object( Bucket=BUCKET, Key=obj_key, Body=json.dumps(OBJ_CONTENT), ContentType="application/json", ) @mock_s3 def test_list_objects() -> None: mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) tests = [ {"success": False, "create": None, "want": [],}, {"success": True, "create": None, "want": [],}, {"success": True, "create": "test.json", "want": ["test.json"],}, {"success": True, "create": "test1.json", "want": ["test.json", "test1.json"],}, {"success": True, "create": "blah.json", "want": ["test.json", "test1.json"],}, ] client = module.Client() for test in tests: if test["success"]: if test["create"]: put_object(mock_client, test["create"]) objects = client.list_objects(bucket=BUCKET, prefix="test") if objects != test["want"]: LOGGER.error(f"Error listing objects. 
got: {objects}, want: {test['want']}") sys.exit(1) else: try: objects = client.list_objects(bucket=f"{BUCKET}_bar", prefix="test") except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) @mock_s3 def test_list_objects_size() -> None: mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) tests = [ {"success": False, "create": None, "want": [],}, {"success": True, "create": None, "want": [],}, {"success": True, "create": "test.json", "want": [("test.json", 38)],}, {"success": True, "create": "test1.json", "want": [("test.json", 38), ("test1.json", 38)],}, {"success": True, "create": "blah.json", "want": [("test.json", 38), ("test1.json", 38)],}, ] client = module.Client() for test in tests: if test["success"]: if test["create"]: put_object(mock_client, test["create"]) objects = client.list_objects_size(bucket=BUCKET, prefix="test") if objects != test["want"]: LOGGER.error(f"Error listing objects. 
got: {objects}, want: {test['want']}") sys.exit(1) else: try: objects = client.list_objects_size(bucket=f"{BUCKET}_bar", prefix="test") except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) @mock_s3 def test_read() -> None: path = "test.json" mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) put_object(mock_client, path) client = module.Client() try: _ = client.read(bucket=f"{BUCKET}_bar", path=path) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) try: _ = client.read(bucket=BUCKET, path=f"{path}_bar") except Exception as ex: if type(ex).__name__ != "ObjectNotFound": LOGGER.error("Wrong error type to handle NoSuchKey error") sys.exit(1) obj = client.read(bucket=BUCKET, path=path) if json.loads(obj) != OBJ_CONTENT: LOGGER.error("Error reading object") sys.exit(1) try: client.read("", "test.json") except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle InvalidBucketName error") sys.exit(1) @mock_s3 def test_write() -> None: path = "test.json" mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) client = module.Client() try: client.write(obj=json.dumps(OBJ_CONTENT), bucket=f"{BUCKET}_bar", path=path) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) try: client.write(obj=OBJ_CONTENT, bucket=BUCKET, path=path) except Exception as ex: if type(ex).__name__ != "TypeError": LOGGER.error("Wrong error type to handle ParamValidationError error") sys.exit(1) client.write(obj=json.dumps(OBJ_CONTENT), bucket=BUCKET, path=path) obj = mock_client.get_object(Bucket=BUCKET, Key=path)["Body"].read() if json.loads(obj) != OBJ_CONTENT: LOGGER.error("Error writing object") sys.exit(1) @mock_s3 def test_upload() -> None: path = 
"test.json" path_os = f"/tmp/{path}" mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) client = module.Client() try: client.upload(bucket=BUCKET, path_source=path_os, path_destination=path) except Exception as ex: if type(ex).__name__ != "FileNotFoundError": LOGGER.error("Wrong error type to handle FileNotFoundError error") sys.exit(1) with open(path_os, "w") as f: json.dump(OBJ_CONTENT, f) try: client.upload(bucket=f"{BUCKET}_bar", path_source=f"/tmp/{path}", path_destination=path) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) client.upload(bucket=BUCKET, path_source=path_os, path_destination=path) obj = mock_client.get_object(Bucket=BUCKET, Key=path)["Body"].read() if json.loads(obj) != OBJ_CONTENT: LOGGER.error("Error writing object") sys.exit(1) os.remove(path_os) @mock_s3 def test_download() -> None: path = "test.json" path_os = f"/tmp/{path}" mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) put_object(mock_client, path) client = module.Client() try: client.download(bucket=f"{BUCKET}_bar", path_source=path, path_destination=path_os) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) try: client.download(bucket=BUCKET, path_source=f"{path}_bar", path_destination=path_os) except Exception as ex: if type(ex).__name__ != "ObjectNotFound": LOGGER.error("Wrong error type to handle NoSuchKey error") sys.exit(1) try: client.download(bucket=BUCKET, path_source=path, path_destination="/bin/test.json") except Exception as ex: if type(ex).__name__ != "DestinationPathPermissionsError": LOGGER.error("Wrong error type to handle write permissoin error") sys.exit(1) try: client.download( bucket=BUCKET, path_source=path, path_destination="/tmp/s3_test____/test.json", ) except Exception as ex: if type(ex).__name__ != "DestinationPathError": 
LOGGER.error("Wrong error type to handle desination directory error") sys.exit(1) client.download(bucket=BUCKET, path_source=path, path_destination=path_os) with open(path_os, "r") as f: if json.load(f) != OBJ_CONTENT: LOGGER.error("Error downloading object - content") sys.exit(1) os.remove(path_os) try: client.download( bucket=BUCKET, path_source=path, path_destination=path_os, configuration={"a": 1} ) except Exception as ex: if type(ex).__name__ != "ConfigurationError": LOGGER.error("Wrong error type to handle download configuration error") sys.exit(1) @mock_s3 def test_copy() -> None: path = "test.json" mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) mock_client.create_bucket(Bucket=f"{BUCKET}_destination") put_object(mock_client, path) client = module.Client() try: client.copy( bucket_source=f"{BUCKET}_bar", bucket_destination=f"{BUCKET}_destination", path_source=path, ) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) try: client.copy( bucket_source=BUCKET, bucket_destination=f"{BUCKET}_destination", path_source=f"{path}_bar", ) except Exception as ex: if type(ex).__name__ != "ObjectNotFound": LOGGER.error("Wrong error type to handle NoSuchKey error") sys.exit(1) try: client.copy( bucket_source=BUCKET, bucket_destination=f"{BUCKET}_destination_bar", path_source=path, ) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) client.copy( bucket_source=BUCKET, bucket_destination=f"{BUCKET}_destination", path_source=path, ) obj = mock_client.get_object(Bucket=BUCKET, Key=path) if json.loads(obj["Body"].read()) != OBJ_CONTENT: LOGGER.error("Error copying object - content") sys.exit(1) if obj["ContentType"] != "application/json": LOGGER.error("Error copying object - content type") sys.exit(1) # replace the object itself try: client.copy( bucket_source=BUCKET, 
bucket_destination=BUCKET, path_source=path, path_destination=path, ) except Exception as ex: LOGGER.error(ex) sys.exit(1) @mock_s3 def test_delete_object() -> None: path = "test.json" mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) put_object(mock_client, path) client = module.Client() try: client.delete_object(bucket=f"{BUCKET}_bar", path=path) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) try: client.delete_object(bucket=BUCKET, path=f"{path}_bar") except Exception as ex: if type(ex).__name__ != "ObjectNotFound": LOGGER.error("Wrong error type to handle NoSuchKey error") sys.exit(1) client.delete_object(bucket=BUCKET, path=path) obj_list = mock_client.list_objects_v2(Bucket=BUCKET, Prefix=path) if "Contents" in obj_list: LOGGER.error("Error deleting object") sys.exit(1) @mock_s3 def test_delete_objects() -> None: paths = ["test.json", "test1.json"] mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) for path in paths: put_object(mock_client, path) client = module.Client() try: client.delete_objects(bucket=f"{BUCKET}_bar", paths=paths) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) try: client.delete_objects(bucket=BUCKET, paths=[*paths, "test_bar.json"]) except Exception as ex: if type(ex).__name__ != "ObjectNotFound": LOGGER.error("Wrong error type to handle NoSuchKey error") sys.exit(1) client.delete_objects(bucket=BUCKET, paths=paths) for path in paths: obj_list = mock_client.list_objects_v2(Bucket=BUCKET, Prefix=path) if "Contents" in obj_list: LOGGER.error("Error deleting object") sys.exit(1) @mock_s3 def test_move() -> None: path = "test.json" mock_client = boto3.client("s3") mock_client.create_bucket(Bucket=BUCKET) mock_client.create_bucket(Bucket=f"{BUCKET}_destination") put_object(mock_client, path) client = 
module.Client() try: client.move( bucket_source=f"{BUCKET}_bar", bucket_destination=f"{BUCKET}_destination", path_source=path, ) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) try: client.move( bucket_source=BUCKET, bucket_destination=f"{BUCKET}_destination", path_source=f"{path}_bar", ) except Exception as ex: if type(ex).__name__ != "ObjectNotFound": LOGGER.error("Wrong error type to handle NoSuchKey error") sys.exit(1) try: client.move( bucket_source=BUCKET, bucket_destination=f"{BUCKET}_destination_bar", path_source=path, ) except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) client.move( bucket_source=BUCKET, bucket_destination=f"{BUCKET}_destination", path_source=path, ) obj_list_source = mock_client.list_objects_v2(Bucket=BUCKET, Prefix=path) obj_list_destination = mock_client.list_objects_v2(Bucket=f"{BUCKET}_destination", Prefix=path) if "Contents" in obj_list_source and len(obj_list_destination["Contents"]) == 1: LOGGER.error("Error moving object") sys.exit(1) obj = mock_client.get_object(Bucket=f"{BUCKET}_destination", Key=path) if json.loads(obj["Body"].read()) != OBJ_CONTENT: LOGGER.error("Error copying object - content") sys.exit(1) if obj["ContentType"] != "application/json": LOGGER.error("Error copying object - content type") sys.exit(1) def test_exceptions() -> None: client = module.Client() try: client.list_buckets() except Exception as ex: if type(ex).__name__ != "ConnectionError": LOGGER.error("Wrong error type to handle access error") sys.exit(1) try: client.list_objects("") except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") sys.exit(1) try: client.list_objects("t") except Exception as ex: if type(ex).__name__ != "BucketNotFound": LOGGER.error("Wrong error type to handle NoSuchBucket error") 
sys.exit(1)
31.438356
100
0.602669
2,176
18,360
4.887868
0.081801
0.062053
0.036856
0.051805
0.817131
0.796352
0.766924
0.73176
0.670741
0.652407
0
0.011963
0.266993
18,360
583
101
31.492281
0.778347
0.00512
0
0.651613
0
0
0.235639
0.01829
0
0
0
0
0
1
0.036559
false
0
0.019355
0
0.055914
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
5924e9896781e7e3e7fb3b91f233b0c24adf0dff
29
py
Python
python/queue/test/__init__.py
mario21ic/abstract-data-structures
2bf89db8cff4cf79c66d63eb84a96c33cda6cebd
[ "MIT" ]
null
null
null
python/queue/test/__init__.py
mario21ic/abstract-data-structures
2bf89db8cff4cf79c66d63eb84a96c33cda6cebd
[ "MIT" ]
null
null
null
python/queue/test/__init__.py
mario21ic/abstract-data-structures
2bf89db8cff4cf79c66d63eb84a96c33cda6cebd
[ "MIT" ]
null
null
null
from .queue import TestQueue
14.5
28
0.827586
4
29
6
1
0
0
0
0
0
0
0
0
0
0
0
0.137931
29
1
29
29
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
5950646a90e5da1c806b92ea4f610c1fb6403e41
146
py
Python
gluoncv/model_zoo/fpn/__init__.py
Angzz/gluon-cv-fpn
fc66b2c2fa97bc65e811aabe9d9d0cd16dc198ec
[ "Apache-2.0" ]
10
2019-01-16T10:08:37.000Z
2022-02-23T06:13:30.000Z
gluoncv/model_zoo/fpn/__init__.py
Angzz/gluon-cv-fpn
fc66b2c2fa97bc65e811aabe9d9d0cd16dc198ec
[ "Apache-2.0" ]
1
2019-01-18T06:42:59.000Z
2019-01-18T12:26:11.000Z
gluoncv/model_zoo/fpn/__init__.py
Angzz/gluon-cv-fpn
fc66b2c2fa97bc65e811aabe9d9d0cd16dc198ec
[ "Apache-2.0" ]
1
2019-08-30T12:03:58.000Z
2019-08-30T12:03:58.000Z
# pylint: disable=wildcard-import """Feature Pyramid Networks for Object Detection.""" from __future__ import absolute_import from .fpn import *
24.333333
52
0.787671
18
146
6.111111
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.123288
146
5
53
29.2
0.859375
0.541096
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
59513023e24543daebceff77b4dde77a52375a46
184
py
Python
example/efilling/apps.py
realnoobs/django-efilling
c105671d122d2f9b35320f12d9bb616f23ccd735
[ "MIT" ]
1
2021-09-15T17:12:13.000Z
2021-09-15T17:12:13.000Z
example/efilling/apps.py
realnoobs/django-efilling
c105671d122d2f9b35320f12d9bb616f23ccd735
[ "MIT" ]
null
null
null
example/efilling/apps.py
realnoobs/django-efilling
c105671d122d2f9b35320f12d9bb616f23ccd735
[ "MIT" ]
1
2021-09-12T14:08:05.000Z
2021-09-12T14:08:05.000Z
from django.apps import AppConfig as BaseAppConfig class AppConfig(BaseAppConfig): name = "example.efilling" label = "example_efilling" verbose_name = "Example Efilling"
23
50
0.75
20
184
6.8
0.65
0.330882
0.279412
0
0
0
0
0
0
0
0
0
0.173913
184
7
51
26.285714
0.894737
0
0
0
0
0
0.26087
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
3cc449bfce0d1a7fc1ae281d24df391b5822c948
207
py
Python
gan/modern-art-gen/settings.py
m3ngineer/ds-cookbook
933b4518e296c777555f742c908438eb12ae760f
[ "MIT" ]
null
null
null
gan/modern-art-gen/settings.py
m3ngineer/ds-cookbook
933b4518e296c777555f742c908438eb12ae760f
[ "MIT" ]
null
null
null
gan/modern-art-gen/settings.py
m3ngineer/ds-cookbook
933b4518e296c777555f742c908438eb12ae760f
[ "MIT" ]
null
null
null
import os # Path to training dataset ORIGINAL_IMAGES_PATH = os.environ.get('ORIGINAL_IMAGES_PATH') # Path to store thumbnails of training dataset RESIZED_IMAGES_PATH = os.environ.get('RESIZED_IMAGES_PATH')
29.571429
61
0.816425
31
207
5.193548
0.451613
0.248447
0.223602
0.236025
0.273292
0
0
0
0
0
0
0
0.10628
207
6
62
34.5
0.87027
0.333333
0
0
0
0
0.288889
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
3cdf85384e49fcc0bdcd42c654ae6e6cb6aa0d8b
129
py
Python
FaceSwap-master/pytorch_stylegan_encoder/InterFaceGAN/models/stylegan_tf_official/dnnlib/submission/run_context.py
CSID-DGU/-2020-1-OSSP1-ninetynine-2
b1824254882eeea0ee44e4e60896b72c51ef1d2c
[ "MIT" ]
1
2020-06-21T13:45:26.000Z
2020-06-21T13:45:26.000Z
FaceSwap-master/pytorch_stylegan_encoder/InterFaceGAN/models/stylegan_tf_official/dnnlib/submission/run_context.py
CSID-DGU/-2020-1-OSSP1-ninetynine-2
b1824254882eeea0ee44e4e60896b72c51ef1d2c
[ "MIT" ]
null
null
null
FaceSwap-master/pytorch_stylegan_encoder/InterFaceGAN/models/stylegan_tf_official/dnnlib/submission/run_context.py
CSID-DGU/-2020-1-OSSP1-ninetynine-2
b1824254882eeea0ee44e4e60896b72c51ef1d2c
[ "MIT" ]
3
2020-09-02T03:18:45.000Z
2021-01-27T08:24:05.000Z
version https://git-lfs.github.com/spec/v1 oid sha256:826c78ae9230314c70bd91ec7d777a152cc4afbfe99a1da1c544153d819a2b84 size 4349
32.25
75
0.883721
13
129
8.769231
1
0
0
0
0
0
0
0
0
0
0
0.390244
0.046512
129
3
76
43
0.536585
0
0
0
0
0
0
0
0
1
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
null
1
0
0
0
1
0
0
0
0
0
0
0
0
5
3cfda4b519d16e9bba71990c255e8e086aed5b86
3,323
py
Python
tavern/testutils/comparators.py
rubyszu/tavern
47766d1474f5f2b951ca482c41c055d4c79a7b13
[ "MIT" ]
null
null
null
tavern/testutils/comparators.py
rubyszu/tavern
47766d1474f5f2b951ca482c41c055d4c79a7b13
[ "MIT" ]
null
null
null
tavern/testutils/comparators.py
rubyszu/tavern
47766d1474f5f2b951ca482c41c055d4c79a7b13
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- def dict_equal(src_data,dst_data): assert type(src_data) == type(dst_data),"type: '{}' != '{}'".format(type(src_data), type(dst_data)) if isinstance(src_data,dict): for key in src_data: assert dst_data.has_key(key) dict_equal(src_data[key],dst_data[key]) elif isinstance(src_data,list): for src_list, dst_list in zip(sorted(src_data), sorted(dst_data)): dict_equal(src_list, dst_list) else: assert src_data == dst_data,"value '{}' != '{}'".format(src_data, dst_data) def list_equal(src_data, dst_data): assert len(src_data) == len(dst_data) if len(src_data) == len(dst_data): for i in range(len(src_data)): dict_equal(src_data[i], dst_data[i]) def equal(check_value, expect_value): assert check_value == expect_value def less_than(check_value, expect_value): assert check_value < expect_value def less_than_or_equals(check_value, expect_value): assert check_value <= expect_value def greater_than(check_value, expect_value): assert check_value > expect_value def greater_than_or_equals(check_value, expect_value): assert check_value >= expect_value def not_equals(check_value, expect_value): assert check_value != expect_value def string_equals(check_value, expect_value): assert builtin_str(check_value) == builtin_str(expect_value) def length_equals(check_value, expect_value): assert isinstance(expect_value, integer_types) assert len(check_value) == expect_value def length_greater_than(check_value, expect_value): assert isinstance(expect_value, integer_types) assert len(check_value) > expect_value def length_greater_than_or_equals(check_value, expect_value): assert isinstance(expect_value, integer_types) assert len(check_value) >= expect_value def length_less_than(check_value, expect_value): assert isinstance(expect_value, integer_types) assert len(check_value) < expect_value def length_less_than_or_equals(check_value, expect_value): assert isinstance(expect_value, integer_types) assert len(check_value) <= expect_value def contains(check_value, expect_value): 
assert isinstance(check_value, (list, tuple, dict, basestring)) assert expect_value in check_value def contained_by(check_value, expect_value): assert isinstance(expect_value, (list, tuple, dict, basestring)) assert check_value in expect_value def type_match(check_value, expect_value): def get_type(name): if isinstance(name, type): return name elif isinstance(name, basestring): try: return __builtins__[name] except KeyError: raise ValueError(name) else: raise ValueError(name) assert isinstance(check_value, get_type(expect_value)) def regex_match(check_value, expect_value): assert isinstance(expect_value, basestring) assert isinstance(check_value, basestring) assert re.match(expect_value, check_value) def startswith(check_value, expect_value): assert builtin_str(check_value).startswith(builtin_str(expect_value)) def endswith(check_value, expect_value): assert builtin_str(check_value).endswith(builtin_str(expect_value))
35.351064
103
0.719831
454
3,323
4.914097
0.132159
0.212013
0.207978
0.272972
0.66069
0.623935
0.51636
0.51636
0.47333
0.41013
0
0.000371
0.187782
3,323
94
104
35.351064
0.826232
0.00632
0
0.126761
0
0
0.010906
0
0
0
0
0
0.43662
1
0.295775
false
0
0
0
0.323944
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
0
0
0
5
a7394acf7227facb07b975690f92ba5aabcb481d
130
py
Python
examples/docker/emloop_example/dummy/__init__.py
iterait/shepherd
0847c9885584378dd68a48c40d03f9bb02b2b57c
[ "MIT" ]
5
2018-10-13T19:03:07.000Z
2019-02-25T06:44:27.000Z
examples/docker/emloop_example/dummy/__init__.py
iterait/shepherd
0847c9885584378dd68a48c40d03f9bb02b2b57c
[ "MIT" ]
62
2018-09-13T08:03:39.000Z
2022-01-03T09:05:54.000Z
examples/docker/emloop_example/dummy/__init__.py
iterait/shepherd
0847c9885584378dd68a48c40d03f9bb02b2b57c
[ "MIT" ]
null
null
null
from .dummy_dataset import DummyDataset from .dummy_model import DummyModel from .post_process_dataset import PostProcessDataset
26
52
0.876923
16
130
6.875
0.625
0.163636
0
0
0
0
0
0
0
0
0
0
0.1
130
4
53
32.5
0.940171
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
595c88dc21f98daed29cce83946c50a8db669e6c
136
py
Python
django/app_whoami/urls.py
a-rey/aaronmreyes_heroku
f397741ec33a35c318b6e4d51837b352183085f9
[ "MIT" ]
1
2022-03-12T22:23:44.000Z
2022-03-12T22:23:44.000Z
django/app_whoami/urls.py
a-rey/docker_website
f397741ec33a35c318b6e4d51837b352183085f9
[ "MIT" ]
2
2020-04-07T22:09:50.000Z
2020-04-07T22:09:50.000Z
django/app_whoami/urls.py
a-rey/docker_website
f397741ec33a35c318b6e4d51837b352183085f9
[ "MIT" ]
null
null
null
import django.conf.urls import app_whoami.views urlpatterns = [ django.conf.urls.url(r'^$', app_whoami.views.main, name='main'), ]
15.111111
66
0.713235
20
136
4.75
0.6
0.210526
0.294737
0
0
0
0
0
0
0
0
0
0.117647
136
8
67
17
0.791667
0
0
0
0
0
0.044118
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
59778ad8e3aac09bdb8cfae07283d5c76df2e611
415
py
Python
PruebaFinal/apps/encargado/models.py
Estuardiaz2611/PruebaFinalC
3bec3f5f9c158744827e633c01d6a106125f56f3
[ "MIT" ]
1
2020-05-28T19:40:40.000Z
2020-05-28T19:40:40.000Z
PruebaFinal/apps/encargado/models.py
Estuardiaz2611/PruebaFinalC
3bec3f5f9c158744827e633c01d6a106125f56f3
[ "MIT" ]
null
null
null
PruebaFinal/apps/encargado/models.py
Estuardiaz2611/PruebaFinalC
3bec3f5f9c158744827e633c01d6a106125f56f3
[ "MIT" ]
null
null
null
from django.db import models # Create your models here. class Encargado(models.Model): nombre_encargado = models.CharField(max_length=50) apellido_encargado = models.CharField(max_length=50) genero = models.CharField(max_length=10) nombre_usuario = models.CharField(max_length=20) fecha_nacimiento = models.DateField() def __str__(self): return '{}'.format(self.nombre_encargado)
29.642857
56
0.742169
52
415
5.673077
0.557692
0.20339
0.244068
0.325424
0.237288
0.237288
0
0
0
0
0
0.022857
0.156627
415
13
57
31.923077
0.82
0.057831
0
0
0
0
0.005141
0
0
0
0
0
0
1
0.111111
false
0
0.111111
0.111111
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
59797193d4d89e3913f0f6cd9235aa982ca818a3
87
py
Python
models/__init__.py
sanjaybasu/claimsrisk
13fcb365232a3c6ef643dc583e872a8b8852bf37
[ "MIT" ]
1
2021-06-14T20:11:03.000Z
2021-06-14T20:11:03.000Z
models/__init__.py
sanjaybasu/claimsrisk
13fcb365232a3c6ef643dc583e872a8b8852bf37
[ "MIT" ]
null
null
null
models/__init__.py
sanjaybasu/claimsrisk
13fcb365232a3c6ef643dc583e872a8b8852bf37
[ "MIT" ]
1
2020-01-31T19:54:52.000Z
2020-01-31T19:54:52.000Z
from .LinReg import LinReg from .XGBoost import XGBoost from .LightGBM import LightGBM
21.75
30
0.827586
12
87
6
0.416667
0
0
0
0
0
0
0
0
0
0
0
0.137931
87
3
31
29
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
597eb3779d9694f14a5de3ad55ff8c6e40342374
101
py
Python
src/__init__.py
lauryndbrown/AdvancedLaneFinding
644633623d01db2d267c9090d55853633f09e050
[ "MIT" ]
null
null
null
src/__init__.py
lauryndbrown/AdvancedLaneFinding
644633623d01db2d267c9090d55853633f09e050
[ "MIT" ]
null
null
null
src/__init__.py
lauryndbrown/AdvancedLaneFinding
644633623d01db2d267c9090d55853633f09e050
[ "MIT" ]
null
null
null
#from __future__ import absolute_import import os os.path.dirname(os.path.abspath('')) print(os.path)
25.25
39
0.792079
16
101
4.6875
0.5625
0.24
0
0
0
0
0
0
0
0
0
0
0.069307
101
4
40
25.25
0.797872
0.376238
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.333333
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
5994ac58aa4b297c98a2234d119d32ff078f86ef
170
py
Python
Begining/admin.py
Waleed-Daud/Alzitoona
0f8cd859dfab722050e56dc3001cd5a6c1440c97
[ "Apache-2.0" ]
null
null
null
Begining/admin.py
Waleed-Daud/Alzitoona
0f8cd859dfab722050e56dc3001cd5a6c1440c97
[ "Apache-2.0" ]
null
null
null
Begining/admin.py
Waleed-Daud/Alzitoona
0f8cd859dfab722050e56dc3001cd5a6c1440c97
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin # Register your models here. from Begining.models import Student,Professor admin.site.register(Student) admin.site.register(Professor)
21.25
45
0.823529
23
170
6.086957
0.565217
0.128571
0.242857
0
0
0
0
0
0
0
0
0
0.1
170
7
46
24.285714
0.915033
0.152941
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
59a2b9baf24f3530e907183c528c273725d11e26
152
py
Python
src/category/apps.py
NestorMonroy/GreatKart
c417faed7e1ec430fd676b58f618cb66e7c07785
[ "MIT" ]
null
null
null
src/category/apps.py
NestorMonroy/GreatKart
c417faed7e1ec430fd676b58f618cb66e7c07785
[ "MIT" ]
null
null
null
src/category/apps.py
NestorMonroy/GreatKart
c417faed7e1ec430fd676b58f618cb66e7c07785
[ "MIT" ]
null
null
null
""" Category app """ from django.apps import AppConfig class CategoryAppConfig(AppConfig): """ Category app config """ name = 'src.category'
16.888889
35
0.677632
16
152
6.4375
0.75
0.213592
0
0
0
0
0
0
0
0
0
0
0.190789
152
8
36
19
0.837398
0.217105
0
0
0
0
0.114286
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
59bb0ddb8d7ada50d005208b1d40d0f9b202d0a0
23
py
Python
Chapter 01/Chap01_Example1.174.py
Anancha/Programming-Techniques-using-Python
e80c329d2a27383909d358741a5cab03cb22fd8b
[ "MIT" ]
null
null
null
Chapter 01/Chap01_Example1.174.py
Anancha/Programming-Techniques-using-Python
e80c329d2a27383909d358741a5cab03cb22fd8b
[ "MIT" ]
null
null
null
Chapter 01/Chap01_Example1.174.py
Anancha/Programming-Techniques-using-Python
e80c329d2a27383909d358741a5cab03cb22fd8b
[ "MIT" ]
null
null
null
print(complex('Seven'))
23
23
0.73913
3
23
5.666667
1
0
0
0
0
0
0
0
0
0
0
0
0
23
1
23
23
0.73913
0
0
0
0
0
0.208333
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
59d0aa590b5e8abd13aa5652b58b2992b4a91f7f
148
py
Python
BasicExerciseAndKnowledge/w3cschool/n40_reverse_a_list.py
Jonathan1214/learn-python
19d0299b30e953069f19402bff5c464c4d5580be
[ "MIT" ]
null
null
null
BasicExerciseAndKnowledge/w3cschool/n40_reverse_a_list.py
Jonathan1214/learn-python
19d0299b30e953069f19402bff5c464c4d5580be
[ "MIT" ]
null
null
null
BasicExerciseAndKnowledge/w3cschool/n40_reverse_a_list.py
Jonathan1214/learn-python
19d0299b30e953069f19402bff5c464c4d5580be
[ "MIT" ]
null
null
null
#coding:utf-8 # 题目:将一个数组逆序输出。 lt = [12, 34, 45, 43] def print_in_reverse(lt): for item in lt[::-1]: print item, print_in_reverse(lt)
13.454545
25
0.614865
26
148
3.346154
0.653846
0.16092
0.321839
0.367816
0
0
0
0
0
0
0
0.086957
0.222973
148
10
26
14.8
0.669565
0.175676
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0.6
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
ab73c6336a48fb0e20223352874a6f2440af6957
166
py
Python
retinanet/dataloader/__init__.py
lchen-wyze/retinanet-tensorflow2.x
86404a2da6ec636d4b1aef768ac52f018c127798
[ "Apache-2.0" ]
36
2020-09-23T13:32:47.000Z
2022-03-29T18:53:58.000Z
retinanet/dataloader/__init__.py
lchen-wyze/retinanet-tensorflow2.x
86404a2da6ec636d4b1aef768ac52f018c127798
[ "Apache-2.0" ]
12
2020-10-25T09:07:58.000Z
2021-11-17T12:53:50.000Z
retinanet/dataloader/__init__.py
lchen-wyze/retinanet-tensorflow2.x
86404a2da6ec636d4b1aef768ac52f018c127798
[ "Apache-2.0" ]
9
2020-11-12T20:03:06.000Z
2022-01-03T12:40:48.000Z
from retinanet.dataloader.input_pipeline import InputPipeline from retinanet.dataloader.utils import normalize_image __all__ = ['InputPipeline', 'normalize_image']
27.666667
61
0.843373
18
166
7.388889
0.611111
0.195489
0.345865
0
0
0
0
0
0
0
0
0
0.084337
166
5
62
33.2
0.875
0
0
0
0
0
0.168675
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
ab7e979e821166a0728af2dfbd80eb7cfe49b239
2,258
py
Python
tests/test_MessengerHookAttachments.py
iizs/PyDooray
ec42d8814fbe6936469f8f25c9cd1e7222fab23e
[ "MIT" ]
17
2021-12-05T23:36:01.000Z
2022-01-08T04:41:52.000Z
tests/test_MessengerHookAttachments.py
iizs/PyDooray
ec42d8814fbe6936469f8f25c9cd1e7222fab23e
[ "MIT" ]
null
null
null
tests/test_MessengerHookAttachments.py
iizs/PyDooray
ec42d8814fbe6936469f8f25c9cd1e7222fab23e
[ "MIT" ]
null
null
null
import unittest import dooray class TestMessengerHookAttachments(unittest.TestCase): def testCreateAttachment(self): self.assertEqual(dooray.MessengerHookAttachments._create_attachment(None, None, None, None), None) self.assertEqual( dooray.MessengerHookAttachments._create_attachment('My title', None, None, None), {'title': 'My title'} ) self.assertEqual( dooray.MessengerHookAttachments._create_attachment('My title', 'http://a.com', None, None), {'title': 'My title', 'titleLink': 'http://a.com'} ) self.assertEqual( dooray.MessengerHookAttachments._create_attachment('My title', 'http://a.com', 'My text', None), {'title': 'My title', 'titleLink': 'http://a.com', 'text': 'My text'} ) self.assertEqual( dooray.MessengerHookAttachments._create_attachment('My title', 'http://a.com', 'My text', 'red'), {'title': 'My title', 'titleLink': 'http://a.com', 'text': 'My text', 'color': 'red'} ) self.assertEqual( dooray.MessengerHookAttachments._create_attachment('My title', None, 'My text', 'red'), {'title': 'My title', 'text': 'My text', 'color': 'red'} ) self.assertEqual( dooray.MessengerHookAttachments._create_attachment('My title', 'http://a.com', None, 'red'), {'title': 'My title', 'titleLink': 'http://a.com', 'color': 'red'} ) self.assertEqual( dooray.MessengerHookAttachments._create_attachment(None, 'http://a.com', 'My text', 'red'), {'titleLink': 'http://a.com', 'text': 'My text', 'color': 'red'} ) def testBuilder(self): self.assertEqual( dooray.MessengerHookAttachments.builder() .add_attachment(title='My title') .create(), [{'title': 'My title'}] ) self.assertEqual( dooray.MessengerHookAttachments.builder() .add_attachment(title='My title') .add_attachment(title_link='http://b.com', text='My text') .create(), [{'title': 'My title'}, {'titleLink': 'http://b.com', 'text': 'My text'}] )
43.423077
109
0.570416
218
2,258
5.816514
0.12844
0.088328
0.165615
0.35489
0.87224
0.84306
0.7847
0.711356
0.600158
0.512618
0
0
0.26085
2,258
51
110
44.27451
0.759736
0
0
0.319149
0
0
0.221435
0
0
0
0
0
0.212766
1
0.042553
false
0
0.042553
0
0.106383
0
0
0
0
null
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
abaff3c431ad4030441ad4e634e3de19421837d5
112
py
Python
molecule_ignite/test/conftest.py
ragingpastry/molecule-ignite
aaf005cabba9a8c933191458cf8553da9bac581d
[ "MIT" ]
17
2020-02-19T08:16:49.000Z
2022-02-05T08:16:42.000Z
molecule_ignite/test/conftest.py
ragingpastry/molecule-ignite
aaf005cabba9a8c933191458cf8553da9bac581d
[ "MIT" ]
13
2020-02-18T15:32:06.000Z
2022-03-31T10:58:34.000Z
molecule_ignite/test/conftest.py
ragingpastry/molecule-ignite
aaf005cabba9a8c933191458cf8553da9bac581d
[ "MIT" ]
11
2020-02-18T16:24:29.000Z
2022-03-28T11:44:51.000Z
"""Pytest Fixtures.""" import pytest # noqa from molecule.test.conftest import random_string, temp_dir # noqa
28
66
0.758929
15
112
5.533333
0.8
0
0
0
0
0
0
0
0
0
0
0
0.133929
112
3
67
37.333333
0.85567
0.241071
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
abe1d93fbf63dc5adb730da8c93bcefc1e3efb5e
105
py
Python
fusion/scheduling/batch_size.py
SheaCai/optimus
e9a9a2354376c786d7e6c64e34dee2c2010e5585
[ "MIT" ]
3
2021-05-05T06:55:38.000Z
2022-02-19T06:19:59.000Z
fusion/scheduling/batch_size.py
SheaCai/optimus
e9a9a2354376c786d7e6c64e34dee2c2010e5585
[ "MIT" ]
null
null
null
fusion/scheduling/batch_size.py
SheaCai/optimus
e9a9a2354376c786d7e6c64e34dee2c2010e5585
[ "MIT" ]
null
null
null
def init(bs): global batch_size batch_size = bs def get_batch_size(): return batch_size
9.545455
21
0.666667
16
105
4.0625
0.5
0.553846
0
0
0
0
0
0
0
0
0
0
0.266667
105
10
22
10.5
0.844156
0
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0
0.2
0.6
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
e6110dc25d5258c7631b8bea33a6beb104cc6f69
790
py
Python
Taller Estructuras de Control Selectivas/punto10.py
Diegof2612/Algoritmos-y-Programaci-n-Talleres
2aa645adac5d4de1037b60138e7b3a09807b3f13
[ "MIT" ]
null
null
null
Taller Estructuras de Control Selectivas/punto10.py
Diegof2612/Algoritmos-y-Programaci-n-Talleres
2aa645adac5d4de1037b60138e7b3a09807b3f13
[ "MIT" ]
null
null
null
Taller Estructuras de Control Selectivas/punto10.py
Diegof2612/Algoritmos-y-Programaci-n-Talleres
2aa645adac5d4de1037b60138e7b3a09807b3f13
[ "MIT" ]
1
2021-08-04T16:40:29.000Z
2021-08-04T16:40:29.000Z
""" ENTRADAS salario_bruto-->float-->salario_bruto SALIDAS sueldo_neto-->float-->salario_neto """ salario_bruto=float(input("Digite el salario bruto ") sueldo_neto=0.0#float if(salario_bruto>=5_000_000): salario_neto=salario_bruto*0.10+salario_bruto print("El salario neto es: ",salario_neto) elif(salario_bruto>=4_300_000): salario_neto=salario_bruto*0.15+salario_bruto print("El salario neto es ",salario_neto) elif(salario_bruto>=3_600_000): salario_neto=salario_bruto*0.20+salario_bruto print("El salario neto es ",salario_neto) elif(salario_bruto>=2_000_000): salario_neto=salario_bruto*0.40+salario_bruto print("El salario neto es ",salario_neto) elif(salario_bruto>=900_000): salario_neto=salario_bruto*0.60+salario_bruto print("El salario neto es ",salario_neto)
34.347826
53
0.792405
129
790
4.542636
0.217054
0.389079
0.1843
0.235495
0.716724
0.716724
0.578498
0.476109
0.476109
0.40273
0
0.070442
0.083544
790
23
54
34.347826
0.73895
0
0
0.235294
0
0
0.174419
0
0
0
0
0
0
0
null
null
0
0
null
null
0.294118
0
0
0
null
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
e665496d739cfabf457f8edcdeaf68e80f3f0c5f
73
py
Python
src/constellix/protocol/__init__.py
aperim/python-constellix
11cb19fce5cc00aefd14f8ac6bf63dc2f98731ae
[ "CC0-1.0" ]
null
null
null
src/constellix/protocol/__init__.py
aperim/python-constellix
11cb19fce5cc00aefd14f8ac6bf63dc2f98731ae
[ "CC0-1.0" ]
null
null
null
src/constellix/protocol/__init__.py
aperim/python-constellix
11cb19fce5cc00aefd14f8ac6bf63dc2f98731ae
[ "CC0-1.0" ]
null
null
null
"""Protocol Support for Constellix API""" from .protocol import Protocol
24.333333
41
0.780822
9
73
6.333333
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.123288
73
3
42
24.333333
0.890625
0.479452
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
054a24e69f73d6f042e41e5b4f45fe80f621042a
219
py
Python
scrape/src/__main__.py
kagemeka/akibasouken-python
0805404b0c2b3acefa45a28610a85cacc7771450
[ "MIT" ]
null
null
null
scrape/src/__main__.py
kagemeka/akibasouken-python
0805404b0c2b3acefa45a28610a85cacc7771450
[ "MIT" ]
null
null
null
scrape/src/__main__.py
kagemeka/akibasouken-python
0805404b0c2b3acefa45a28610a85cacc7771450
[ "MIT" ]
null
null
null
import typing from lib.adam import update_current_animes def main(): update_current_animes() def lambda_handler(event, context) -> typing.NoReturn: update_current_animes() if __name__ == '__main__': main()
13.6875
54
0.748858
28
219
5.321429
0.607143
0.261745
0.38255
0.295302
0
0
0
0
0
0
0
0
0.150685
219
16
55
13.6875
0.801075
0
0
0.25
0
0
0.036364
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
055b80ba6fa663a5c08682d83086afe31145a153
98
py
Python
python/basics/random-number.py
u1i/snippets
cc61b5ecaede1d1013df51c7b1b6ab10d927f95c
[ "MIT" ]
1
2018-06-24T15:40:40.000Z
2018-06-24T15:40:40.000Z
python/basics/random-number.py
u1i/snippets
cc61b5ecaede1d1013df51c7b1b6ab10d927f95c
[ "MIT" ]
null
null
null
python/basics/random-number.py
u1i/snippets
cc61b5ecaede1d1013df51c7b1b6ab10d927f95c
[ "MIT" ]
null
null
null
from random import randint # print random number between 0 and 9 (inclusive) print(randint(0,9))
19.6
49
0.765306
16
98
4.6875
0.6875
0
0
0
0
0
0
0
0
0
0
0.048193
0.153061
98
4
50
24.5
0.855422
0.479592
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
552c58b9afcecafbc55cf66bf03ae6389c79263a
262
py
Python
stud_api/student/models.py
Neeraj2701/numpy
bbc3167427eb8ecafeee3c5c9606b3532405dd96
[ "BSD-3-Clause" ]
null
null
null
stud_api/student/models.py
Neeraj2701/numpy
bbc3167427eb8ecafeee3c5c9606b3532405dd96
[ "BSD-3-Clause" ]
null
null
null
stud_api/student/models.py
Neeraj2701/numpy
bbc3167427eb8ecafeee3c5c9606b3532405dd96
[ "BSD-3-Clause" ]
null
null
null
from django.db import models class Stud(models.Model): Stud_ID=models.CharField(max_length=10) Stud_Name=models.CharField(max_length=20) Department=models.CharField(max_length=15) Marks=models.IntegerField() def __str__(self): return self.Stud_Name
18.714286
43
0.790076
39
262
5.051282
0.589744
0.228426
0.274112
0.365482
0
0
0
0
0
0
0
0.025532
0.103053
262
13
44
20.153846
0.812766
0
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0.125
0.125
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
5
554874a08b9af6eb97c17e6ce0b9feffc93efbd0
472
py
Python
pyrealtime/__init__.py
ewhitmire/pyrealtime
5cfd37ff7b05cf33d2aab9b9f45188ddf7c76db4
[ "MIT" ]
62
2017-07-27T18:09:14.000Z
2021-07-19T00:09:40.000Z
pyrealtime/__init__.py
ewhitmire/pyrealtime
5cfd37ff7b05cf33d2aab9b9f45188ddf7c76db4
[ "MIT" ]
24
2017-06-24T03:26:45.000Z
2020-11-11T15:24:29.000Z
pyrealtime/__init__.py
ewhitmire/pyrealtime
5cfd37ff7b05cf33d2aab9b9f45188ddf7c76db4
[ "MIT" ]
15
2017-07-02T23:22:25.000Z
2020-10-28T15:23:58.000Z
# from .audio_layers import * from .decode_layer import * from .input_layers import * from .layer import * from .layer_manager import * from .network_layers import * from .pygame import * from .record_layer import * from .script_layers import * from .serial_layer import * from .utility_layers import * from .utils import * from .plotting import * from .subprocess import * from .filter_layers import * from .decorators import * from .buffers import * from .nidaq import *
26.222222
29
0.769068
64
472
5.515625
0.328125
0.481586
0.271955
0
0
0
0
0
0
0
0
0
0.152542
472
18
30
26.222222
0.8825
0.057203
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
5556eed02044751bc94ee833a2c89fa805f8a02d
42
py
Python
algo/__init__.py
softarts/tradebook
bdce9206e94147e93c547edb2d1d2aebd17f2f9c
[ "Apache-2.0" ]
null
null
null
algo/__init__.py
softarts/tradebook
bdce9206e94147e93c547edb2d1d2aebd17f2f9c
[ "Apache-2.0" ]
null
null
null
algo/__init__.py
softarts/tradebook
bdce9206e94147e93c547edb2d1d2aebd17f2f9c
[ "Apache-2.0" ]
null
null
null
#from . import period_perf as period_perf2
42
42
0.833333
7
42
4.714286
0.857143
0
0
0
0
0
0
0
0
0
0
0.027027
0.119048
42
1
42
42
0.864865
0.97619
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
556645ed45c0298764132b4eeb8302441534b013
90
py
Python
src/error.py
kbinani/dxrip
ef170fd895c6d9bb6c05bc2026e9fa9bcd0b1908
[ "MIT" ]
4
2015-09-23T14:12:07.000Z
2021-10-04T21:03:32.000Z
src/error.py
kbinani/dxrip
ef170fd895c6d9bb6c05bc2026e9fa9bcd0b1908
[ "MIT" ]
null
null
null
src/error.py
kbinani/dxrip
ef170fd895c6d9bb6c05bc2026e9fa9bcd0b1908
[ "MIT" ]
2
2018-06-26T14:59:11.000Z
2021-09-01T01:50:20.000Z
class Error(Exception): def __init__(self, messages): self.messages = messages
30
33
0.688889
10
90
5.8
0.7
0.413793
0
0
0
0
0
0
0
0
0
0
0.211111
90
3
34
30
0.816901
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
5573c6a5d7f8d7e9760db721b7348f9dbb10e14a
166
py
Python
util/generic/benchmark/fastclp2/metrics/main.py
jochenater/catboost
de2786fbc633b0d6ea6a23b3862496c6151b95c2
[ "Apache-2.0" ]
6,989
2017-07-18T06:23:18.000Z
2022-03-31T15:58:36.000Z
util/generic/benchmark/fastclp2/metrics/main.py
jochenater/catboost
de2786fbc633b0d6ea6a23b3862496c6151b95c2
[ "Apache-2.0" ]
1,978
2017-07-18T09:17:58.000Z
2022-03-31T14:28:43.000Z
util/generic/benchmark/fastclp2/metrics/main.py
jochenater/catboost
de2786fbc633b0d6ea6a23b3862496c6151b95c2
[ "Apache-2.0" ]
1,228
2017-07-18T09:03:13.000Z
2022-03-29T05:57:40.000Z
import yatest.common as yc def test_export_metrics(metrics): metrics.set_benchmark(yc.execute_benchmark('util/generic/benchmark/fastclp2/fastclp2', threads=8))
27.666667
102
0.807229
23
166
5.652174
0.73913
0.215385
0
0
0
0
0
0
0
0
0
0.019737
0.084337
166
5
103
33.2
0.835526
0
0
0
0
0
0.240964
0.240964
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
0
0
0
5
557f8392992c5efaf65b97edc31fb7606c3623a1
227
py
Python
src/ctc/evm/block_utils/__init__.py
fei-protocol/checkthechain
ec838f3d0d44af228f45394d9ba8d8eb7f677520
[ "MIT" ]
94
2022-02-15T19:34:49.000Z
2022-03-26T19:26:22.000Z
src/ctc/evm/block_utils/__init__.py
fei-protocol/checkthechain
ec838f3d0d44af228f45394d9ba8d8eb7f677520
[ "MIT" ]
7
2022-03-03T02:58:47.000Z
2022-03-11T18:41:05.000Z
src/ctc/evm/block_utils/__init__.py
fei-protocol/checkthechain
ec838f3d0d44af228f45394d9ba8d8eb7f677520
[ "MIT" ]
7
2022-02-15T17:53:07.000Z
2022-03-17T19:14:17.000Z
from .block_analysis import * from .block_chunks import * from .block_creations import * from .block_crud import * from .block_gas import * from .block_normalize import * from .block_summary import * from .block_times import *
25.222222
30
0.788546
32
227
5.34375
0.34375
0.421053
0.614035
0
0
0
0
0
0
0
0
0
0.140969
227
8
31
28.375
0.876923
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
e96500ca8ce853595ee1940c1977e338dd6e6ae3
35,927
py
Python
dpctl/tensor/_ctors.py
IntelPython/pydppl
e032bc35459b818eae6b0009d13335aa512d3aac
[ "Apache-2.0" ]
5
2020-07-15T13:29:18.000Z
2020-09-15T15:39:30.000Z
dpctl/tensor/_ctors.py
IntelPython/pydppl
e032bc35459b818eae6b0009d13335aa512d3aac
[ "Apache-2.0" ]
39
2020-07-31T17:27:14.000Z
2020-09-22T18:56:42.000Z
dpctl/tensor/_ctors.py
IntelPython/pydppl
e032bc35459b818eae6b0009d13335aa512d3aac
[ "Apache-2.0" ]
4
2020-08-05T17:43:48.000Z
2020-09-14T19:53:12.000Z
# Data Parallel Control (dpctl) # # Copyright 2020-2021 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np import dpctl import dpctl.memory as dpm import dpctl.tensor as dpt import dpctl.tensor._tensor_impl as ti import dpctl.utils from dpctl.tensor._device import normalize_queue_device _empty_tuple = tuple() _host_set = frozenset([None]) def _get_dtype(dtype, sycl_obj, ref_type=None): if dtype is None: if ref_type in [None, float] or np.issubdtype(ref_type, np.floating): dtype = ti.default_device_fp_type(sycl_obj) return np.dtype(dtype) elif ref_type in [bool, np.bool_]: dtype = ti.default_device_bool_type(sycl_obj) return np.dtype(dtype) elif ref_type is int or np.issubdtype(ref_type, np.integer): dtype = ti.default_device_int_type(sycl_obj) return np.dtype(dtype) elif ref_type is complex or np.issubdtype(ref_type, np.complexfloating): dtype = ti.default_device_complex_type(sycl_obj) return np.dtype(dtype) else: raise ValueError(f"Reference type {ref_type} not recognized.") else: return np.dtype(dtype) def _array_info_dispatch(obj): if isinstance(obj, dpt.usm_ndarray): return obj.shape, obj.dtype, frozenset([obj.sycl_queue]) elif isinstance(obj, np.ndarray): return obj.shape, obj.dtype, _host_set elif isinstance(obj, range): return (len(obj),), int, _host_set elif isinstance(obj, bool): return _empty_tuple, bool, _host_set elif isinstance(obj, float): return _empty_tuple, float, _host_set elif isinstance(obj, int): return _empty_tuple, int, 
_host_set elif isinstance(obj, complex): return _empty_tuple, complex, _host_set elif isinstance(obj, (list, tuple, range)): return _array_info_sequence(obj) elif any( isinstance(obj, s) for s in [np.integer, np.floating, np.complexfloating, np.bool_] ): return _empty_tuple, obj.dtype, _host_set else: raise ValueError(type(obj)) def _array_info_sequence(li): assert isinstance(li, (list, tuple, range)) n = len(li) dim = None dt = None device = frozenset() for el in li: el_dim, el_dt, el_dev = _array_info_dispatch(el) if dim is None: dim = el_dim dt = np.promote_types(el_dt, el_dt) device = device.union(el_dev) elif el_dim == dim: dt = np.promote_types(dt, el_dt) device = device.union(el_dev) else: raise ValueError( "Inconsistent dimensions, {} and {}".format(dim, el_dim) ) if dim is None: dim = tuple() dt = float device = _host_set return (n,) + dim, dt, device def _asarray_from_usm_ndarray( usm_ndary, dtype=None, copy=None, usm_type=None, sycl_queue=None, order="K", ): if not isinstance(usm_ndary, dpt.usm_ndarray): raise TypeError( f"Expected dpctl.tensor.usm_ndarray, got {type(usm_ndary)}" ) if dtype is None: dtype = usm_ndary.dtype if usm_type is None: usm_type = usm_ndary.usm_type if sycl_queue is not None: exec_q = dpctl.utils.get_execution_queue( [usm_ndary.sycl_queue, sycl_queue] ) copy_q = normalize_queue_device(sycl_queue=sycl_queue, device=exec_q) else: copy_q = usm_ndary.sycl_queue # Conditions for zero copy: can_zero_copy = copy is not True # dtype is unchanged can_zero_copy = can_zero_copy and dtype == usm_ndary.dtype # USM allocation type is unchanged can_zero_copy = can_zero_copy and usm_type == usm_ndary.usm_type # sycl_queue is unchanged can_zero_copy = can_zero_copy and copy_q is usm_ndary.sycl_queue # order is unchanged c_contig = usm_ndary.flags & 1 f_contig = usm_ndary.flags & 2 fc_contig = usm_ndary.flags & 3 if can_zero_copy: if order == "C" and c_contig: pass elif order == "F" and f_contig: pass elif order == "A" and fc_contig: pass elif 
order == "K": pass else: can_zero_copy = False if copy is False and can_zero_copy is False: raise ValueError("asarray(..., copy=False) is not possible") if can_zero_copy: return usm_ndary if order == "A": order = "F" if f_contig and not c_contig else "C" if order == "K" and fc_contig: order = "C" if c_contig else "F" if order == "K": # new USM allocation res = dpt.usm_ndarray( usm_ndary.shape, dtype=dtype, buffer=usm_type, order="C", buffer_ctor_kwargs={"queue": copy_q}, ) original_strides = usm_ndary.strides ind = sorted( range(usm_ndary.ndim), key=lambda i: abs(original_strides[i]), reverse=True, ) new_strides = tuple(res.strides[ind[i]] for i in ind) # reuse previously made USM allocation res = dpt.usm_ndarray( usm_ndary.shape, dtype=res.dtype, buffer=res.usm_data, strides=new_strides, ) else: res = dpt.usm_ndarray( usm_ndary.shape, dtype=dtype, buffer=usm_type, order=order, buffer_ctor_kwargs={"queue": copy_q}, ) # FIXME: call copy_to when implemented res[(slice(None, None, None),) * res.ndim] = usm_ndary return res def _asarray_from_numpy_ndarray( ary, dtype=None, usm_type=None, sycl_queue=None, order="K" ): if not isinstance(ary, np.ndarray): raise TypeError(f"Expected numpy.ndarray, got {type(ary)}") if usm_type is None: usm_type = "device" if dtype is None: dtype = ary.dtype copy_q = normalize_queue_device(sycl_queue=None, device=sycl_queue) f_contig = ary.flags["F"] c_contig = ary.flags["C"] fc_contig = f_contig or c_contig if order == "A": order = "F" if f_contig and not c_contig else "C" if order == "K" and fc_contig: order = "C" if c_contig else "F" if order == "K": # new USM allocation res = dpt.usm_ndarray( ary.shape, dtype=dtype, buffer=usm_type, order="C", buffer_ctor_kwargs={"queue": copy_q}, ) original_strides = ary.strides ind = sorted( range(ary.ndim), key=lambda i: abs(original_strides[i]), reverse=True, ) new_strides = tuple(res.strides[ind[i]] for i in ind) # reuse previously made USM allocation res = dpt.usm_ndarray( res.shape, 
dtype=res.dtype, buffer=res.usm_data, strides=new_strides ) else: res = dpt.usm_ndarray( ary.shape, dtype=dtype, buffer=usm_type, order=order, buffer_ctor_kwargs={"queue": copy_q}, ) # FIXME: call copy_to when implemented res[(slice(None, None, None),) * res.ndim] = ary return res def _is_object_with_buffer_protocol(obj): "Returns True if object support Python buffer protocol" try: # use context manager to ensure # buffer is instantly released with memoryview(obj): return True except TypeError: return False def asarray( obj, dtype=None, device=None, copy=None, usm_type=None, sycl_queue=None, order="K", ): """ Converts `obj` to :class:`dpctl.tensor.usm_ndarray`. Args: obj: Python object to convert. Can be an instance of `usm_ndarray`, an object representing SYCL USM allocation and implementing `__sycl_usm_array_interface__` protocol, an instance of `numpy.ndarray`, an object supporting Python buffer protocol, a Python scalar, or a (possibly nested) sequence of Python scalars. dtype (data type, optional): output array data type. If `dtype` is `None`, the output array data type is inferred from data types in `obj`. Default: `None` copy (`bool`, optional): boolean indicating whether or not to copy the input. If `True`, always creates a copy. If `False`, need to copy raises `ValueError`. If `None`, try to reuse existing memory allocations if possible, but allowed to perform a copy otherwise. Default: `None`. order ("C","F","A","K", optional): memory layout of the output array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returned by `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. 
For `usm_type=None` the allocation type is inferred from the input if `obj` has USM allocation, or `"device"` is used instead. Default: `None`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ # 1. Check that copy is a valid keyword if copy not in [None, True, False]: raise TypeError( "Recognized copy keyword values should be True, False, or None" ) # 2. Check that dtype is None, or a valid dtype if dtype is not None: dtype = np.dtype(dtype) # 3. Validate order if not isinstance(order, str): raise TypeError( f"Expected order keyword to be of type str, got {type(order)}" ) if len(order) == 0 or order[0] not in "KkAaCcFf": raise ValueError( "Unrecognized order keyword value, expecting 'K', 'A', 'F', or 'C'." ) else: order = order[0].upper() # 4. Check that usm_type is None, or a valid value dpctl.utils.validate_usm_type(usm_type, allow_none=True) # 5. 
Normalize device/sycl_queue [keep it None if was None] if device is not None or sycl_queue is not None: sycl_queue = normalize_queue_device( sycl_queue=sycl_queue, device=device ) # handle instance(obj, usm_ndarray) if isinstance(obj, dpt.usm_ndarray): return _asarray_from_usm_ndarray( obj, dtype=dtype, copy=copy, usm_type=usm_type, sycl_queue=sycl_queue, order=order, ) elif hasattr(obj, "__sycl_usm_array_interface__"): sua_iface = getattr(obj, "__sycl_usm_array_interface__") membuf = dpm.as_usm_memory(obj) ary = dpt.usm_ndarray( sua_iface["shape"], dtype=sua_iface["typestr"], buffer=membuf, strides=sua_iface.get("strides", None), ) return _asarray_from_usm_ndarray( ary, dtype=dtype, copy=copy, usm_type=usm_type, sycl_queue=sycl_queue, order=order, ) elif isinstance(obj, np.ndarray): if copy is False: raise ValueError( "Converting numpy.ndarray to usm_ndarray requires a copy" ) return _asarray_from_numpy_ndarray( obj, dtype=dtype, usm_type=usm_type, sycl_queue=sycl_queue, order=order, ) elif _is_object_with_buffer_protocol(obj): if copy is False: raise ValueError( f"Converting {type(obj)} to usm_ndarray requires a copy" ) return _asarray_from_numpy_ndarray( np.array(obj), dtype=dtype, usm_type=usm_type, sycl_queue=sycl_queue, order=order, ) elif isinstance(obj, (list, tuple, range)): if copy is False: raise ValueError( "Converting Python sequence to usm_ndarray requires a copy" ) _, dt, devs = _array_info_sequence(obj) if devs == _host_set: return _asarray_from_numpy_ndarray( np.asarray(obj, dtype=dtype, order=order), dtype=dtype, usm_type=usm_type, sycl_queue=sycl_queue, order=order, ) # for sequences raise NotImplementedError( "Converting Python sequences is not implemented" ) if copy is False: raise ValueError( f"Converting {type(obj)} to usm_ndarray requires a copy" ) # obj is a scalar, create 0d array return _asarray_from_numpy_ndarray( np.asarray(obj), dtype=dtype, usm_type=usm_type, sycl_queue=sycl_queue, order="C", ) def empty( sh, dtype=None, order="C", 
device=None, usm_type="device", sycl_queue=None ): """ Creates `usm_ndarray` from uninitialized USM allocation. Args: shape (tuple): Dimensions of the array to be created. dtype (optional): data type of the array. Can be typestring, a `numpy.dtype` object, `numpy` char string, or a numpy scalar type. Default: None order ("C", or F"): memory layout for the array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returnedby `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. Default: `"device"`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ if not isinstance(order, str) or len(order) == 0 or order[0] not in "CcFf": raise ValueError( "Unrecognized order keyword value, expecting 'F' or 'C'." 
) else: order = order[0].upper() dpctl.utils.validate_usm_type(usm_type, allow_none=False) sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) dtype = _get_dtype(dtype, sycl_queue) res = dpt.usm_ndarray( sh, dtype=dtype, buffer=usm_type, order=order, buffer_ctor_kwargs={"queue": sycl_queue}, ) return res def _coerce_and_infer_dt(*args, dt): "Deduce arange type from sequence spec" nd, seq_dt, d = _array_info_sequence(args) if d != _host_set or nd != (len(args),): raise ValueError("start, stop and step must be Python scalars") if dt is None: dt = seq_dt dt = np.dtype(dt) if np.issubdtype(dt, np.integer): return tuple(int(v) for v in args), dt elif np.issubdtype(dt, np.floating): return tuple(float(v) for v in args), dt elif np.issubdtype(dt, np.complexfloating): return tuple(complex(v) for v in args), dt else: raise ValueError(f"Data type {dt} is not supported") def _get_arange_length(start, stop, step): "Compute length of arange sequence" span = stop - start if type(step) in [int, float] and type(span) in [int, float]: offset = -1 if step > 0 else 1 tmp = 1 + (span + offset) / step return tmp tmp = span / step if type(tmp) is complex and tmp.imag == 0: tmp = tmp.real else: return tmp k = int(tmp) if k > 0 and float(k) < tmp: tmp = tmp + 1 return tmp def arange( start, /, stop=None, step=1, *, dtype=None, device=None, usm_type="device", sycl_queue=None, ): """ arange(start, /, stop=None, step=1, *, dtype=None, \ device=None, usm_type="device", sycl_queue=None) -> usm_ndarray Args: start: device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returned by `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. 
Default: `'device'`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ if stop is None: stop = start start = 0 ( start, stop, step, ), dt = _coerce_and_infer_dt(start, stop, step, dt=dtype) try: tmp = _get_arange_length(start, stop, step) sh = int(tmp) if sh < 0: sh = 0 except TypeError: sh = 0 dpctl.utils.validate_usm_type(usm_type, allow_none=False) sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) res = dpt.usm_ndarray( (sh,), dtype=dt, buffer=usm_type, order="C", buffer_ctor_kwargs={"queue": sycl_queue}, ) _step = (start + step) - start _step = dt.type(_step) hev, _ = ti._linspace_step(start, _step, res, sycl_queue) hev.wait() return res def zeros( sh, dtype=None, order="C", device=None, usm_type="device", sycl_queue=None ): """ Creates `usm_ndarray` with zero elements. Args: shape (tuple): Dimensions of the array to be created. dtype (optional): data type of the array. Can be typestring, a `numpy.dtype` object, `numpy` char string, or a numpy scalar type. Default: None order ("C", or F"): memory layout for the array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returnedby `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. Default: `"device"`. 
sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ if not isinstance(order, str) or len(order) == 0 or order[0] not in "CcFf": raise ValueError( "Unrecognized order keyword value, expecting 'F' or 'C'." ) else: order = order[0].upper() dpctl.utils.validate_usm_type(usm_type, allow_none=False) sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) dtype = _get_dtype(dtype, sycl_queue) res = dpt.usm_ndarray( sh, dtype=dtype, buffer=usm_type, order=order, buffer_ctor_kwargs={"queue": sycl_queue}, ) res.usm_data.memset() return res def ones( sh, dtype=None, order="C", device=None, usm_type="device", sycl_queue=None ): """ Creates `usm_ndarray` with elements of one. Args: shape (tuple): Dimensions of the array to be created. dtype (optional): data type of the array. Can be typestring, a `numpy.dtype` object, `numpy` char string, or a numpy scalar type. Default: None order ("C", or F"): memory layout for the array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returnedby `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. Default: `"device"`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. 
If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ if not isinstance(order, str) or len(order) == 0 or order[0] not in "CcFf": raise ValueError( "Unrecognized order keyword value, expecting 'F' or 'C'." ) else: order = order[0].upper() dpctl.utils.validate_usm_type(usm_type, allow_none=False) sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) dtype = _get_dtype(dtype, sycl_queue) res = dpt.usm_ndarray( sh, dtype=dtype, buffer=usm_type, order=order, buffer_ctor_kwargs={"queue": sycl_queue}, ) hev, ev = ti._full_usm_ndarray(1, res, sycl_queue) hev.wait() return res def full( sh, fill_value, dtype=None, order="C", device=None, usm_type="device", sycl_queue=None, ): """ Creates `usm_ndarray` with elements of one. Args: shape (tuple): Dimensions of the array to be created. fill_value (int,float,complex): fill value dtype (optional): data type of the array. Can be typestring, a `numpy.dtype` object, `numpy` char string, or a numpy scalar type. Default: None order ("C", or F"): memory layout for the array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returnedby `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. Default: `"device"`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. 
If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ if not isinstance(order, str) or len(order) == 0 or order[0] not in "CcFf": raise ValueError( "Unrecognized order keyword value, expecting 'F' or 'C'." ) else: order = order[0].upper() dpctl.utils.validate_usm_type(usm_type, allow_none=False) sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) dtype = _get_dtype(dtype, sycl_queue, ref_type=type(fill_value)) res = dpt.usm_ndarray( sh, dtype=dtype, buffer=usm_type, order=order, buffer_ctor_kwargs={"queue": sycl_queue}, ) hev, ev = ti._full_usm_ndarray(fill_value, res, sycl_queue) hev.wait() return res def empty_like( x, dtype=None, order="C", device=None, usm_type=None, sycl_queue=None ): """ Creates `usm_ndarray` from uninitialized USM allocation. Args: x (usm_ndarray): Input array from which to derive the output array shape. dtype (optional): data type of the array. Can be typestring, a `numpy.dtype` object, `numpy` char string, or a numpy scalar type. Default: None order ("C", or F"): memory layout for the array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returnedby `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. Default: `"device"`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. 
""" if not isinstance(x, dpt.usm_ndarray): raise TypeError(f"Expected instance of dpt.usm_ndarray, got {type(x)}.") if not isinstance(order, str) or len(order) == 0 or order[0] not in "CcFf": raise ValueError( "Unrecognized order keyword value, expecting 'F' or 'C'." ) else: order = order[0].upper() if dtype is None: dtype = x.dtype if usm_type is None: usm_type = x.usm_type dpctl.utils.validate_usm_type(usm_type, allow_none=False) if device is None and sycl_queue is None: device = x.device sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) sh = x.shape dtype = np.dtype(dtype) res = dpt.usm_ndarray( sh, dtype=dtype, buffer=usm_type, order=order, buffer_ctor_kwargs={"queue": sycl_queue}, ) return res def zeros_like( x, dtype=None, order="C", device=None, usm_type=None, sycl_queue=None ): """ Creates `usm_ndarray` from USM allocation initialized with zeros. Args: x (usm_ndarray): Input array from which to derive the output array shape. dtype (optional): data type of the array. Can be typestring, a `numpy.dtype` object, `numpy` char string, or a numpy scalar type. Default: None order ("C", or F"): memory layout for the array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returnedby `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. Default: `"device"`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. 
If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ if not isinstance(x, dpt.usm_ndarray): raise TypeError(f"Expected instance of dpt.usm_ndarray, got {type(x)}.") if not isinstance(order, str) or len(order) == 0 or order[0] not in "CcFf": raise ValueError( "Unrecognized order keyword value, expecting 'F' or 'C'." ) else: order = order[0].upper() if dtype is None: dtype = x.dtype if usm_type is None: usm_type = x.usm_type dpctl.utils.validate_usm_type(usm_type, allow_none=False) if device is None and sycl_queue is None: device = x.device sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) sh = x.shape dtype = np.dtype(dtype) return zeros( sh, dtype=dtype, order=order, device=device, usm_type=usm_type, sycl_queue=sycl_queue, ) def ones_like( x, dtype=None, order="C", device=None, usm_type=None, sycl_queue=None ): """ Creates `usm_ndarray` from USM allocation initialized with zeros. Args: x (usm_ndarray): Input array from which to derive the output array shape. dtype (optional): data type of the array. Can be typestring, a `numpy.dtype` object, `numpy` char string, or a numpy scalar type. Default: None order ("C", or F"): memory layout for the array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returnedby `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. Default: `"device"`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. `sycl_queue` and `device` are exclusive keywords, i.e. use one or another. 
If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ if not isinstance(x, dpt.usm_ndarray): raise TypeError(f"Expected instance of dpt.usm_ndarray, got {type(x)}.") if not isinstance(order, str) or len(order) == 0 or order[0] not in "CcFf": raise ValueError( "Unrecognized order keyword value, expecting 'F' or 'C'." ) else: order = order[0].upper() if dtype is None: dtype = x.dtype if usm_type is None: usm_type = x.usm_type dpctl.utils.validate_usm_type(usm_type, allow_none=False) if device is None and sycl_queue is None: device = x.device sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) sh = x.shape dtype = np.dtype(dtype) return ones( sh, dtype=dtype, order=order, device=device, usm_type=usm_type, sycl_queue=sycl_queue, ) def full_like( x, fill_value, dtype=None, order="C", device=None, usm_type=None, sycl_queue=None, ): """ Creates `usm_ndarray` from USM allocation initialized with zeros. Args: x (usm_ndarray): Input array from which to derive the output array shape. fill_value: the value to fill array with dtype (optional): data type of the array. Can be typestring, a `numpy.dtype` object, `numpy` char string, or a numpy scalar type. Default: None order ("C", or F"): memory layout for the array. Default: "C" device (optional): array API concept of device where the output array is created. `device` can be `None`, a oneAPI filter selector string, an instance of :class:`dpctl.SyclDevice` corresponding to a non-partitioned SYCL device, an instance of :class:`dpctl.SyclQueue`, or a `Device` object returnedby `dpctl.tensor.usm_array.device`. Default: `None`. usm_type ("device"|"shared"|"host", optional): The type of SYCL USM allocation for the output array. Default: `"device"`. sycl_queue (:class:`dpctl.SyclQueue`, optional): The SYCL queue to use for output array allocation and copying. 
`sycl_queue` and `device` are exclusive keywords, i.e. use one or another. If both are specified, a `TypeError` is raised unless both imply the same underlying SYCL queue to be used. If both a `None`, the `dpctl.SyclQueue()` is used for allocation and copying. Default: `None`. """ if not isinstance(x, dpt.usm_ndarray): raise TypeError(f"Expected instance of dpt.usm_ndarray, got {type(x)}.") if not isinstance(order, str) or len(order) == 0 or order[0] not in "CcFf": raise ValueError( "Unrecognized order keyword value, expecting 'F' or 'C'." ) else: order = order[0].upper() if dtype is None: dtype = x.dtype if usm_type is None: usm_type = x.usm_type dpctl.utils.validate_usm_type(usm_type, allow_none=False) if device is None and sycl_queue is None: device = x.device sycl_queue = normalize_queue_device(sycl_queue=sycl_queue, device=device) sh = x.shape dtype = np.dtype(dtype) return full( sh, fill_value, dtype=dtype, order=order, device=device, usm_type=usm_type, sycl_queue=sycl_queue, )
38.139066
80
0.613077
4,796
35,927
4.461843
0.070267
0.055096
0.023132
0.017664
0.771111
0.748306
0.727931
0.712837
0.700407
0.691574
0
0.002442
0.293456
35,927
941
81
38.179596
0.840569
0.429148
0
0.59401
0
0
0.089031
0.004142
0
0
0
0.001063
0.001664
1
0.02995
false
0.006656
0.011647
0
0.109817
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e9b1653174d672f1b69bc02d2f6d33001f1e382b
213
py
Python
main.py
GUTINGLIAO/net-structure-classes-num-expr
f05222dc517c61a1627c427ef4981b6da13c755c
[ "MIT" ]
2
2021-01-18T15:47:32.000Z
2021-01-20T05:37:48.000Z
main.py
GUTINGLIAO/net-structure-classes-num-expr
f05222dc517c61a1627c427ef4981b6da13c755c
[ "MIT" ]
null
null
null
main.py
GUTINGLIAO/net-structure-classes-num-expr
f05222dc517c61a1627c427ef4981b6da13c755c
[ "MIT" ]
null
null
null
from net.cnn import Cnn, device from net.instance import simple_cnn_cifar10_instance_10_classes if __name__ == '__main__': print(device) net: Cnn = simple_cnn_cifar10_instance_10_classes net.test()
21.3
63
0.769953
31
213
4.709677
0.483871
0.09589
0.219178
0.328767
0.452055
0.452055
0
0
0
0
0
0.044693
0.159624
213
9
64
23.666667
0.77095
0
0
0
0
0
0.037915
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.166667
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
e9b3d97eebd0f9ca179a356f9a4c34de70a379da
70
py
Python
lesson05/prefecture/hyogo_behavior.py
muzudho/py-state-machine-practice
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
[ "MIT" ]
null
null
null
lesson05/prefecture/hyogo_behavior.py
muzudho/py-state-machine-practice
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
[ "MIT" ]
null
null
null
lesson05/prefecture/hyogo_behavior.py
muzudho/py-state-machine-practice
e31c066f4cf142b6b6c5ff273b56a0f89428c59e
[ "MIT" ]
null
null
null
class HyogoBehavior: def print(self): print("Port Tower")
17.5
27
0.628571
8
70
5.5
0.875
0
0
0
0
0
0
0
0
0
0
0
0.257143
70
3
28
23.333333
0.846154
0
0
0
0
0
0.142857
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0.666667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
1
0
5
75a3c2883372c724766600a702819558e03c53d5
55
py
Python
tests/alphanumeric/tests/__init__.py
contentoolsraushan/contentools-raushan-raj
3205e72c4a18249adc5c7fd4058540e1ba92a55d
[ "BSD-3-Clause" ]
1
2016-11-16T02:34:00.000Z
2016-11-16T02:34:00.000Z
tests/alphanumeric/tests/__init__.py
contentoolsraushan/contentools-raushan-raj
3205e72c4a18249adc5c7fd4058540e1ba92a55d
[ "BSD-3-Clause" ]
null
null
null
tests/alphanumeric/tests/__init__.py
contentoolsraushan/contentools-raushan-raj
3205e72c4a18249adc5c7fd4058540e1ba92a55d
[ "BSD-3-Clause" ]
1
2020-06-12T09:54:55.000Z
2020-06-12T09:54:55.000Z
from alphanumeric.tests.views import * # flake8: noqa
27.5
54
0.763636
7
55
6
1
0
0
0
0
0
0
0
0
0
0
0.021277
0.145455
55
1
55
55
0.87234
0.218182
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5