hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
01f5555405b863634047628b24febefbf4efe699 | 2,149 | py | Python | karp5/tests/unit_tests/server/translator/test_parser_freetext.py | spraakbanken/karp-backend-v5 | bfca9d0f29a1243ee8d817c6a7db8b30a7da1097 | [
"MIT"
] | 4 | 2018-01-09T10:20:22.000Z | 2019-11-21T12:26:56.000Z | karp5/tests/unit_tests/server/translator/test_parser_freetext.py | spraakbanken/karp-backend-v5 | bfca9d0f29a1243ee8d817c6a7db8b30a7da1097 | [
"MIT"
] | 44 | 2018-03-23T13:59:13.000Z | 2022-03-29T06:03:17.000Z | karp5/tests/unit_tests/server/translator/test_parser_freetext.py | spraakbanken/karp-backend-v5 | bfca9d0f29a1243ee8d817c6a7db8b30a7da1097 | [
"MIT"
] | 2 | 2018-01-07T12:08:32.000Z | 2019-08-21T08:05:17.000Z | from karp5.server.translator.parser import freetext
from karp5.tests.util import assert_es_search
def test_freetext_minimum():
    """freetext() with only text and mode builds a bare bool/should query."""
    text = None
    mode = "karp"
    # The three match clauses freetext is expected to emit for mode "karp".
    match_clauses = [
        {"match": {"_all": {"operator": "and", "query": text}}},
        {"match": {"lemma_german": {"boost": 200, "query": text}}},
        {"match": {"english.lemma_english": {"boost": 100, "query": text}}},
    ]
    expected = {"query": {"bool": {"should": match_clauses}}}
    result = freetext(text, mode)
    assert_es_search(result, expected)
    assert result == expected
def test_freetext_with_extra():
    """An `extra` clause wraps the should-query in a bool/must with it."""
    text = None
    mode = "karp"
    extra = {"term": {"extra": "extra"}}
    match_clauses = [
        {"match": {"_all": {"operator": "and", "query": text}}},
        {"match": {"lemma_german": {"boost": 200, "query": text}}},
        {"match": {"english.lemma_english": {"boost": 100, "query": text}}},
    ]
    expected = {
        "query": {
            "bool": {
                "must": [
                    {"bool": {"should": match_clauses}},
                    extra,
                ]
            }
        }
    }
    result = freetext(text, mode, extra=extra)
    assert_es_search(result, expected)
    assert result == expected
def test_freetext_with_filters():
    """A `filters` list is attached as the bool query's filter clause."""
    text = None
    mode = "karp"
    filters = [{"term": {"extra": "extra"}}]
    match_clauses = [
        {"match": {"_all": {"operator": "and", "query": text}}},
        {"match": {"lemma_german": {"boost": 200, "query": text}}},
        {"match": {"english.lemma_english": {"boost": 100, "query": text}}},
    ]
    expected = {
        "query": {
            "bool": {
                "should": match_clauses,
                "filter": filters[0],
            }
        }
    }
    result = freetext(text, mode, filters=filters)
    assert_es_search(result, expected)
    assert result == expected
| 27.551282 | 100 | 0.420661 | 170 | 2,149 | 5.170588 | 0.241176 | 0.09215 | 0.095563 | 0.054608 | 0.700796 | 0.700796 | 0.700796 | 0.618885 | 0.564278 | 0.564278 | 0 | 0.01676 | 0.416938 | 2,149 | 77 | 101 | 27.909091 | 0.684757 | 0 | 0 | 0.539683 | 0 | 0 | 0.168994 | 0.02933 | 0 | 0 | 0 | 0 | 0.111111 | 1 | 0.047619 | false | 0 | 0.031746 | 0 | 0.079365 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
01fa5865bb578ac9e5d17a7864de57cbbc115125 | 285 | py | Python | pyd3ckbase/cfg.py | d3ck-org/pyd3ckbase | bcc5e338aaeed41ff898e997635d5fbf8572b4a5 | [
"MIT"
] | null | null | null | pyd3ckbase/cfg.py | d3ck-org/pyd3ckbase | bcc5e338aaeed41ff898e997635d5fbf8572b4a5 | [
"MIT"
] | null | null | null | pyd3ckbase/cfg.py | d3ck-org/pyd3ckbase | bcc5e338aaeed41ff898e997635d5fbf8572b4a5 | [
"MIT"
] | null | null | null | def init(cfg):
    # Build the logging configuration: quiet WARN defaults, switched to
    # verbose DEBUG output when the deployment stage is 'dev'.
    data = {'_logLevel': 'WARN', '_moduleLogLevel': 'WARN'}
    if cfg['_stage'] == 'dev':
        data.update({
            '_logLevel': 'DEBUG',
            '_moduleLogLevel': 'WARN',
            '_logFormat': '%(levelname)s: %(message)s'
        })
    return data
| 23.75 | 59 | 0.494737 | 25 | 285 | 5.4 | 0.68 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.312281 | 285 | 11 | 60 | 25.909091 | 0.688776 | 0 | 0 | 0 | 0 | 0 | 0.385965 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0 | 0 | 0.222222 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bf2ad3a0d41258d0fc6f4c181d099177809c7206 | 4,738 | py | Python | 2018/15_BeverageBandits/person.py | deanearlwright/AdventOfCode | ca4cf6315c0efa38bd7748fb6f4bc99e7934871d | [
"MIT"
] | 1 | 2021-01-03T23:09:28.000Z | 2021-01-03T23:09:28.000Z | 2018/15_BeverageBandits/person.py | deanearlwright/AdventOfCode | ca4cf6315c0efa38bd7748fb6f4bc99e7934871d | [
"MIT"
] | 6 | 2020-12-26T21:02:42.000Z | 2020-12-26T21:02:52.000Z | 2018/15_BeverageBandits/person.py | deanearlwright/AdventOfCode | ca4cf6315c0efa38bd7748fb6f4bc99e7934871d | [
"MIT"
] | null | null | null | # ======================================================================
# Beverage Bandits
# Advent of Code 2018 Day 15 -- Eric Wastl -- https://adventofcode.com
#
# Python implementation by Dr. Dean Earl Wright III
# ======================================================================
# ======================================================================
# p e r s o n . p y
# ======================================================================
"People and persons for the Advent of Code 2018 Day 15 puzzle"
# ----------------------------------------------------------------------
# import
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# constants
# ----------------------------------------------------------------------
# Locations are packed into a single int as row * ROW_MULT + col.
ROW_MULT = 100
# Packed-location deltas for the four 4-connected neighbours.
ADJACENT = [-100, -1, 1, 100]


def row_col_to_loc(row, col):
    """Pack a (row, col) pair into a single integer location."""
    return ROW_MULT * row + col


def loc_to_row_col(loc):
    """Unpack an integer location back into a (row, col) tuple."""
    return divmod(loc, ROW_MULT)


def distance(loc1, loc2):
    """Manhattan distance between two packed locations."""
    row1, col1 = loc_to_row_col(loc1)
    row2, col2 = loc_to_row_col(loc2)
    return abs(row1 - row2) + abs(col1 - col2)


def adjacent(loc1, loc2):
    """True when the two locations are exactly one step apart."""
    return distance(loc1, loc2) == 1
# ======================================================================
# Person
# ======================================================================
class Person(object):  # pylint: disable=R0902, R0205
    "Elf/Goblin combatant for Beverage Bandits"

    def __init__(self, letter='#', location=0, attack=3):
        # 1. Record identity and combat stats; everyone starts at full health
        self.letter = letter
        self.location = location
        self.hitpoints = 200
        self.attack = attack

    def distance(self, location):
        "Manhattan distance from this combatant to the given location"
        return distance(self.location, location)

    def attacks(self, other):
        "Deal this combatant's attack power to other (hitpoints floor at 0)"
        other.hitpoints = max(0, other.hitpoints - self.attack)

    def adjacent(self):
        "Packed locations one step away from this combatant"
        return [self.location + delta for delta in ADJACENT]
# ======================================================================
# People
# ======================================================================
class People(object):  # pylint: disable=R0902, R0205
    "Collection of same-side Elf/Goblin combatants, keyed by location"

    def __init__(self, letter='#'):
        # 1. Remember which side this collection holds
        self.letter = letter
        self.persons = {}

    def __len__(self):
        return len(self.persons)

    def __getitem__(self, loc):
        if loc not in self.persons:
            raise AttributeError("No such location: %s" % loc)
        return self.persons[loc]

    def __setitem__(self, loc, person):
        # Reject combatants from the other side or with a mismatched key
        if self.letter != person.letter:
            raise ValueError("Incompatable letters: %s != %s" % (self.letter, person.letter))
        if loc != person.location:
            raise ValueError("Incompatable locations: %s != %s" % (loc, person.location))
        self.persons[loc] = person

    def __delitem__(self, loc):
        if loc not in self.persons:
            raise AttributeError("No such location: %s" % loc)
        del self.persons[loc]

    def __iter__(self):
        return iter(self.persons)

    def __contains__(self, loc):
        return loc in self.persons

    def add(self, person):
        "Insert a combatant at its own location (must be same side, free)"
        if self.letter != person.letter:
            raise ValueError("Incompatable letters: %s != %s" % (self.letter, person.letter))
        if person.location in self.persons:
            raise ValueError("Location %s already occuried" % (person.location))
        self.persons[person.location] = person

    def locations(self):
        "Occupied locations in ascending (reading) order"
        return sorted(self.persons.keys())

    def hitpoints(self):
        "Total remaining hitpoints of every combatant in the collection"
        return sum(person.hitpoints for person in self.persons.values())
# ----------------------------------------------------------------------
# module initialization
# ----------------------------------------------------------------------
if __name__ == '__main__':
pass
# ======================================================================
# end p e r s o n . p y end
# ======================================================================
| 35.62406 | 93 | 0.395314 | 388 | 4,738 | 4.685567 | 0.291237 | 0.078658 | 0.035754 | 0.048405 | 0.346535 | 0.314631 | 0.291529 | 0.264026 | 0.255226 | 0.162816 | 0 | 0.018396 | 0.265724 | 4,738 | 132 | 94 | 35.893939 | 0.504168 | 0.46412 | 0 | 0.179104 | 0 | 0 | 0.114058 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.268657 | false | 0.014925 | 0 | 0.134328 | 0.477612 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
bf2fb37e2d84aa2bb43c8c8b5f3f38565fbb8a33 | 3,289 | py | Python | inference/utils/func.py | Benjamin-deLaverny/RootNav-2.0 | 14b6d7353687acf640e5efbd224a35d9131e7275 | [
"BSD-3-Clause"
] | 23 | 2019-07-25T10:15:20.000Z | 2022-01-26T03:28:56.000Z | inference/utils/func.py | rootnav2/RootNav-2.0 | 3e973c0f7fc34b3938a2294e858d1a0de76e9f0f | [
"BSD-3-Clause"
] | 7 | 2019-08-07T15:56:26.000Z | 2022-01-13T01:28:22.000Z | inference/utils/func.py | rootnav2/RootNav-2.0 | 3e973c0f7fc34b3938a2294e858d1a0de76e9f0f | [
"BSD-3-Clause"
] | 11 | 2019-07-25T10:15:25.000Z | 2022-02-15T09:14:49.000Z | import kdtree
def nonmaximalsuppression(tensor, threshold):
    """Return [x, y] coordinates of 4-connected local maxima in a heatmap.

    A pixel survives when its value reaches ``threshold`` and is not smaller
    than any in-bounds 4-connected neighbour.  Corners, edges and interior
    pixels are handled in separate passes so out-of-bounds neighbours are
    never read.

    NOTE(review): assumes ``tensor`` is a square 2-D map whose row stride
    (``tensor.stride()[0]``) equals its width -- confirm with callers.
    """
    # Work directly on the flat storage; ``offset`` accounts for views that
    # do not start at the beginning of the underlying storage.
    pred_data = tensor.storage()
    offset = tensor.storage_offset()
    stride = int(tensor.stride()[0])
    numel = tensor.numel()
    points = []
    # Corners
    # NOTE(review): the two top corners compare with ``>= threshold`` while
    # the two bottom corners use ``> threshold`` -- possibly unintentional.
    val = pred_data[0 + offset]
    if val >= threshold and val >= pred_data[1 + offset] and val >= pred_data[stride + offset]:
        points.append([0, 0])
    val = pred_data[stride - 1 + offset]
    if val >= threshold and val >= pred_data[stride - 2 + offset] and val >= pred_data[2 * stride - 1 + offset]:
        points.append([stride - 1, 0])
    val = pred_data[numel - stride + offset]
    if val > threshold and val >= pred_data[numel - stride + 1 + offset] and val >= pred_data[numel - 2 * stride + offset]:
        points.append([0, stride - 1])
    val = pred_data[numel - 1 + offset]
    if val > threshold and val >= pred_data[numel -2 + offset] and val >= pred_data[numel - 1 - stride + offset]:
        points.append([stride - 1, stride - 1])
    # Top y==0 (excluding corners): neighbours are left, right and below
    for i in range(1,stride-1):
        i += offset
        val = pred_data[i]
        if val >= threshold and val >= pred_data[i-1] and val >= pred_data[i+1] and val >= pred_data[i+stride]:
            points.append([i - offset, 0])
    # Bottom y==stride-1: neighbours are left, right and above
    for i in range(numel-stride+1,numel-1):
        i += offset
        val = pred_data[i]
        if val >= threshold and val >= pred_data[i-1] and val >= pred_data[i+1] and val >= pred_data[i-stride]:
            points.append([i - numel + stride - offset, stride - 1])
    # Front x==0 (left edge): neighbours are above, below and right
    for i in range(stride, stride * (stride - 1), stride):
        i += offset
        val = pred_data[i]
        if val >= threshold and val >= pred_data[i+stride] and val >= pred_data[i-stride] and val >= pred_data[i+1]:
            points.append([0, (i - offset) // stride])
    # Back x == stride-1 (right edge): neighbours are above, below and left
    for i in range(stride - 1, stride * (stride - 1), stride):
        i += offset
        val = pred_data[i]
        if val >= threshold and val >= pred_data[i+stride] and val >= pred_data[i-stride] and val >= pred_data[i-1]:
            points.append([stride - 1, (i - offset) // stride])
    # Remaining inner pixels: ``i`` walks row starts (second pixel of each
    # row), ``j`` walks the row's interior; all four neighbours are checked.
    for i in range(stride+1, stride * (stride - 1), stride):
        for j in range(i,i+stride-2):
            j += offset
            val = pred_data[j]
            if val >= threshold and val >= pred_data[j+1] and val >= pred_data[j-1] and val >= pred_data[j+stride] and val >= pred_data[j-stride]:
                points.append([(j - offset) % stride, i // stride])
    return points
def euclid(pt1, pt2):
    """Squared Euclidean distance between two 2-d points (no sqrt taken)."""
    dx = pt1[0] - pt2[0]
    dy = pt1[1] - pt2[1]
    return dx * dx + dy * dy
def rrtree(lat, threshold):
    """Thin a list of 2-d points so no two kept points are closer than
    ``threshold`` (a *squared* distance, e.g. 8**2 for 8 pixels).

    Earlier points win: a point is dropped when it lies within the cutoff
    of an already-kept point.  Returns (int, int) tuples in the k-d tree's
    level order; None or an empty input yields [].
    """
    if lat is None or len(lat) == 0:
        return []
    tree = kdtree.create(dimensions=2)
    distance_threshold = threshold  # squared-distance cutoff (e.g. 8^2)
    for pt in lat:
        t_pt = (float(pt[0]), float(pt[1]))
        search_result = tree.search_nn(t_pt, dist=euclid)
        if search_result is None:
            # Tree is empty (first point): always keep it.
            tree.add(t_pt)
        else:
            # search_result is (nearest_node, squared_distance); only the
            # distance matters for the keep/drop decision.
            dist = search_result[1]
            if dist >= distance_threshold:
                tree.add(t_pt)
    filtered_points = [(int(pt.data[0]), int(pt.data[1]))
                       for pt in kdtree.level_order(tree)]
    return filtered_points
| 38.244186 | 146 | 0.579203 | 490 | 3,289 | 3.787755 | 0.132653 | 0.146552 | 0.195582 | 0.181034 | 0.55819 | 0.460668 | 0.381466 | 0.373922 | 0.355603 | 0.294181 | 0 | 0.028632 | 0.277896 | 3,289 | 85 | 147 | 38.694118 | 0.752842 | 0.027972 | 0 | 0.15625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.046875 | false | 0 | 0.015625 | 0.015625 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bf31607182f2bee7e38bf8f33cf2315acfb8bbfd | 515 | py | Python | c_core_librairies/make_book_report.py | nicolasessisbreton/pyzehe | 7497a0095d974ac912ce9826a27e21fd9d513942 | [
"Apache-2.0"
] | 1 | 2018-05-31T19:36:36.000Z | 2018-05-31T19:36:36.000Z | c_core_librairies/make_book_report.py | nicolasessisbreton/pyzehe | 7497a0095d974ac912ce9826a27e21fd9d513942 | [
"Apache-2.0"
] | 1 | 2018-05-31T01:10:51.000Z | 2018-05-31T01:10:51.000Z | c_core_librairies/make_book_report.py | nicolasessisbreton/pyzehe | 7497a0095d974ac912ce9826a27e21fd9d513942 | [
"Apache-2.0"
] | null | null | null | """
here we produce visualization with excel
# one-time excel preparation
in cmder
xlwings addin install
open excel
enable Trust access to VBA
File > Options > Trust Center > Trust Center Settings > Macro Settings
# making the visualization workbook
in cmder
chdir this_directory
xlwings quickstart book_report
the result of this is in
sol_book_report\*
# studying the visualization
in cmder type excel
open with excel sol_book_report\book_report.xlsm
open with sublime book_report.py
""" | 20.6 | 73 | 0.772816 | 75 | 515 | 5.2 | 0.573333 | 0.128205 | 0.066667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.190291 | 515 | 25 | 74 | 20.6 | 0.935252 | 1.254369 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bf3a723a0507b0509dee2093111300ae6c1b90c0 | 2,042 | py | Python | detection/config.py | manasjuneja/sign_language_interpreter | c815cf6c2831a059af12fd74933541b2a2a039b0 | [
"MIT"
] | null | null | null | detection/config.py | manasjuneja/sign_language_interpreter | c815cf6c2831a059af12fd74933541b2a2a039b0 | [
"MIT"
] | null | null | null | detection/config.py | manasjuneja/sign_language_interpreter | c815cf6c2831a059af12fd74933541b2a2a039b0 | [
"MIT"
] | null | null | null | import os
import sys
import json
# data path to root directory to get to config.json
root_dir = os.path.dirname(sys.modules['__main__'].__file__)
# the config file can be used in two ways which changes the data_path
# if called from detection when it's been run to test, the folder has a __main__
# and hence we need to get the parent
# read and return the required data, will send as string
def getConfig(section, var):
    """Return config[section][var] from configuration/config.json.

    Looks under ``root_dir`` first, then falls back to ``root_dir``'s parent
    (the layout depends on how the package was launched).  Raises
    FileNotFoundError when neither file exists, AttributeError when the
    section is missing, and returns None when the section lacks ``var``.
    """
    # Candidate locations, tried in order (deduplicates the original
    # copy-pasted try/except bodies).
    candidates = [
        os.path.join(root_dir, "configuration", "config.json"),
        os.path.join(os.path.join(root_dir, os.pardir),
                     "configuration", "config.json"),
    ]
    for index, data_path in enumerate(candidates):
        try:
            with open(data_path, 'r') as fl:
                # data contains the config information
                data = json.load(fl)
            return data.get(section).get(var)
        except FileNotFoundError:
            # Only re-raise once every candidate location has failed.
            if index == len(candidates) - 1:
                raise
# write data to configuration file
def writeConfig(section, var, data):
    """Replace config[section][var] with ``data`` in configuration/config.json.

    Mirrors getConfig's lookup: the file under ``root_dir`` is tried first,
    then the one under ``root_dir``'s parent.  Raises FileNotFoundError when
    neither exists and KeyError when section/var is not already present.
    """
    candidates = [
        os.path.join(root_dir, "configuration", "config.json"),
        os.path.join(os.path.join(root_dir, os.pardir),
                     "configuration", "config.json"),
    ]
    for index, data_path in enumerate(candidates):
        try:
            # read and store data (``with`` closes the file; the explicit
            # fl.close() calls in the original were redundant)
            with open(data_path, 'r+') as fl:
                jsonData = json.load(fl)
            # NOTE(review): del-then-assign keeps the original requirement
            # that the key already exists (KeyError otherwise).
            del jsonData[section][var]
            jsonData[section][var] = data
            # to write data
            with open(data_path, 'w') as fl:
                json.dump(jsonData, fl)
            return
        except FileNotFoundError:
            # BUG FIX: the original fallback recomputed data_path from
            # root_dir again, clobbering the parent-directory path so the
            # fallback location was never actually tried.
            if index == len(candidates) - 1:
                raise
| 35.824561 | 98 | 0.604799 | 279 | 2,042 | 4.318996 | 0.268817 | 0.086307 | 0.058091 | 0.079668 | 0.695436 | 0.695436 | 0.695436 | 0.695436 | 0.695436 | 0.695436 | 0 | 0 | 0.289422 | 2,042 | 56 | 99 | 36.464286 | 0.830462 | 0.226249 | 0 | 0.837838 | 0 | 0 | 0.08679 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054054 | false | 0 | 0.081081 | 0 | 0.189189 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
170f040109970ea281f30e9adb297272472bff9e | 1,080 | py | Python | OS/processScheduling/PCB.py | cheniison/Experiment | acc0308e0a4a1e4ef297ecf92231af3aae3aaa74 | [
"MIT"
] | null | null | null | OS/processScheduling/PCB.py | cheniison/Experiment | acc0308e0a4a1e4ef297ecf92231af3aae3aaa74 | [
"MIT"
] | null | null | null | OS/processScheduling/PCB.py | cheniison/Experiment | acc0308e0a4a1e4ef297ecf92231af3aae3aaa74 | [
"MIT"
] | null | null | null | """
进程控制类
"""
"""
PCB
进程控制块
"""
class PCB(object):
    """Process control block: bookkeeping for one schedulable process."""

    def __init__(self, name, a_time, e_time, priority):
        self.name = name          # process name
        self.u_time = 0           # CPU time already consumed by the scheduler
        self.a_time = a_time      # arrival time
        self.e_time = e_time      # estimated total service time required
        self.priority = priority  # scheduling priority
"""
进程控制块队列
"""
class PCBList(object):
    """Queue of process control blocks with selective pops for the
    FIFO, shortest-job-first and priority scheduling policies."""

    def __init__(self):
        self.pcb_list = list()

    def append(self, pcb):
        """Enqueue a process at the tail."""
        self.pcb_list.append(pcb)

    def pop(self):
        """Dequeue the process at the head (FIFO order)."""
        return self.pcb_list.pop(0)

    def pop_min_e_time(self):
        """Remove and return the process with the smallest e_time (SJF);
        ties keep the earliest-queued process."""
        best = 0
        for idx in range(1, len(self.pcb_list)):
            if self.pcb_list[idx].e_time < self.pcb_list[best].e_time:
                best = idx
        return self.pcb_list.pop(best)

    def pop_max_priority(self):
        """Remove and return the highest-priority process.  A *smaller*
        numeric priority value means a higher priority here, matching the
        ``<`` comparison of the original scheduler."""
        best = 0
        for idx in range(1, len(self.pcb_list)):
            if self.pcb_list[idx].priority < self.pcb_list[best].priority:
                best = idx
        return self.pcb_list.pop(best)

    def empty(self):
        """True when no processes are queued."""
        return not self.pcb_list
| 22.5 | 73 | 0.552778 | 151 | 1,080 | 3.728477 | 0.238411 | 0.161634 | 0.234458 | 0.090586 | 0.333925 | 0.298401 | 0.298401 | 0.298401 | 0.298401 | 0.17762 | 0 | 0.009655 | 0.328704 | 1,080 | 47 | 74 | 22.978723 | 0.766897 | 0.038889 | 0 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0.071429 | 0.464286 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
171f847caefe343398f9cdf057b6a4325a34a1c4 | 1,517 | py | Python | Menta/Profiles/models.py | reembot/Menta | 7da11e28d6d9f61075ab835548956168671b509d | [
"MIT"
] | 2 | 2022-03-26T03:04:45.000Z | 2022-03-26T08:49:32.000Z | Menta/Profiles/models.py | reembot/Menta | 7da11e28d6d9f61075ab835548956168671b509d | [
"MIT"
] | null | null | null | Menta/Profiles/models.py | reembot/Menta | 7da11e28d6d9f61075ab835548956168671b509d | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class Skill(models.Model):
    """A named skill together with the owner's proficiency level."""

    # (stored value, human-readable label) pairs for the level field.
    SKILL_CHOICES = [
        ('B', 'Beginner'),
        ('I', 'Intermediate'),
        ('E', 'Expert'),
    ]

    skillName = models.CharField(verbose_name='Skill', max_length=100)
    level = models.CharField(
        verbose_name='Proficiency',
        choices=SKILL_CHOICES,
        max_length=100,
        default='B',
    )
class Profile(models.Model):
    """A mentor or mentee profile with contact and background details."""

    # (stored value, human-readable label) pairs for the choice fields.
    TYPE_CHOICES = [
        ('MO', 'Mentor'),
        ('ME', 'Mentee'),
    ]
    GENDER_CHOICES = [
        ('M', 'Male'),
        ('F', 'Female'),
        ('O', 'Other'),
    ]

    firstName = models.CharField(verbose_name='First Name', max_length=100, null=False)
    lastName = models.CharField(verbose_name='Last Name', max_length=100)
    userType = models.CharField(verbose_name='Mentor/Mentee', choices=TYPE_CHOICES, max_length=100, null=False)
    occupation = models.CharField(verbose_name='Occupation', max_length=100)
    website = models.URLField(verbose_name='Professional Portfolio URL (GitHub, LinkedIn, etc.)')
    age = models.PositiveSmallIntegerField(verbose_name='Age', null=True)
    gender = models.CharField(verbose_name='Gender', choices=GENDER_CHOICES, max_length=100, null=True)
    biography = models.TextField(verbose_name='Personal Bio', max_length=2000)
    photos = models.URLField(verbose_name='Headshot URL', null=True)
    skills = models.ManyToManyField(Skill)
    contact = models.EmailField(verbose_name='Contact email', max_length=100, null=False)
| 31.604167 | 112 | 0.675016 | 176 | 1,517 | 5.664773 | 0.409091 | 0.132397 | 0.096289 | 0.182548 | 0.09328 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022544 | 0.181279 | 1,517 | 47 | 113 | 32.276596 | 0.780193 | 0.015821 | 0 | 0 | 0 | 0 | 0.146881 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.033333 | 0 | 0.633333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
173a18bb835beb1e9c0c021e39c7610686574f0b | 31,166 | py | Python | heat/tests/test_volume.py | citrix-openstack-build/heat | fa31873529481472e037e3ce157b87f8057fe622 | [
"Apache-2.0"
] | null | null | null | heat/tests/test_volume.py | citrix-openstack-build/heat | fa31873529481472e037e3ce157b87f8057fe622 | [
"Apache-2.0"
] | null | null | null | heat/tests/test_volume.py | citrix-openstack-build/heat | fa31873529481472e037e3ce157b87f8057fe622 | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import mox
from testtools import skipIf
from heat.common import exception
from heat.common import template_format
from heat.engine import scheduler
from heat.engine.resources import instance
from heat.engine.resources import volume as vol
from heat.engine import clients
from heat.engine import resource
from heat.openstack.common.importutils import try_import
from heat.tests.common import HeatTestCase
from heat.tests.v1_1 import fakes
from heat.tests import utils
from cinderclient.v1 import client as cinderclient
volume_backups = try_import('cinderclient.v1.volume_backups')
volume_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Volume Test",
"Parameters" : {},
"Resources" : {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.large",
"KeyName" : "test",
"UserData" : "some data"
}
},
"DataVolume" : {
"Type" : "AWS::EC2::Volume",
"Properties" : {
"Size" : "1",
"AvailabilityZone" : {"Fn::GetAtt": ["WikiDatabase",
"AvailabilityZone"]},
"Tags" : [{ "Key" : "Usage", "Value" : "Wiki Data Volume" }]
}
},
"MountPoint" : {
"Type" : "AWS::EC2::VolumeAttachment",
"Properties" : {
"InstanceId" : { "Ref" : "WikiDatabase" },
"VolumeId" : { "Ref" : "DataVolume" },
"Device" : "/dev/vdc"
}
}
}
}
'''
class VolumeTest(HeatTestCase):
    def setUp(self):
        """Build fake nova/cinder clients and stub out every client call the
        volume resources make, so each test can script its own
        create/delete expectations before ReplayAll."""
        super(VolumeTest, self).setUp()
        self.fc = fakes.FakeClient()
        self.cinder_fc = cinderclient.Client('username', 'password')
        # Stub the client factories so resources receive the fakes above.
        self.m.StubOutWithMock(clients.OpenStackClients, 'cinder')
        self.m.StubOutWithMock(clients.OpenStackClients, 'nova')
        # Stub the volume API calls the tests will record expectations on.
        self.m.StubOutWithMock(self.cinder_fc.volumes, 'create')
        self.m.StubOutWithMock(self.cinder_fc.volumes, 'get')
        self.m.StubOutWithMock(self.cinder_fc.volumes, 'delete')
        self.m.StubOutWithMock(self.fc.volumes, 'create_server_volume')
        self.m.StubOutWithMock(self.fc.volumes, 'delete_server_volume')
        utils.setup_dummy_db()
def create_volume(self, t, stack, resource_name):
data = t['Resources'][resource_name]
data['Properties']['AvailabilityZone'] = 'nova'
rsrc = vol.Volume(resource_name, data, stack)
self.assertEqual(rsrc.validate(), None)
scheduler.TaskRunner(rsrc.create)()
self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
return rsrc
def create_attachment(self, t, stack, resource_name):
rsrc = vol.VolumeAttachment(resource_name,
t['Resources'][resource_name],
stack)
self.assertEqual(rsrc.validate(), None)
scheduler.TaskRunner(rsrc.create)()
self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
return rsrc
    def _mock_create_volume(self, fv, stack_name):
        """Record the cinder expectations for creating a 1 GB volume named
        after the stack's DataVolume resource; ``fv`` is returned as the
        created volume.  Caller must still ReplayAll."""
        clients.OpenStackClients.cinder().MultipleTimes().AndReturn(
            self.cinder_fc)
        vol_name = utils.PhysName(stack_name, 'DataVolume')
        self.cinder_fc.volumes.create(
            size=u'1', availability_zone='nova',
            display_description=vol_name,
            display_name=vol_name).AndReturn(fv)
    def _stubout_delete_volume(self, fv):
        """Record a successful volume delete: delete() succeeds, one status
        poll still sees the volume, the next raises NotFound (gone).
        Calls ReplayAll, so all other expectations must be recorded first."""
        self.m.StubOutWithMock(fv, 'delete')
        fv.delete().AndReturn(True)
        self.m.StubOutWithMock(fv, 'get')
        fv.get().AndReturn(None)
        fv.get().AndRaise(
            clients.cinderclient.exceptions.NotFound('Not found'))
        self.m.ReplayAll()
    def _mock_create_server_volume_script(self, fva):
        """Record the nova-side expectations for attaching volume 'vol-123'
        to instance 'WikiDatabase' at /dev/vdc; ``fva`` is returned as the
        attachment.  Caller must still ReplayAll."""
        clients.OpenStackClients.nova().MultipleTimes().AndReturn(self.fc)
        self.fc.volumes.create_server_volume(
            device=u'/dev/vdc', server_id=u'WikiDatabase',
            volume_id=u'vol-123').AndReturn(fva)
        self.cinder_fc.volumes.get('vol-123').AndReturn(fva)
    def test_volume(self):
        """Full lifecycle: create a volume, verify updates force replacement,
        deletion fails while 'in-use', succeeds once 'available', and a
        second destroy of an already-gone volume is tolerated."""
        fv = FakeVolume('creating', 'available')
        stack_name = 'test_volume_stack'

        # create script
        self._mock_create_volume(fv, stack_name)
        # delete script
        self.cinder_fc.volumes.get('vol-123').AndReturn(fv)
        self.cinder_fc.volumes.get('vol-123').AndReturn(fv)
        self.m.ReplayAll()

        t = template_format.parse(volume_template)
        stack = utils.parse_stack(t, stack_name=stack_name)

        rsrc = self.create_volume(t, stack, 'DataVolume')
        self.assertEqual(fv.status, 'available')

        # Volumes cannot be updated in place; any change forces replacement.
        self.assertRaises(resource.UpdateReplace,
                          rsrc.handle_update, {}, {}, {})

        # Deleting an attached (in-use) volume must fail.
        fv.status = 'in-use'
        self.assertRaises(exception.ResourceFailure,
                          scheduler.TaskRunner(rsrc.destroy))

        self._stubout_delete_volume(fv)
        fv.status = 'available'
        scheduler.TaskRunner(rsrc.destroy)()

        # Test when volume already deleted
        rsrc.state_set(rsrc.CREATE, rsrc.COMPLETE)
        scheduler.TaskRunner(rsrc.destroy)()

        self.m.VerifyAll()
def test_volume_default_az(self):
    """Create/delete a volume with no explicit availability zone.

    Instance and attachment handling are stubbed out so only the
    Volume resource's cinder calls are exercised; the volume is
    expected to be created with availability_zone=None.
    """
    fv = FakeVolume('creating', 'available')
    stack_name = 'test_volume_stack'

    # create script
    self.m.StubOutWithMock(instance.Instance, 'handle_create')
    self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
    self.m.StubOutWithMock(vol.VolumeAttachment, 'handle_create')
    self.m.StubOutWithMock(vol.VolumeAttachment, 'check_create_complete')
    instance.Instance.handle_create().AndReturn(None)
    instance.Instance.check_create_complete(None).AndReturn(True)
    clients.OpenStackClients.cinder().MultipleTimes().AndReturn(
        self.cinder_fc)
    vol_name = utils.PhysName(stack_name, 'DataVolume')
    self.cinder_fc.volumes.create(
        size=u'1', availability_zone=None,
        display_description=vol_name,
        display_name=vol_name).AndReturn(fv)
    vol.VolumeAttachment.handle_create().AndReturn(None)
    vol.VolumeAttachment.check_create_complete(None).AndReturn(True)

    # delete script
    self.m.StubOutWithMock(instance.Instance, 'handle_delete')
    self.m.StubOutWithMock(vol.VolumeAttachment, 'handle_delete')
    instance.Instance.handle_delete().AndReturn(None)
    self.cinder_fc.volumes.get('vol-123').AndRaise(
        clients.cinderclient.exceptions.NotFound('Not found'))
    vol.VolumeAttachment.handle_delete().AndReturn(None)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    stack = utils.parse_stack(t, stack_name=stack_name)

    rsrc = stack['DataVolume']
    self.assertEqual(rsrc.validate(), None)
    scheduler.TaskRunner(stack.create)()
    self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))

    scheduler.TaskRunner(stack.delete)()
    self.m.VerifyAll()
def test_volume_create_error(self):
    """A volume that lands in 'error' status makes create() fail."""
    failing_volume = FakeVolume('creating', 'error')
    stack_name = 'test_volume_create_error_stack'
    self._mock_create_volume(failing_volume, stack_name)
    self.m.ReplayAll()

    template = template_format.parse(volume_template)
    template['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    parsed_stack = utils.parse_stack(template, stack_name=stack_name)
    data_volume = vol.Volume('DataVolume',
                             template['Resources']['DataVolume'],
                             parsed_stack)
    create_task = scheduler.TaskRunner(data_volume.create)
    self.assertRaises(exception.ResourceFailure, create_task)

    self.m.VerifyAll()
def test_volume_attachment_error(self):
    """An attachment whose volume goes to 'error' raises ResourceFailure."""
    fake_volume = FakeVolume('creating', 'available')
    failing_attachment = FakeVolume('attaching', 'error')
    stack_name = 'test_volume_attach_error_stack'
    self._mock_create_volume(fake_volume, stack_name)
    self._mock_create_server_volume_script(failing_attachment)
    self.m.ReplayAll()

    template = template_format.parse(volume_template)
    template['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    parsed_stack = utils.parse_stack(template, stack_name=stack_name)
    scheduler.TaskRunner(parsed_stack['DataVolume'].create)()
    self.assertEqual(fake_volume.status, 'available')
    mount_point = vol.VolumeAttachment('MountPoint',
                                       template['Resources']['MountPoint'],
                                       parsed_stack)
    create_task = scheduler.TaskRunner(mount_point.create)
    self.assertRaises(exception.ResourceFailure, create_task)

    self.m.VerifyAll()
def test_volume_attachment(self):
    """Happy-path attach then detach of a volume."""
    fv = FakeVolume('creating', 'available')
    fva = FakeVolume('attaching', 'in-use')
    stack_name = 'test_volume_attach_stack'
    self._mock_create_volume(fv, stack_name)
    self._mock_create_server_volume_script(fva)

    # delete script
    fva = FakeVolume('in-use', 'available')
    self.fc.volumes.delete_server_volume('WikiDatabase',
                                         'vol-123').AndReturn(None)
    self.cinder_fc.volumes.get('vol-123').AndReturn(fva)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    stack = utils.parse_stack(t, stack_name=stack_name)
    scheduler.TaskRunner(stack['DataVolume'].create)()
    self.assertEqual(fv.status, 'available')
    rsrc = self.create_attachment(t, stack, 'MountPoint')

    # In-place update of an attachment forces a replacement.
    self.assertRaises(resource.UpdateReplace,
                      rsrc.handle_update, {}, {}, {})

    scheduler.TaskRunner(rsrc.delete)()
    self.m.VerifyAll()
def test_volume_detachment_err(self):
    """Detach keeps retrying through transient nova-side errors.

    delete_server_volume fails with BadRequest and repeated NotFound
    errors before the cinder side finally reports NotFound; the
    attachment delete is still expected to complete.
    """
    fv = FakeVolume('creating', 'available')
    fva = FakeVolume('in-use', 'available')
    stack_name = 'test_volume_detach_stack'
    self._mock_create_volume(fv, stack_name)
    self._mock_create_server_volume_script(fva)

    # delete script
    # BUG FIX: the original initial status read 'i-use' (typo for
    # 'in-use').  It was masked by the explicit status assignment a
    # few lines below, but is corrected here for clarity.
    fva = FakeVolume('in-use', 'available')
    self.m.StubOutWithMock(fva, 'get')
    fva.get().MultipleTimes()
    fva.status = "in-use"
    self.cinder_fc.volumes.get('vol-123').AndReturn(fva)
    self.fc.volumes.delete_server_volume(
        'WikiDatabase', 'vol-123').AndRaise(
            clients.novaclient.exceptions.BadRequest('Already detached'))
    self.fc.volumes.delete_server_volume(
        'WikiDatabase', 'vol-123').AndRaise(
            clients.novaclient.exceptions.NotFound('Not found'))
    self.fc.volumes.delete_server_volume(
        'WikiDatabase', 'vol-123').AndRaise(
            clients.novaclient.exceptions.NotFound('Not found'))
    self.fc.volumes.delete_server_volume(
        'WikiDatabase', 'vol-123').AndRaise(
            clients.cinderclient.exceptions.NotFound('Not found'))
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    stack = utils.parse_stack(t, stack_name=stack_name)
    scheduler.TaskRunner(stack['DataVolume'].create)()
    self.assertEqual(fv.status, 'available')
    rsrc = self.create_attachment(t, stack, 'MountPoint')
    self.assertRaises(resource.UpdateReplace,
                      rsrc.handle_update, {}, {}, {})

    scheduler.TaskRunner(rsrc.delete)()
    self.m.VerifyAll()
def test_volume_detach_non_exist(self):
    """Deleting an attachment whose volume no longer exists succeeds."""
    fake_volume = FakeVolume('creating', 'available')
    fake_attachment = FakeVolume('in-use', 'available')
    stack_name = 'test_volume_detach_stack'
    self._mock_create_volume(fake_volume, stack_name)
    self._mock_create_server_volume_script(fake_attachment)

    # delete script: the volume lookup reports it is already gone
    self.cinder_fc.volumes.get('vol-123').AndRaise(
        clients.cinderclient.exceptions.NotFound('Not found'))
    self.m.ReplayAll()

    template = template_format.parse(volume_template)
    template['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    parsed_stack = utils.parse_stack(template, stack_name=stack_name)
    scheduler.TaskRunner(parsed_stack['DataVolume'].create)()
    attachment = self.create_attachment(template, parsed_stack, 'MountPoint')

    scheduler.TaskRunner(attachment.delete)()
    self.m.VerifyAll()
def test_volume_detach_with_latency(self):
    """Detach succeeds when the volume passes through 'detaching'
    before finally reaching 'available'."""
    fv = FakeVolume('creating', 'available')
    fva = FakeVolume('attaching', 'in-use')
    stack_name = 'test_volume_attach_stack'
    self._mock_create_volume(fv, stack_name)
    self._mock_create_server_volume_script(fva)

    # delete script
    # FakeLatencyVolume steps through each status on every get() call.
    volume_detach_cycle = 'in-use', 'detaching', 'available'
    fva = FakeLatencyVolume(life_cycle=volume_detach_cycle)
    self.fc.volumes.delete_server_volume(
        'WikiDatabase', 'vol-123').MultipleTimes().AndReturn(None)
    self.cinder_fc.volumes.get('vol-123').AndReturn(fva)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    stack = utils.parse_stack(t, stack_name=stack_name)
    scheduler.TaskRunner(stack['DataVolume'].create)()
    self.assertEqual(fv.status, 'available')
    rsrc = self.create_attachment(t, stack, 'MountPoint')

    scheduler.TaskRunner(rsrc.delete)()
    self.m.VerifyAll()
def test_volume_detach_with_error(self):
    """Detach fails with ResourceFailure when the volume ends up in
    'error' status after the detach request."""
    fv = FakeVolume('creating', 'available')
    fva = FakeVolume('attaching', 'in-use')
    stack_name = 'test_volume_attach_stack'
    self._mock_create_volume(fv, stack_name)
    self._mock_create_server_volume_script(fva)

    # delete script
    fva = FakeVolume('in-use', 'error')
    self.fc.volumes.delete_server_volume('WikiDatabase',
                                         'vol-123').AndReturn(None)
    self.cinder_fc.volumes.get('vol-123').AndReturn(fva)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    stack = utils.parse_stack(t, stack_name=stack_name)
    scheduler.TaskRunner(stack['DataVolume'].create)()
    self.assertEqual(fv.status, 'available')
    rsrc = self.create_attachment(t, stack, 'MountPoint')
    detach_task = scheduler.TaskRunner(rsrc.delete)

    self.assertRaises(exception.ResourceFailure, detach_task)
    self.m.VerifyAll()
def test_volume_delete(self):
    """destroy() delegates to handle_delete/check_delete_complete when
    the DeletionPolicy is 'Delete'."""
    stack_name = 'test_volume_stack'
    fv = FakeVolume('creating', 'available')
    # NOTE: an unused FakeBackup local was removed; no backup is
    # involved under the 'Delete' policy.
    self._mock_create_volume(fv, stack_name)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['DeletionPolicy'] = 'Delete'
    stack = utils.parse_stack(t, stack_name=stack_name)

    rsrc = self.create_volume(t, stack, 'DataVolume')

    self.m.StubOutWithMock(rsrc, "handle_delete")
    rsrc.handle_delete().AndReturn(None)
    self.m.StubOutWithMock(rsrc, "check_delete_complete")
    rsrc.check_delete_complete(mox.IgnoreArg()).AndReturn(True)
    self.m.ReplayAll()
    scheduler.TaskRunner(rsrc.destroy)()

    self.m.VerifyAll()
@skipIf(volume_backups is None, 'unable to import volume_backups')
def test_snapshot(self):
    """DeletionPolicy 'Snapshot' backs the volume up before deleting."""
    stack_name = 'test_volume_stack'
    fv = FakeVolume('creating', 'available')
    fb = FakeBackup('creating', 'available')
    self._mock_create_volume(fv, stack_name)

    # snapshot script
    self.m.StubOutWithMock(self.cinder_fc.backups, 'create')
    self.cinder_fc.backups.create('vol-123').AndReturn(fb)
    self.cinder_fc.volumes.get('vol-123').AndReturn(fv)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['DeletionPolicy'] = 'Snapshot'
    stack = utils.parse_stack(t, stack_name=stack_name)

    rsrc = self.create_volume(t, stack, 'DataVolume')

    self._stubout_delete_volume(fv)
    scheduler.TaskRunner(rsrc.destroy)()
    self.m.VerifyAll()
@skipIf(volume_backups is None, 'unable to import volume_backups')
def test_snapshot_error(self):
    """A backup ending in 'error' status makes destroy fail."""
    stack_name = 'test_volume_stack'
    fv = FakeVolume('creating', 'available')
    fb = FakeBackup('creating', 'error')
    self._mock_create_volume(fv, stack_name)

    # snapshot script
    self.cinder_fc.volumes.get('vol-123').AndReturn(fv)
    self.m.StubOutWithMock(self.cinder_fc.backups, 'create')
    self.cinder_fc.backups.create('vol-123').AndReturn(fb)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['DeletionPolicy'] = 'Snapshot'
    stack = utils.parse_stack(t, stack_name=stack_name)

    rsrc = self.create_volume(t, stack, 'DataVolume')

    self.assertRaises(exception.ResourceFailure,
                      scheduler.TaskRunner(rsrc.destroy))
    self.m.VerifyAll()
@skipIf(volume_backups is None, 'unable to import volume_backups')
def test_snapshot_no_volume(self):
    """Under the 'Snapshot' policy, a volume whose create failed can
    still be destroyed (no backup expectations are recorded)."""
    stack_name = 'test_volume_stack'
    fv = FakeVolume('creating', 'error')
    self._mock_create_volume(fv, stack_name)
    self.cinder_fc.volumes.get('vol-123').AndReturn(fv)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['DeletionPolicy'] = 'Snapshot'
    t['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    stack = utils.parse_stack(t, stack_name=stack_name)
    rsrc = vol.Volume('DataVolume',
                      t['Resources']['DataVolume'],
                      stack)

    create = scheduler.TaskRunner(rsrc.create)
    self.assertRaises(exception.ResourceFailure, create)

    self._stubout_delete_volume(fv)
    scheduler.TaskRunner(rsrc.destroy)()
    self.m.VerifyAll()
@skipIf(volume_backups is None, 'unable to import volume_backups')
def test_create_from_snapshot(self):
    """A volume with SnapshotId is created by restoring the backup,
    then renamed to the physical resource name via update()."""
    stack_name = 'test_volume_stack'
    fv = FakeVolumeWithStateTransition('restoring-backup', 'available')
    fvbr = FakeBackupRestore('vol-123')

    # create script
    clients.OpenStackClients.cinder().MultipleTimes().AndReturn(
        self.cinder_fc)
    self.m.StubOutWithMock(self.cinder_fc.restores, 'restore')
    self.cinder_fc.restores.restore('backup-123').AndReturn(fvbr)
    self.cinder_fc.volumes.get('vol-123').AndReturn(fv)
    self.m.StubOutWithMock(fv, 'update')
    vol_name = utils.PhysName(stack_name, 'DataVolume')
    fv.update(
        display_description=vol_name,
        display_name=vol_name)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties']['SnapshotId'] = 'backup-123'
    stack = utils.parse_stack(t, stack_name=stack_name)

    self.create_volume(t, stack, 'DataVolume')
    self.assertEqual(fv.status, 'available')
    self.m.VerifyAll()
@skipIf(volume_backups is None, 'unable to import volume_backups')
def test_create_from_snapshot_error(self):
    """A backup restore that ends in 'error' makes create fail."""
    stack_name = 'test_volume_stack'
    fv = FakeVolumeWithStateTransition('restoring-backup', 'error')
    fvbr = FakeBackupRestore('vol-123')

    # create script
    clients.OpenStackClients.cinder().MultipleTimes().AndReturn(
        self.cinder_fc)
    self.m.StubOutWithMock(self.cinder_fc.restores, 'restore')
    self.cinder_fc.restores.restore('backup-123').AndReturn(fvbr)
    self.cinder_fc.volumes.get('vol-123').AndReturn(fv)
    self.m.StubOutWithMock(fv, 'update')
    vol_name = utils.PhysName(stack_name, 'DataVolume')
    fv.update(
        display_description=vol_name,
        display_name=vol_name)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties']['SnapshotId'] = 'backup-123'
    t['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    stack = utils.parse_stack(t, stack_name=stack_name)
    rsrc = vol.Volume('DataVolume',
                      t['Resources']['DataVolume'],
                      stack)

    create = scheduler.TaskRunner(rsrc.create)
    self.assertRaises(exception.ResourceFailure, create)

    self.m.VerifyAll()
def test_cinder_create(self):
    """All CinderVolume properties are passed through verbatim to
    cinderclient's volumes.create."""
    fv = FakeVolume('creating', 'available')
    stack_name = 'test_volume_stack'

    clients.OpenStackClients.cinder().MultipleTimes().AndReturn(
        self.cinder_fc)
    self.cinder_fc.volumes.create(
        size=u'1', availability_zone='nova',
        display_description='CustomDescription',
        display_name='CustomName',
        imageRef='Image1',
        snapshot_id='snap-123',
        metadata={'key': 'value'},
        source_volid='vol-012',
        volume_type='lvm').AndReturn(fv)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties'] = {
        'size': '1',
        'availability_zone': 'nova',
        'name': 'CustomName',
        'description': 'CustomDescription',
        'volume_type': 'lvm',
        'metadata': {'key': 'value'},
        # Note that specifying all these arguments doesn't work in
        # practice, as they are conflicting, but we just want to check they
        # are sent to the backend.
        'imageRef': 'Image1',
        'snapshot_id': 'snap-123',
        'source_volid': 'vol-012',
    }
    stack = utils.parse_stack(t, stack_name=stack_name)

    rsrc = vol.CinderVolume('DataVolume',
                            t['Resources']['DataVolume'],
                            stack)
    self.assertEqual(rsrc.validate(), None)
    scheduler.TaskRunner(rsrc.create)()
    self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
    self.assertEqual(fv.status, 'available')
    self.m.VerifyAll()
def test_cinder_create_from_image(self):
    """A CinderVolume with imageRef forwards it to volumes.create; the
    fake volume transitions from 'downloading' to 'available'."""
    fv = FakeVolumeWithStateTransition('downloading', 'available')
    stack_name = 'test_volume_stack'

    clients.OpenStackClients.cinder().MultipleTimes().AndReturn(
        self.cinder_fc)
    self.cinder_fc.volumes.create(
        size=u'1', availability_zone='nova',
        display_description='ImageVolumeDescription',
        display_name='ImageVolume',
        imageRef='Image1').AndReturn(fv)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties'] = {
        'size': '1',
        'name': 'ImageVolume',
        'description': 'ImageVolumeDescription',
        'availability_zone': 'nova',
        'imageRef': 'Image1',
    }
    stack = utils.parse_stack(t, stack_name=stack_name)

    rsrc = vol.CinderVolume('DataVolume',
                            t['Resources']['DataVolume'],
                            stack)
    self.assertEqual(rsrc.validate(), None)
    scheduler.TaskRunner(rsrc.create)()
    self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
    self.assertEqual(fv.status, 'available')
    self.m.VerifyAll()
def test_cinder_default(self):
    """With only size and availability_zone set, the volume is created
    with the physical resource name and no description."""
    fake_volume = FakeVolume('creating', 'available')
    stack_name = 'test_volume_stack'

    clients.OpenStackClients.cinder().MultipleTimes().AndReturn(
        self.cinder_fc)
    vol_name = utils.PhysName(stack_name, 'DataVolume')
    self.cinder_fc.volumes.create(
        size=u'1', availability_zone='nova',
        display_description=None,
        display_name=vol_name).AndReturn(fake_volume)
    self.m.ReplayAll()

    template = template_format.parse(volume_template)
    template['Resources']['DataVolume']['Properties'] = {
        'size': '1',
        'availability_zone': 'nova',
    }
    parsed_stack = utils.parse_stack(template, stack_name=stack_name)

    data_volume = vol.CinderVolume('DataVolume',
                                   template['Resources']['DataVolume'],
                                   parsed_stack)
    self.assertEqual(data_volume.validate(), None)
    scheduler.TaskRunner(data_volume.create)()
    self.assertEqual(data_volume.state,
                     (data_volume.CREATE, data_volume.COMPLETE))
    self.assertEqual(fake_volume.status, 'available')
    self.m.VerifyAll()
def test_cinder_fn_getatt(self):
    """FnGetAtt exposes the backing volume's attributes, stringified."""
    fv = FakeVolume('creating', 'available', availability_zone='zone1',
                    size=1, snapshot_id='snap-123', display_name='name',
                    display_description='desc', volume_type='lvm',
                    metadata={'key': 'value'}, source_volid=None,
                    status='available', bootable=False,
                    created_at='2013-02-25T02:40:21.000000')
    stack_name = 'test_volume_stack'

    clients.OpenStackClients.cinder().MultipleTimes().AndReturn(
        self.cinder_fc)
    vol_name = utils.PhysName(stack_name, 'DataVolume')
    self.cinder_fc.volumes.create(
        size=u'1', availability_zone='nova',
        display_description=None,
        display_name=vol_name).AndReturn(fv)
    self.cinder_fc.volumes.get('vol-123').MultipleTimes().AndReturn(fv)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties'] = {
        'size': '1',
        'availability_zone': 'nova',
    }
    stack = utils.parse_stack(t, stack_name=stack_name)

    rsrc = vol.CinderVolume('DataVolume',
                            t['Resources']['DataVolume'],
                            stack)
    scheduler.TaskRunner(rsrc.create)()

    self.assertEqual(u'zone1', rsrc.FnGetAtt('availability_zone'))
    self.assertEqual(u'1', rsrc.FnGetAtt('size'))
    self.assertEqual(u'snap-123', rsrc.FnGetAtt('snapshot_id'))
    self.assertEqual(u'name', rsrc.FnGetAtt('display_name'))
    self.assertEqual(u'desc', rsrc.FnGetAtt('display_description'))
    self.assertEqual(u'lvm', rsrc.FnGetAtt('volume_type'))
    self.assertEqual(json.dumps({'key': 'value'}),
                     rsrc.FnGetAtt('metadata'))
    # Even None comes back as the string u'None'.
    self.assertEqual(u'None', rsrc.FnGetAtt('source_volid'))
    self.assertEqual(u'available', rsrc.FnGetAtt('status'))
    self.assertEqual(u'2013-02-25T02:40:21.000000',
                     rsrc.FnGetAtt('created_at'))
    self.assertEqual(u'False', rsrc.FnGetAtt('bootable'))

    # Unknown attribute names raise InvalidTemplateAttribute.
    error = self.assertRaises(exception.InvalidTemplateAttribute,
                              rsrc.FnGetAtt, 'unknown')
    self.assertEqual(
        'The Referenced Attribute (DataVolume unknown) is incorrect.',
        str(error))
    self.m.VerifyAll()
def test_cinder_attachment(self):
    """Attach and detach via CinderVolumeAttachment properties
    (instance_uuid / volume_id / mountpoint)."""
    fv = FakeVolume('creating', 'available')
    fva = FakeVolume('attaching', 'in-use')
    stack_name = 'test_volume_attach_stack'
    self._mock_create_volume(fv, stack_name)
    self._mock_create_server_volume_script(fva)

    # delete script
    fva = FakeVolume('in-use', 'available')
    self.fc.volumes.delete_server_volume('WikiDatabase',
                                         'vol-123').AndReturn(None)
    self.cinder_fc.volumes.get('vol-123').AndReturn(fva)
    self.m.ReplayAll()

    t = template_format.parse(volume_template)
    t['Resources']['DataVolume']['Properties']['AvailabilityZone'] = 'nova'
    t['Resources']['MountPoint']['Properties'] = {
        'instance_uuid': {'Ref': 'WikiDatabase'},
        'volume_id': {'Ref': 'DataVolume'},
        'mountpoint': '/dev/vdc'
    }
    stack = utils.parse_stack(t, stack_name=stack_name)
    scheduler.TaskRunner(stack['DataVolume'].create)()
    self.assertEqual(fv.status, 'available')

    rsrc = vol.CinderVolumeAttachment('MountPoint',
                                      t['Resources']['MountPoint'],
                                      stack)
    self.assertEqual(rsrc.validate(), None)
    scheduler.TaskRunner(rsrc.create)()
    self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))

    # In-place update of an attachment forces a replacement.
    self.assertRaises(resource.UpdateReplace, rsrc.handle_update,
                      {}, {}, {})

    scheduler.TaskRunner(rsrc.delete)()
    self.m.VerifyAll()
class FakeVolume(object):
    """Minimal stand-in for a cinderclient volume.

    The status transitions from ``initial_status`` to ``final_status``
    on the first call to get().  Extra keyword arguments become
    instance attributes.
    """
    status = 'attaching'
    id = 'vol-123'

    def __init__(self, initial_status, final_status, **attrs):
        self.status = initial_status
        self.final_status = final_status
        # items() instead of the original iteritems() keeps this
        # helper working on both Python 2 and Python 3.
        for key, value in attrs.items():
            setattr(self, key, value)

    def get(self):
        self.status = self.final_status

    def update(self, **kw):
        pass

    def delete(self):
        pass
class FakeLatencyVolume(object):
    """Fake volume whose status walks through an arbitrary life cycle.

    The initial status is the first entry of ``life_cycle``; each call
    to get() advances it to the next entry.
    """
    status = 'attaching'
    id = 'vol-123'

    def __init__(self, life_cycle=('creating', 'available'), **attrs):
        if not isinstance(life_cycle, tuple):
            raise exception.Error('life_cycle need to be a tuple.')
        if not len(life_cycle):
            raise exception.Error('life_cycle should not be an empty tuple.')
        self.life_cycle = iter(life_cycle)
        self.status = next(self.life_cycle)
        # items() instead of the original iteritems() keeps this
        # helper working on both Python 2 and Python 3.
        for key, value in attrs.items():
            setattr(self, key, value)

    def get(self):
        self.status = next(self.life_cycle)

    def update(self, **kw):
        pass
class FakeBackup(FakeVolume):
    """Fake cinder backup object; behaves like FakeVolume but carries
    a backup id."""
    status = 'creating'
    id = 'backup-123'
class FakeBackupRestore(object):
    """Fake result of a cinder backup restore, carrying the id of the
    restored volume."""
    volume_id = 'vol-123'

    def __init__(self, volume_id):
        self.volume_id = volume_id
class FakeVolumeWithStateTransition(FakeVolume):
    """Fake volume that keeps its initial status for one get() call
    before transitioning to the final status."""
    status = 'restoring-backup'
    get_call_count = 0

    def get(self):
        # Allow get to be called once without changing the status
        # This is to allow the check_create_complete method to
        # check the initial status.
        if self.get_call_count < 1:
            self.get_call_count += 1
        else:
            self.status = self.final_status
| 36.324009 | 79 | 0.622955 | 3,304 | 31,166 | 5.700666 | 0.097458 | 0.038227 | 0.026759 | 0.025219 | 0.758906 | 0.71245 | 0.673852 | 0.651341 | 0.651341 | 0.620706 | 0 | 0.009176 | 0.251685 | 31,166 | 857 | 80 | 36.366394 | 0.798431 | 0.035487 | 0 | 0.643879 | 0 | 0.00159 | 0.175781 | 0.016319 | 0 | 0 | 0 | 0 | 0.077901 | 1 | 0.055644 | false | 0.006359 | 0.033386 | 0 | 0.116057 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
17659c9e5eddb82d57f4953e92dbe37b5d1b76f7 | 819 | py | Python | Python-For-Everyone-Horstmann/Chapter10-Inheritance/c19.py | islayy/Books-solutions | 5fe05deb4e9f65875284d8af43bd383bf9ae145b | [
"MIT"
] | null | null | null | Python-For-Everyone-Horstmann/Chapter10-Inheritance/c19.py | islayy/Books-solutions | 5fe05deb4e9f65875284d8af43bd383bf9ae145b | [
"MIT"
] | null | null | null | Python-For-Everyone-Horstmann/Chapter10-Inheritance/c19.py | islayy/Books-solutions | 5fe05deb4e9f65875284d8af43bd383bf9ae145b | [
"MIT"
] | 1 | 2021-01-30T22:19:07.000Z | 2021-01-30T22:19:07.000Z | # In the Manager class of Self Check 11, override the getName method so that managers
# have a * before their name (such as *Lin, Sally ).
class Employee():
def __init__(self, name="", base_salary=0.0):
self._name = name
self._base_salary = base_salary
def set_name(self, new_name):
self._name = new_name
def set_base_salary(self, new_salary):
self._base_salary = new_salary
def get_name(self):
return self._name
def get_salary(self):
return self._base_salary
class Manager(Employee):
def __init__(self, name="", base_salary=0.0, bonus=0.0):
super().__init__(name, base_salary)
self._bonus = bonus
def get_bonus(self):
return self._bonus
def get_name(self):
return "* {}".format(super.get_name())
| 24.818182 | 85 | 0.644689 | 117 | 819 | 4.179487 | 0.307692 | 0.163599 | 0.08589 | 0.07771 | 0.224949 | 0.143149 | 0.143149 | 0.143149 | 0.143149 | 0 | 0 | 0.012987 | 0.247863 | 819 | 32 | 86 | 25.59375 | 0.780844 | 0.163614 | 0 | 0.1 | 0 | 0 | 0.005865 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0.2 | 0.7 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
17682bc4b4395530ae930a02c6636c6d0a809aca | 3,001 | py | Python | src/commons/handlers/validators.py | Morgenz/bbq | f0fd3f626841c610aee80ad08a61123b7cccb775 | [
"Apache-2.0"
] | 41 | 2018-05-08T11:54:37.000Z | 2022-02-09T21:19:17.000Z | src/commons/handlers/validators.py | Morgenz/bbq | f0fd3f626841c610aee80ad08a61123b7cccb775 | [
"Apache-2.0"
] | 139 | 2018-06-07T13:45:21.000Z | 2021-04-30T20:44:06.000Z | src/commons/handlers/validators.py | Morgenz/bbq | f0fd3f626841c610aee80ad08a61123b7cccb775 | [
"Apache-2.0"
] | 5 | 2019-09-11T12:28:24.000Z | 2022-02-04T21:38:29.000Z | import re
from src.commons.exceptions import ParameterValidationException
class WrongDatasetNameException(Exception):
    """Raised when a dataset id contains disallowed characters."""
    pass


class WrongProjectNameException(Exception):
    """Raised when a project id contains disallowed characters."""
    pass


class WrongWriteDispositionException(Exception):
    """Raised for a write disposition outside the supported set."""
    pass


class WrongCreateDispositionException(Exception):
    """Raised for a create disposition outside the supported set."""
    pass
# Project ids: letters, digits and dashes only.
project_id_pattern = re.compile("^[a-zA-Z0-9-]+$")
# Dataset ids: letters, digits and underscores only.
dataset_id_pattern = re.compile("^[a-zA-Z0-9_]+$")
# Disposition values accepted by the validators below.
available_create_dispositions = ["CREATE_IF_NEEDED", "CREATE_NEVER"]
available_write_dispositions = ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]
def validate_restore_request_params(
        source_project_id=None, source_dataset_id=None,
        target_project_id=None, target_dataset_id=None,
        create_disposition=None, write_disposition=None):
    """Validate the optional restore-request parameters.

    Each argument is only checked when provided (truthy).  Any
    validator failure is re-raised as ParameterValidationException.
    """
    try:
        if source_project_id:
            validate_project_id(source_project_id)
        if source_dataset_id:
            validate_dataset_id(source_dataset_id)
        if target_project_id:
            validate_project_id(target_project_id)
        if target_dataset_id:
            validate_dataset_id(target_dataset_id)
        if write_disposition:
            validate_write_disposition(write_disposition)
        if create_disposition:
            validate_create_disposition(create_disposition)
    except (WrongDatasetNameException,
            WrongProjectNameException,
            WrongWriteDispositionException,
            WrongCreateDispositionException) as e:
        # 'as e' and str(e) replace the Python 2-only 'except ..., e'
        # syntax and the removed-in-Python-3 'e.message' attribute;
        # both forms are also valid on Python 2.6+.
        raise ParameterValidationException(str(e))
def validate_project_id(project_id):
    """Raise WrongProjectNameException unless ``project_id`` is
    non-empty and matches [a-zA-Z0-9-]+."""
    if project_id and project_id_pattern.match(project_id):
        return
    raise WrongProjectNameException(
        "Invalid project value: '{}'. Project IDs may "
        "contain letters, numbers, and "
        "dash".format(project_id))
def validate_dataset_id(dataset_id):
    """Raise WrongDatasetNameException unless ``dataset_id`` is
    non-empty and matches [a-zA-Z0-9_]+."""
    if dataset_id and dataset_id_pattern.match(dataset_id):
        return
    raise WrongDatasetNameException(
        "Invalid dataset value: '{}'. Dataset IDs may "
        "contain letters, numbers, and "
        "underscores".format(dataset_id))
def validate_write_disposition(write_disposition):
    """Raise WrongWriteDispositionException for values outside
    available_write_dispositions."""
    if write_disposition in available_write_dispositions:
        return
    raise WrongWriteDispositionException(
        "Invalid write disposition: '{}'. "
        "The following values are supported: {}."
        .format(write_disposition, ', '.join(available_write_dispositions)))
def validate_create_disposition(create_disposition):
    """Raise WrongCreateDispositionException for values outside
    available_create_dispositions."""
    if create_disposition in available_create_dispositions:
        return
    raise WrongCreateDispositionException(
        "Invalid create disposition: '{}'. "
        "The following values are supported: {}."
        .format(create_disposition,
                ', '.join(available_create_dispositions)))
| 35.305882 | 80 | 0.696768 | 299 | 3,001 | 6.652174 | 0.230769 | 0.067873 | 0.036199 | 0.034691 | 0.238311 | 0.143791 | 0.071393 | 0.024133 | 0 | 0 | 0 | 0.001724 | 0.226924 | 3,001 | 84 | 81 | 35.72619 | 0.855603 | 0 | 0 | 0.131148 | 0 | 0 | 0.136288 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.065574 | 0.032787 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
176a64869862f6d182abc1b03cc96af768ff9d14 | 596 | py | Python | boundlexx/api/db.py | AngellusMortis/boundlexx | 407f5e38e8e0f067cbcb358787fc9af6a9be9b2a | [
"MIT"
] | 1 | 2021-04-23T11:49:50.000Z | 2021-04-23T11:49:50.000Z | boundlexx/api/db.py | AngellusMortis/boundlexx | 407f5e38e8e0f067cbcb358787fc9af6a9be9b2a | [
"MIT"
] | 1 | 2021-04-17T18:17:12.000Z | 2021-04-17T18:17:12.000Z | boundlexx/api/db.py | AngellusMortis/boundlexx | 407f5e38e8e0f067cbcb358787fc9af6a9be9b2a | [
"MIT"
] | null | null | null | from django.db.models.aggregates import Aggregate
from django.db.models.functions.mixins import (
FixDurationInputMixin,
NumericOutputFieldMixin,
)
class Mode(
FixDurationInputMixin, NumericOutputFieldMixin, Aggregate
): # pylint: disable=abstract-method
template = "%(function)s() WITHIN GROUP (ORDER BY %(expressions)s)"
function = "mode"
name = "Mode"
allow_distinct = False
class Median(
FixDurationInputMixin, NumericOutputFieldMixin, Aggregate
): # pylint: disable=abstract-method
function = "median"
name = "Median"
allow_distinct = False
| 25.913043 | 71 | 0.729866 | 57 | 596 | 7.596491 | 0.526316 | 0.30485 | 0.055427 | 0.083141 | 0.369515 | 0.369515 | 0.369515 | 0 | 0 | 0 | 0 | 0 | 0.176175 | 596 | 22 | 72 | 27.090909 | 0.881874 | 0.105705 | 0 | 0.333333 | 0 | 0 | 0.139623 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.611111 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
177fb1050825566896f7bfa763e59a5bb2eb52af | 1,805 | py | Python | tests/lib/bes/common/test_algorithm.py | reconstruir/bes | 82ff54b2dadcaef6849d7de424787f1dedace85c | [
"Apache-2.0"
] | null | null | null | tests/lib/bes/common/test_algorithm.py | reconstruir/bes | 82ff54b2dadcaef6849d7de424787f1dedace85c | [
"Apache-2.0"
] | null | null | null | tests/lib/bes/common/test_algorithm.py | reconstruir/bes | 82ff54b2dadcaef6849d7de424787f1dedace85c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#-*- coding:utf-8 -*-
#
import unittest
from bes.common.algorithm import algorithm
from bes.compat.cmp import cmp
class test_algorithm(unittest.TestCase):
def test_remove_empties(self):
self.assertEqual( [ 'x' ], algorithm.remove_empties([ 'x', ]) )
self.assertEqual( [ 'x' ], algorithm.remove_empties([ 'x', None ]) )
self.assertEqual( [ 'x' ], algorithm.remove_empties([ 'x', [] ]) )
self.assertEqual( [ 'x' ], algorithm.remove_empties([ 'x', () ]) )
self.assertEqual( [ 'x', 0 ], algorithm.remove_empties([ 'x', 0 ]) )
self.assertEqual( [ 'x', 0.0 ], algorithm.remove_empties([ 'x', 0.0 ]) )
self.assertEqual( [ 'x', False ], algorithm.remove_empties([ 'x', False ]) )
def test_unique(self):
self.assertEqual( [ 'a', 'b', 'c' ], algorithm.unique([ 'a', 'b', 'c' ]) )
self.assertEqual( [ 'a', 'b', 'c' ], algorithm.unique([ 'a', 'b', 'c', 'c' ]) )
self.assertEqual( [ 'c', 'a', 'b' ], algorithm.unique([ 'c', 'a', 'b', 'c' ]) )
def test_not_unique(self):
self.assertEqual( [], algorithm.not_unique([ 'a', 'b', 'c' ]) )
self.assertEqual( [ 'c' ], algorithm.not_unique([ 'a', 'b', 'c', 'c' ]) )
self.assertEqual( [ 'c' ], algorithm.not_unique([ 'c', 'a', 'b', 'c' ]) )
def test_binary_search(self):
a = [ 1, 5, 7, 9, 20, 1000, 1001, 1002, 3000 ]
comp = lambda a, b: cmp(a, b)
self.assertEqual( 0, algorithm.binary_search(a, 1, comp) )
self.assertEqual( -1, algorithm.binary_search(a, 0, comp) )
self.assertEqual( -1, algorithm.binary_search(a, 2, comp) )
self.assertEqual( 4, algorithm.binary_search(a, 20, comp) )
self.assertEqual( 8, algorithm.binary_search(a, 3000, comp) )
self.assertEqual( -1, algorithm.binary_search(a, 3001, comp) )
if __name__ == "__main__":
unittest.main()
| 44.02439 | 83 | 0.60554 | 239 | 1,805 | 4.439331 | 0.213389 | 0.268615 | 0.02262 | 0.151744 | 0.525919 | 0.525919 | 0.459943 | 0.389255 | 0.229972 | 0.229972 | 0 | 0.032323 | 0.177285 | 1,805 | 40 | 84 | 45.125 | 0.682155 | 0.022161 | 0 | 0 | 0 | 0 | 0.031215 | 0 | 0 | 0 | 0 | 0 | 0.612903 | 1 | 0.129032 | false | 0 | 0.096774 | 0 | 0.258065 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
17964d45ed6662154b8f724b7797402751cf78af | 297 | py | Python | aio_yamlconfig/trafarets.py | webknjaz/aio_yamlconfig | 3c7d345764fd71882629e0e5d1b3d54402ce218b | [
"BSD-3-Clause"
] | 2 | 2016-10-27T06:13:10.000Z | 2017-08-14T09:08:34.000Z | aio_yamlconfig/trafarets.py | webknjaz/aio_yamlconfig | 3c7d345764fd71882629e0e5d1b3d54402ce218b | [
"BSD-3-Clause"
] | 1 | 2019-06-29T11:05:22.000Z | 2019-06-29T11:05:22.000Z | aio_yamlconfig/trafarets.py | webknjaz/aio_yamlconfig | 3c7d345764fd71882629e0e5d1b3d54402ce218b | [
"BSD-3-Clause"
] | 3 | 2016-10-06T07:14:01.000Z | 2017-05-30T04:33:50.000Z | import os
import trafaret as t
class ExistingDirectory(t.String):
    """trafaret String check that additionally requires the value to
    name an existing directory on disk."""

    # NOTE: the original defined an __init__ that only forwarded
    # *args/**kwargs to super(); it was redundant and has been removed
    # (construction behavior is unchanged).

    def check_value(self, value):
        # Raise trafaret's DataError so it integrates with the normal
        # validation error reporting.
        if not os.path.isdir(value):
            raise t.DataError(error='{} is not directory'.format(value))
| 22.846154 | 72 | 0.643098 | 39 | 297 | 4.666667 | 0.666667 | 0.10989 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.218855 | 297 | 12 | 73 | 24.75 | 0.784483 | 0 | 0 | 0 | 0 | 0 | 0.063973 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
1799523b5d4965aa275e8378f3fee23ce24dc5ba | 595 | py | Python | aiocloudflare/api/user/tokens/tokens.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 2 | 2021-09-14T13:20:55.000Z | 2022-02-24T14:18:24.000Z | aiocloudflare/api/user/tokens/tokens.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 46 | 2021-09-08T08:39:45.000Z | 2022-03-29T12:31:05.000Z | aiocloudflare/api/user/tokens/tokens.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 1 | 2021-12-30T23:02:23.000Z | 2021-12-30T23:02:23.000Z | from aiocloudflare.commons.auth import Auth
from .permission_groups.permission_groups import PermissionGroups
from .value.value import Value
from .verify.verify import Verify
class Tokens(Auth):
    """Client for the Cloudflare ``user/tokens`` endpoint.

    Nested token resources are exposed as properties that build
    sub-clients sharing this client's config and session.
    """

    _endpoint1 = "user/tokens"
    _endpoint2 = None
    _endpoint3 = None

    def _sub_client(self, factory):
        # Every sub-client is constructed from the same config/session pair.
        return factory(self._config, self._session)

    @property
    def value(self) -> Value:
        """Sub-client for a token's value resource."""
        return self._sub_client(Value)

    @property
    def verify(self) -> Verify:
        """Sub-client for token verification."""
        return self._sub_client(Verify)

    @property
    def permission_groups(self) -> PermissionGroups:
        """Sub-client for token permission groups."""
        return self._sub_client(PermissionGroups)
| 24.791667 | 65 | 0.719328 | 67 | 595 | 6.208955 | 0.343284 | 0.115385 | 0.100962 | 0.151442 | 0.153846 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0.006276 | 0.196639 | 595 | 23 | 66 | 25.869565 | 0.864017 | 0 | 0 | 0.176471 | 0 | 0 | 0.018487 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0 | 0.235294 | 0.176471 | 0.823529 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
179de16a8ab7591a4c7b889f0faffccc27dd30c2 | 433 | py | Python | Simulador de cajero.py | alvarado0211-sys/Primeros-Proyectos | f45ba9875e83eb1790fb6fc6b393168cace7649b | [
"MIT"
] | 1 | 2021-03-05T14:32:05.000Z | 2021-03-05T14:32:05.000Z | Simulador de cajero.py | alvarado0211-sys/Primeros-Proyectos | f45ba9875e83eb1790fb6fc6b393168cace7649b | [
"MIT"
] | null | null | null | Simulador de cajero.py | alvarado0211-sys/Primeros-Proyectos | f45ba9875e83eb1790fb6fc6b393168cace7649b | [
"MIT"
] | null | null | null | print("Bienvenido al cajero automatico de este banco")
print()
usuario=int(input("Ingrese la cantidad de dinero que desea\n"))
cant500 = usuario // 500
resto500 = usuario % 500
cant200 = resto500 // 200
resto200 = resto500 % 200
cant100 = resto200 // 100
resto100 = resto200 % 100
print("cant de billetes de 500: ", cant500)
print()
print("cant de billetes de 200: ", cant200)
print()
print("cant de billetes de 100: ", cant100)
| 22.789474 | 63 | 0.715935 | 61 | 433 | 5.081967 | 0.47541 | 0.087097 | 0.106452 | 0.183871 | 0.235484 | 0.167742 | 0 | 0 | 0 | 0 | 0 | 0.182825 | 0.166282 | 433 | 18 | 64 | 24.055556 | 0.6759 | 0 | 0 | 0.214286 | 0 | 0 | 0.371824 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
bd57ca26cd9515c0c18f116dd16f497822831d0f | 1,374 | py | Python | CA-project/libOverhead/calc.py | simewu/openssl | b4b95c47e8efc25630bd3db91aa1977b591317a1 | [
"OpenSSL"
] | null | null | null | CA-project/libOverhead/calc.py | simewu/openssl | b4b95c47e8efc25630bd3db91aa1977b591317a1 | [
"OpenSSL"
] | null | null | null | CA-project/libOverhead/calc.py | simewu/openssl | b4b95c47e8efc25630bd3db91aa1977b591317a1 | [
"OpenSSL"
] | null | null | null | import os
import time
openssl_dir = os.path.expanduser('~/openssl')


def _benchmark(cmd, label, iterations=1000):
    """Shell out to *cmd* *iterations* times and print the mean wall time."""
    start = time.time()
    for _ in range(iterations):
        os.system(cmd)
    print(f'{label} {(time.time() - start) / iterations}')


# (shell command, report label) pairs, timed in order.  This replaces five
# copy-pasted timing stanzas.
# NOTE(review): the CSR and cert-gen commands use a relative 'apps/openssl'
# path unlike the others -- confirm that is intentional.
for cmd, label in [
    (f'{openssl_dir}/apps/openssl ecparam -out > /dev/null 2>&1',
     'Time for ECDSA ECPARAM'),
    (f'{openssl_dir}/apps/openssl req -x509 -new -newkey > /dev/null 2>&1',
     'Time for CA key and cert API'),
    (f'{openssl_dir}/apps/openssl genpkey > /dev/null 2>&1',
     'Time API for Server private key'),
    ('apps/openssl req -new -key > /dev/null 2>&1',
     'Time API for CSR gen'),
    ('apps/openssl x509 -req -in > /dev/null 2>&1',
     'Time API for cert gen'),
]:
    _benchmark(cmd, label)
myCmd = f'{openssl_dir}/apps/openssl verify -CAfile > /dev/null 2>&1'
startTime = time.time()
for i in range(1000):
os.system(myCmd)
endTime = time.time()
print (f'Time for verify API {(endTime-startTime)/1000}') | 29.869565 | 77 | 0.689229 | 226 | 1,374 | 4.168142 | 0.185841 | 0.101911 | 0.050955 | 0.057325 | 0.795117 | 0.795117 | 0.760085 | 0.681529 | 0.681529 | 0.531847 | 0 | 0.05617 | 0.144833 | 1,374 | 46 | 78 | 29.869565 | 0.745532 | 0 | 0 | 0.615385 | 0 | 0.025641 | 0.457455 | 0.189091 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.051282 | 0 | 0.051282 | 0.153846 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bd684fb90db3582099f8d410ccfc5434445c4d14 | 6,318 | py | Python | django_workflow_system/utils/logging_utils.py | eikonomega/django-workflow-system | dc0e8807263266713d3d7fa46e240e8d72db28d1 | [
"MIT"
] | 2 | 2022-01-28T12:35:42.000Z | 2022-03-23T16:06:05.000Z | django_workflow_system/utils/logging_utils.py | eikonomega/django-workflow-system | dc0e8807263266713d3d7fa46e240e8d72db28d1 | [
"MIT"
] | 10 | 2021-04-27T20:26:32.000Z | 2021-07-21T15:34:31.000Z | django_workflow_system/utils/logging_utils.py | eikonomega/django-workflow-system | dc0e8807263266713d3d7fa46e240e8d72db28d1 | [
"MIT"
] | 1 | 2021-11-13T14:30:34.000Z | 2021-11-13T14:30:34.000Z | from rest_framework.request import Request
def strip_sensitive_data(data: dict) -> str:
    """Return a string form of *data* with sensitive values censored.

    Used to scrub request payloads before they are shipped to graylog.
    The caller's mapping is never mutated.
    """
    censored_keys = {"password", "response", "user_responses"}
    scrubbed = {
        key: "[CENSORED]" if key in censored_keys else value
        for key, value in data.items()
    }
    return str(scrubbed)
def find_property(property, *source_list, default):
    """Search *source_list* for an object whose attribute *property* is truthy.

    A truthy *default* short-circuits the search and is returned as-is.

    Parameters
    ----------
    property
        name of the property
    source_list
        objects to search through (``None`` entries are skipped)
    default
        default value for the property

    Returns
    -------
    the first truthy value of the property, or ``None`` when nothing
    matches and *default* is falsy
    """
    if default:
        return default
    for candidate in source_list:
        if candidate is None:
            continue
        value = getattr(candidate, property, None)
        if value:
            return value
def generate_extra(
    *,  # the following must be presented as keyword args
    event_code=None,
    request: Request = None,
    user=None,
    activity=None,
    activity_assignment=None,
    workflow_collection=None,
    workflow_collection_assignment=None,
    workflow_collection_engagement=None,
    workflow_collection_engagement_detail=None,
    workflow_collection_subscription=None,
    serializer_errors=None,
    **kwargs,
):
    """
    Extracts useful information from WW objects into a flat dict for GELF logging.

    Parameters
    ----------
    event_code: str
        a static code representing the type of event which occurred
    request: Request
        a request
    user: User
        a user
    activity: Activity
        a map your day activity
    activity_assignment: ActivityAssignment
        a map your day activity assignment
    workflow_collection: WorkflowCollection
        a workflow collection
    workflow_collection_assignment: WorkflowCollectionAssignment
        a workflow collection assignment
    workflow_collection_engagement: WorkflowCollectionEngagement
        a workflow collection engagement
    workflow_collection_engagement_detail: WorkflowCollectionEngagementDetail
        a workflow collection engagement detail
    workflow_collection_subscription: WorkflowCollectionSubscription
        a workflow collection subscription
    serializer_errors:
        serializer errors
    kwargs
        any additional arguments (copied into the result unchanged)

    Returns
    -------
    extra: dict
        flat mapping of double-underscore-namespaced keys to values
    """
    extra = dict(kwargs)
    if event_code:
        extra["event_code"] = event_code
    if serializer_errors:
        extra["serializer_errors"] = serializer_errors
    # automatically add missing info: derive parent objects from the most
    # specific child passed in, so callers only supply what they have on hand
    workflow_collection_engagement = find_property(
        "workflow_collection_engagement",
        workflow_collection_engagement_detail,
        default=workflow_collection_engagement,
    )
    workflow_collection = find_property(
        "workflow_collection",
        workflow_collection_assignment,
        workflow_collection_subscription,
        workflow_collection_engagement,
        default=workflow_collection,
    )
    activity = find_property(
        "activity",
        activity_assignment,
        default=activity,
    )
    user = find_property(
        "user",
        request,
        workflow_collection_assignment,
        workflow_collection_subscription,
        workflow_collection_engagement,
        activity_assignment,
        default=user,
    )
    # handle request
    if request:
        extra["request__path"] = request.path
        extra["request__method"] = request.method
        if hasattr(request, "query_params") and request.query_params:
            extra["request__query_params"] = request.query_params
        if hasattr(request, "data") and request.data:
            # scrub passwords/responses before the payload reaches the log sink
            extra["request__data"] = strip_sensitive_data(request.data)
    # handle user
    if user:
        extra["user__username"] = user.username
        extra["user__id"] = user.id
    if activity:
        extra["activity__id"] = activity.id
        extra["activity__name"] = activity.name
    if activity_assignment:
        extra["activity_assignment__id"] = activity_assignment.id
        extra[
            "activity_assignment__associated_date"
        ] = activity_assignment.associated_date
    if workflow_collection:
        extra["workflow_collection__id"] = workflow_collection.id
        extra["workflow_collection__code"] = workflow_collection.code
        extra["workflow_collection__version"] = workflow_collection.version
        extra["workflow_collection__category"] = workflow_collection.category
    if workflow_collection_assignment:
        extra["workflow_collection_assignment__id"] = workflow_collection_assignment.id
        extra[
            "workflow_collection_assignment__start"
        ] = workflow_collection_assignment.start
        extra[
            "workflow_collection_assignment__status"
        ] = workflow_collection_assignment.status
    if workflow_collection_engagement:
        extra["workflow_collection_engagement__id"] = workflow_collection_engagement.id
        extra[
            "workflow_collection_engagement__started"
        ] = workflow_collection_engagement.started
        extra[
            "workflow_collection_engagement__finished"
        ] = workflow_collection_engagement.finished
    if workflow_collection_engagement_detail:
        extra[
            "workflow_collection_engagement_detail__id"
        ] = workflow_collection_engagement_detail.id
        extra[
            "workflow_collection_engagement_detail__step__code"
        ] = workflow_collection_engagement_detail.step.code
        extra[
            "workflow_collection_engagement_detail__started"
        ] = workflow_collection_engagement_detail.started
        extra[
            "workflow_collection_engagement_detail__finished"
        ] = workflow_collection_engagement_detail.finished
    if workflow_collection_subscription:
        extra[
            "workflow_collection_subscription__id"
        ] = workflow_collection_subscription.id
        extra[
            "workflow_collection_subscription__active"
        ] = workflow_collection_subscription.active
    return extra
| 31.59 | 88 | 0.6912 | 624 | 6,318 | 6.655449 | 0.211538 | 0.281724 | 0.188779 | 0.106429 | 0.218396 | 0.0915 | 0.071274 | 0.041416 | 0.041416 | 0 | 0 | 0 | 0.246597 | 6,318 | 199 | 89 | 31.748744 | 0.872479 | 0.23441 | 0 | 0.158333 | 0 | 0 | 0.194547 | 0.150617 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025 | false | 0.008333 | 0.008333 | 0 | 0.066667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bd6f1e30b50458d7f7e8f76afb5a01fa412d461d | 120 | py | Python | mef2_processor/run.py | Pennsieve/timeseries-processor | 85766afa76182503fd66cec8382c22e757743f01 | [
"Apache-2.0"
] | null | null | null | mef2_processor/run.py | Pennsieve/timeseries-processor | 85766afa76182503fd66cec8382c22e757743f01 | [
"Apache-2.0"
] | null | null | null | mef2_processor/run.py | Pennsieve/timeseries-processor | 85766afa76182503fd66cec8382c22e757743f01 | [
"Apache-2.0"
] | null | null | null |
from mef2_processor import MEF2Processor
if __name__ == '__main__':
    # Run the MEF2 timeseries processor as a command-line task.
    task = MEF2Processor(cli=True)
    task.run()
| 17.142857 | 40 | 0.716667 | 14 | 120 | 5.5 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.030612 | 0.183333 | 120 | 6 | 41 | 20 | 0.755102 | 0 | 0 | 0 | 0 | 0 | 0.067227 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bd83ef1bfc81e8452eb803079808aa53ab30215c | 3,891 | py | Python | jaraco/structures/binary.py | jaraco/jaraco.structures | 3b435db2b49906fb2fe07ff9ab498eb6841072a8 | [
"MIT"
] | null | null | null | jaraco/structures/binary.py | jaraco/jaraco.structures | 3b435db2b49906fb2fe07ff9ab498eb6841072a8 | [
"MIT"
] | null | null | null | jaraco/structures/binary.py | jaraco/jaraco.structures | 3b435db2b49906fb2fe07ff9ab498eb6841072a8 | [
"MIT"
] | null | null | null | import numbers
from functools import reduce
def get_bit_values(number, size=32):
    """Return the bits of *number* as a list, most significant first,
    padded/truncated to exactly *size* entries.

    >>> get_bit_values(1) == [0]*31 + [1]
    True

    >>> get_bit_values(0xDEADBEEF)
    [1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, \
1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1]

    You may override the default word size of 32-bits to match your actual
    application.

    >>> get_bit_values(0x3, 2)
    [1, 1]

    >>> get_bit_values(0x3, 4)
    [0, 0, 1, 1]
    """
    # Adding 2**size guarantees bin() emits at least `size` binary digits,
    # so the trailing slice is always fully populated with real bits.
    padded = bin(number + 2 ** size)[-size:]
    return [int(digit) for digit in padded]
def gen_bit_values(number):
    """
    Return a zero or one for each bit of a numeric value up to the most
    significant 1 bit, beginning with the least significant bit.

    >>> list(gen_bit_values(16))
    [0, 0, 0, 0, 1]
    """
    binary = format(number, 'b')
    # Reverse so iteration starts at the least significant bit.
    return (int(bit) for bit in binary[::-1])
def coalesce(bits):
    """
    Take a sequence of bits, most significant first, and
    coalesce them into a number.

    >>> coalesce([1,0,1])
    5
    """
    # Left fold: shift the accumulator and OR in the next bit.
    return reduce(lambda acc, bit: acc << 1 | bit, bits)
class Flags:
    """
    Subclasses should define _names, a list of flag names beginning
    with the least-significant bit.

    >>> class MyFlags(Flags):
    ...     _names = 'a', 'b', 'c'
    >>> mf = MyFlags.from_number(5)
    >>> mf['a']
    1
    >>> mf['b']
    0
    >>> mf['c'] == mf[2]
    True
    >>> mf['b'] = 1
    >>> mf['a'] = 0
    >>> mf.number
    6
    """

    def __init__(self, values):
        self._values = list(values)
        # Pad with zeros so every named flag has a slot.
        if hasattr(self, '_names'):
            shortfall = len(self._names) - len(self._values)
            self._values.extend([0] * shortfall)

    @classmethod
    def from_number(cls, number):
        """Alternate constructor: build flags from an integer's bits."""
        return cls(gen_bit_values(number))

    @property
    def number(self):
        """The flags packed back into a single integer."""
        return coalesce(reversed(self._values))

    def __setitem__(self, key, value):
        # Accept either a positional index or a flag name.
        try:
            self._values[key] = value
        except TypeError:
            self._values[self._names.index(key)] = value

    def __getitem__(self, key):
        # Accept either a positional index or a flag name.
        try:
            return self._values[key]
        except TypeError:
            return self._values[self._names.index(key)]
class BitMask(type):
    """
    A metaclass to create a bitmask with attributes. Subclass an int and
    set this as the metaclass to use.

    Construct such a class:

    >>> class MyBits(int, metaclass=BitMask):
    ...     a = 0x1
    ...     b = 0x4
    ...     c = 0x3

    >>> b1 = MyBits(3)
    >>> b1.a, b1.b, b1.c
    (True, False, True)

    >>> b2 = MyBits(8)
    >>> any([b2.a, b2.b, b2.c])
    False

    If the instance defines methods, they won't be wrapped in
    properties.

    >>> class MyBits(int, metaclass=BitMask):
    ...     a = 0x1
    ...     b = 0x4
    ...     c = 0x3
    ...
    ...     @classmethod
    ...     def get_value(cls):
    ...         return 'some value'
    ...
    ...     @property
    ...     def prop(cls):
    ...         return 'a property'

    >>> MyBits(3).get_value()
    'some value'

    >>> MyBits(3).prop
    'a property'
    """

    def __new__(cls, name, bases, attrs):
        def as_flag(attr_name, attr_value):
            # Leave private names and non-numeric attributes untouched;
            # numeric attributes become read-only boolean flag properties.
            if attr_name.startswith('_'):
                return attr_value
            if not isinstance(attr_value, numbers.Number):
                return attr_value
            # Bind the mask as a default arg to avoid late-binding closures.
            return property(lambda self, mask=attr_value: bool(self & mask))

        transformed = {key: as_flag(key, value) for key, value in attrs.items()}
        return type.__new__(cls, name, bases, transformed)
| 24.783439 | 80 | 0.523516 | 504 | 3,891 | 3.920635 | 0.287698 | 0.017206 | 0.013664 | 0.012146 | 0.159919 | 0.159413 | 0.122976 | 0.07085 | 0.04251 | 0.04251 | 0 | 0.039407 | 0.3413 | 3,891 | 156 | 81 | 24.942308 | 0.731565 | 0.4477 | 0 | 0.130435 | 0 | 0 | 0.004182 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.23913 | false | 0 | 0.043478 | 0.065217 | 0.565217 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
bd8ea0e2ee0e29cb0eeaa11ff2fec214aa6feca6 | 902 | py | Python | main.py | JoaoAPS/PlanetSimulation | ada6090df08146030fffe909e75f604ed6099c48 | [
"MIT"
] | null | null | null | main.py | JoaoAPS/PlanetSimulation | ada6090df08146030fffe909e75f604ed6099c48 | [
"MIT"
] | null | null | null | main.py | JoaoAPS/PlanetSimulation | ada6090df08146030fffe909e75f604ed6099c48 | [
"MIT"
] | null | null | null | from src.App import App
from src.Planet import Planet
from src.vecN import Vec3
def main():
    """Set up a two-planet universe and run the simulation app.

    Two equal-mass planets start at mirrored positions with opposite
    velocities, so they orbit their common barycenter.
    """
    # The previous version built a three-planet configuration first and
    # immediately overwrote it (dead code); only the active setup remains.
    planets = [
        Planet(5000, Vec3(20), Vec3(-5, -5), (200, 20, 20)),
        Planet(5000, Vec3(-20), Vec3(5, 5), (20, 200, 20)),
    ]

    app = App()
    app.universe.setPlanets(planets)
    app.run()


if __name__ == '__main__':
    main()
| 23.128205 | 62 | 0.522173 | 128 | 902 | 3.617188 | 0.296875 | 0.12959 | 0.103672 | 0.084233 | 0.349892 | 0.185745 | 0.095032 | 0 | 0 | 0 | 0 | 0.305684 | 0.278271 | 902 | 38 | 63 | 23.736842 | 0.40553 | 0.318182 | 0 | 0.111111 | 0 | 0 | 0.013223 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.166667 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bd913e37450eb4ed4c3feecd1989472868577889 | 53 | py | Python | src/IceRayPy/core/material/instruction/label/color/const.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | 2 | 2020-09-04T12:27:15.000Z | 2022-01-17T14:49:40.000Z | src/IceRayPy/core/material/instruction/label/color/const.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | null | null | null | src/IceRayPy/core/material/instruction/label/color/const.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | 1 | 2020-09-04T12:27:52.000Z | 2020-09-04T12:27:52.000Z | _BEGIN = 0
BLACK=0
WHITE=1
GRAY=2
_END = 12 | 6.625 | 11 | 0.566038 | 10 | 53 | 2.8 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.171429 | 0.339623 | 53 | 8 | 12 | 6.625 | 0.628571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bda78acd12b5de5e1969c79dfae59de3e2a7f846 | 8,926 | py | Python | pythonnotes5.py | burgeaer/python | 0cd0c1ea1a5849b8bc32ace9b2e3b67392ff6b79 | [
"bzip2-1.0.6"
] | null | null | null | pythonnotes5.py | burgeaer/python | 0cd0c1ea1a5849b8bc32ace9b2e3b67392ff6b79 | [
"bzip2-1.0.6"
] | null | null | null | pythonnotes5.py | burgeaer/python | 0cd0c1ea1a5849b8bc32ace9b2e3b67392ff6b79 | [
"bzip2-1.0.6"
] | null | null | null | Python 3.8.0 (tags/v3.8.0:fa919fd, Oct 14 2019, 19:37:50) [MSC v.1916 64 bit (AMD64)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>> from cImage import *
>>> p = Pixel(200, 100, 150)
>>> p
(200, 100, 150)
>>> p.getRed()
200
>>> p.getGreen()
100
>>> p.setBlue(20)
>>> p
(200, 100, 20)
>>> from cImage import *
>>> myWin = ImageWin("Butterfly", 300, 224)
>>> butterfly = FileImage("butterfly.gif")
>>> butterfly.draw(myWin)
>>> butterfly.getWidth()
300
>>> butterfly.getHeight()
215
>>> butterfly.getPixel(124, 165)
Traceback (most recent call last):
File "<pyshell#13>", line 1, in <module>
butterfly.getPixel(124, 165)
File "C:\Program Files\Python38\lib\site-packages\cImage.py", line 310, in getTkPixel
p = [int(j) for j in self.im.get(x,y).split()]
AttributeError: 'tuple' object has no attribute 'split'
>>> butterfly.getPixel(124, 165)
Traceback (most recent call last):
File "<pyshell#14>", line 1, in <module>
butterfly.getPixel(124, 165)
File "C:\Program Files\Python38\lib\site-packages\cImage.py", line 310, in getTkPixel
p = [int(j) for j in self.im.get(x,y).split()]
AttributeError: 'tuple' object has no attribute 'split'
>>> myImWin = ImageWin("Empty Image", 300, 300)
>>> emptyIm = EmptyImage(300, 300)
>>> emptyIm.draw(myImWin)
>>> myImWin = ImageWin("Line Image", 300, 300)
>>> lineImage = EmptyImage(300, 300)
>>> whitePixel = Pixel(255, 255, 255)
>>> for i in range(lineImage.getheight()):
lineImage.setPixel(i, i, whitePixel)
Traceback (most recent call last):
File "<pyshell#23>", line 1, in <module>
for i in range(lineImage.getheight()):
AttributeError: 'EmptyImage' object has no attribute 'getheight'
>>> for i in range(lineImage.getHeight()):
lineImage.setPixel(i, i, whitePixel)
>>> lineImage.draw(myImWin)
>>> lineImage.save("lineImage.gif)
SyntaxError: EOL while scanning string literal
>>> lineImage.save("lineImage.gif")
>>> def negativePixel(oldPixel):
newRed = 255 - oldPixel.getRed()
newGreen = 255 - oldPixel.getGreen()
newBlue = 255 - oldPixel.getBlue()
newPixel = Pixel(newRed, newGreen, newBlue)
return newPixel
>>> def makeNegative(imageFile):
oldImage = FileImage(imageFile)
width = oldImage.getWidth()
height = oldImage.getHeight()
myImageWindow = ImageWin("Negative Image", width * 2, height)
oldImage.draw(myImageWindow)
newIm = EmptyImage(width, height)
for row in range(height):
for col in range(width):
oldPixel = oldImage.getPixel(col, row)
newPixel = negativePixel(oldPixel)
newIm.setPixel(col, row, newPixel)
newIm.setPosition(width + 1, 0)
newIm.draw(myImageWindow)
myImageWindow.exitOnClick()
>>> makeNegative("butterfly.gif")
Traceback (most recent call last):
File "<pyshell#52>", line 1, in <module>
makeNegative("butterfly.gif")
File "<pyshell#51>", line 10, in makeNegative
oldPixel = oldImage.getPixel(col, row)
File "C:\Program Files\Python38\lib\site-packages\cImage.py", line 310, in getTkPixel
p = [int(j) for j in self.im.get(x,y).split()]
AttributeError: 'tuple' object has no attribute 'split'
>>>
================ RESTART: C:/Program Files/Python38/negative.py ================
>>> makeNegative("butterfly.gif")
>>> def grayPixel(oldPixel):
intensifySum = oldPixel.getRed() + oldPixel.getGreen() + oldPixel.getBlue()
aveRGB = intensitySum // 3
newPixel = Pixel(aveRGB, aveRGB, aveRGB)
return newPixel
>>> def makeGrayScale(imageFile):
oldImage = FileImage(imageFile)
width = oldImage.getWidth()
height = oldImage.getHeight()
myImageWindow = ImageWin("GrayScale", width * 2, height)
oldImage.draw(myImageWindow)
newIm = EmptyImage(width, height)
for row in range(height):
for col in range(width):
oldPixel = oldImage.getPixel(col, row)
newPixel = grayPixel(oldPixel)
newIm.setPixel(col, row, newPixel)
newIm.setPosition(width + 1, 0)
newIm.draw(myImageWindow)
myImageWindow.exitOnClick()
>>> makeGrayScale("butterfly.gif")
Traceback (most recent call last):
File "<pyshell#76>", line 1, in <module>
makeGrayScale("butterfly.gif")
File "<pyshell#75>", line 11, in makeGrayScale
newPixel = grayPixel(oldPixel)
File "<pyshell#59>", line 3, in grayPixel
aveRGB = intensitySum // 3
NameError: name 'intensitySum' is not defined
>>> def grayPixel(oldPixel):
intensifySum = oldPixel.getRed() + oldPixel.getGreen() + oldPixel.getBlue()
aveRGB = intensifySum // 3
newPixel = Pixel(aveRGB, aveRGB, aveRGB)
return newPixel
>>> makeGrayScale("butterfly.gif")
>>> def pixelMapper(fileImage, rgbFunction):
width = fileImage.getWidth()
height = fileImage.getHeight()
newIm = EmptyImage(width, height)
for row in range(height)
SyntaxError: invalid syntax
>>> def pixelMapper(fileImage, rgbFunction):
width = fileImage.getWidth()
height = fileImage.getHeight()
newIm = EmptyImage(width, height)
for row in range(height):
oldPixel = fileImage.getPixel(col, row)
newPixel = rgbFunction(oldPixel)
newIm.setPixel(col, row, newPixel)
return newIm
>>> def generalTransform(imageFile):
oldImage = FileImage(imageFile)
width = oldImage.getWidth()
height = oldImage.getHeight()
myImageWindow = ImageWin("GrayScale", width * 2, height)
oldImage.draw(myImageWindow)
newImage = pixelMapper(oldImage, grayPixel)
newImage.setPosition(oldImage.getWidth() + 1, 0)
newImage.draw(myImageWindow)
myImageWindow.exitOnClick()
>>> generalTransform("butterfly.gif")
Traceback (most recent call last):
File "<pyshell#102>", line 1, in <module>
generalTransform("butterfly.gif")
File "<pyshell#101>", line 7, in generalTransform
newImage = pixelMapper(oldImage, grayPixel)
File "<pyshell#90>", line 6, in pixelMapper
oldPixel = fileImage.getPixel(col, row)
NameError: name 'col' is not defined
>>> def pixelMapper(fileImage, rgbFunction):
width = fileImage.getWidth()
height = fileImage.getHeight()
newIm = EmptyImage(width, height)
for col in range(width):
for row in range(height):
oldPixel = fileImage.getPixel(col, row)
newPixel = rgbFunction(oldPixel)
newIm.setPixel(col, row, newPixel)
return newIm
SyntaxError: expected an indented block
>>> def pixelMapper(fileImage, rgbFunction):
width = fileImage.getWidth()
height = fileImage.getHeight()
newIm = EmptyImage(width, height)
for col in range(width):
for row in range(height):
oldPixel = fileImage.getPixel(col, row)
newPixel = rgbFunction(oldPixel)
newIm.setPixel(col, row, newPixel)
return newIm
>>> generalTransform("butterfly.gif")
Traceback (most recent call last):
File "<pyshell#110>", line 1, in <module>
generalTransform("butterfly.gif")
File "<pyshell#101>", line 10, in generalTransform
myImageWindow.exitOnClick()
File "C:/Program Files/Python38\cImage.py", line 143, in exitOnClick
self.getMouse()
File "C:/Program Files/Python38\cImage.py", line 129, in getMouse
self.update()
File "C:\Program Files\Python38\lib\tkinter\__init__.py", line 1305, in update
self.tk.call('update')
KeyboardInterrupt
>>> def doubleImage(oldImage):
oldW = oldImage.getWidth()
oldH = oldImage.getHeight()
newIm = EmptyImage(oldW * 2, oldH * 2)
for row in range(oldH):
for col in range(oldW):
oldPixel = oldImage.getPixel(col, row)
newIm.setPixel(2 * col, 2 * row, oldPixel)
newIm.setPixel(2 * col + 1, 2 * row, oldPixel)
newIm.setPixel(2 * col, 2 * row + 1, oldPixel)
newIm.setPixel(2 * col + 1, 2 * row + 1, oldPixel)
return newIm
>>> def makeDoubleImage(imageFile):
oldImage = FileImage(imageFile)
width = oldImage.getWidth()
height = oldImage.getHeight()
myWin = ImageWin("Double Size", width * 2, height * 3)
oldImage.draw(myWin)
newImage = doubleImage(oldImage)
newImage.setPosition(0, oldImage.getHeight() + 1)
newImage.draw(myWin)
myWin.exitOnClick()
>>> makeDoubleImage("butterfly.gif")
>>> def verticalFlip(oldImage):
oldW = oldImage.getWidth()
oldH = oldImage.getHeight()
newIm = EmptyImage(oldW, oldH)
maxP = oldW - 1
for row in range(oldH)
SyntaxError: invalid syntax
>>> def verticalFlip(oldImage):
oldW = oldImage.getWidth()
oldH = oldImage.getHeight()
newIm = EmptyImage(oldW, oldH)
maxP = oldW - 1
for row in range(oldH):
for col in range(oldW):
oldPixel = oldImage.getPixel(maxP - col, row)
newIm.setPixel(col, row, oldPixel)
return newIm
>>> def makeDoubleImage(imageFile):
oldImage = FileImage(imageFile)
width = oldImage.getWidth()
height = oldImage.getHeight()
myWin = ImageWin("Double Size", width * 2, height * 3)
oldImage.draw(myWin)
newImage = verticalFlip(oldImage)
newImage.setPosition(0, oldImage.getHeight() + 1)
newImage.draw(myWin)
myWin.exitOnClick()
>>> makeDoubleImage("butterfly.gif")
>>>
| 33.556391 | 95 | 0.68855 | 1,075 | 8,926 | 5.713488 | 0.172093 | 0.020514 | 0.022794 | 0.019049 | 0.700098 | 0.689189 | 0.67649 | 0.665581 | 0.628785 | 0.612504 | 0 | 0.034841 | 0.170401 | 8,926 | 265 | 96 | 33.683019 | 0.794598 | 0 | 0 | 0.659751 | 0 | 0.004149 | 0.092714 | 0.024478 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.008299 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bdad9c7d6b8e642fe2e955ae47ef4eaaba4d6a2c | 367 | py | Python | tests/django/app/urls.py | apilytics/apilytics-python | c917331955c6f9b7469175df7838b7fd40dee33f | [
"MIT"
] | 5 | 2022-01-06T17:30:13.000Z | 2022-01-16T11:38:31.000Z | tests/django/app/urls.py | apilytics/apilytics-python | c917331955c6f9b7469175df7838b7fd40dee33f | [
"MIT"
] | 18 | 2022-01-08T12:58:49.000Z | 2022-03-27T16:41:53.000Z | tests/django/app/urls.py | apilytics/apilytics-python | c917331955c6f9b7469175df7838b7fd40dee33f | [
"MIT"
] | null | null | null | import django.urls
import tests.django.app.views
# URL routes for the Django test application.
urlpatterns = [
    django.urls.re_path(r"^error/?$", tests.django.app.views.error_view),
    django.urls.re_path(r"^empty/?$", tests.django.app.views.no_body_view),
    django.urls.re_path(r"^streaming/?$", tests.django.app.views.streaming_view),
    # Catch-all route: must stay last so the specific routes above win.
    django.urls.re_path(r"^.*$", tests.django.app.views.ok_view),
]
| 33.363636 | 81 | 0.713896 | 57 | 367 | 4.438596 | 0.298246 | 0.197628 | 0.27668 | 0.375494 | 0.316206 | 0.249012 | 0 | 0 | 0 | 0 | 0 | 0 | 0.092643 | 367 | 10 | 82 | 36.7 | 0.75976 | 0 | 0 | 0 | 0 | 0 | 0.095368 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bdc85851ff0ea6d3c7072790239cf7e4ff346f28 | 257 | py | Python | scripts/fetch/fetch.py | ochsec/arktracker | ebc6f517ff503c5fd9d8b9726eab1641a48033f6 | [
"MIT"
] | null | null | null | scripts/fetch/fetch.py | ochsec/arktracker | ebc6f517ff503c5fd9d8b9726eab1641a48033f6 | [
"MIT"
] | null | null | null | scripts/fetch/fetch.py | ochsec/arktracker | ebc6f517ff503c5fd9d8b9726eab1641a48033f6 | [
"MIT"
] | null | null | null | from funds import funds
from lib import connect_db, check_updated, insert_data
# For each fund, open a connection, insert the data if the CSV has new rows.
for fund in funds:
    conn = connect_db()
    try:
        updated, df = check_updated(fund["name"], fund["csv_url"], conn)
        if not updated:
            insert_data(fund["name"], df, conn)
    finally:
        # The original leaked one connection per fund; assumes the
        # DB-API-style close() provided by connect_db's driver.
        conn.close()
| 25.7 | 68 | 0.688716 | 39 | 257 | 4.358974 | 0.512821 | 0.105882 | 0.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.198444 | 257 | 9 | 69 | 28.555556 | 0.825243 | 0 | 0 | 0 | 0 | 0 | 0.058366 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bdd3b3b4f37c9ab5c4dbed2ce01ae2c9e7af9002 | 1,480 | py | Python | blog/views/help_requests.py | junaidiiith/Disaster-Help-Predictor | 79938619752861e5141207cca920900cea229a62 | [
"Apache-2.0"
] | null | null | null | blog/views/help_requests.py | junaidiiith/Disaster-Help-Predictor | 79938619752861e5141207cca920900cea229a62 | [
"Apache-2.0"
] | null | null | null | blog/views/help_requests.py | junaidiiith/Disaster-Help-Predictor | 79938619752861e5141207cca920900cea229a62 | [
"Apache-2.0"
] | null | null | null | from django.contrib.auth.models import User
from django.core.paginator import Paginator
from django.shortcuts import render
from blog.models.help_request import Request
from django.template.defaulttags import register
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.urls import reverse_lazy
from django.views import generic
from django.views.generic import CreateView, UpdateView, DeleteView
# Help-request status choices: (stored value, human-readable label).
CHOICES = [(1, "Pending"), (2, "Assigned"), (3, "Resolved")]
# Value -> label lookup, derived from CHOICES so the two can never drift.
choices = dict(CHOICES)
@register.filter
def get_item(dictionary, key):
    """Template filter: return ``dictionary[key]``, or ``None`` if absent."""
    return dictionary.get(key)
# Number of help requests shown per page.
NUM_OF_POSTS = 10


def help_requests(request):
    """Render the paginated list of help requests, newest first."""
    requests_list = Request.objects.all().order_by('-pub_date')
    paginator = Paginator(requests_list, NUM_OF_POSTS)  # Show NUM_OF_POSTS posts per page
    page = request.GET.get('page')
    requests = paginator.get_page(page)
    return render(request, 'blog/helprequests.html', {'rqsts': requests, 'choices': choices })
def assign(request):
# TODO: unimplemented placeholder view for assigning a help request.
pass
# Detail page for a single help Request; adds the request object and the
# status id -> label mapping to the template context.
class RequestView(generic.DetailView):
model = Request
template_name = 'blog/request.html'
def get_context_data(self, **kwargs):
# Call the base implementation first to get a context
context = super().get_context_data(**kwargs)
# Fetch the Request identified by the URL's ``pk`` kwarg.
# NOTE(review): DetailView already provides this object; the extra
# query looks redundant — confirm before removing.
request = Request.objects.get(id=self.kwargs['pk'])
context['req'] = request
# Expose the status id -> label mapping for template rendering.
context['choices'] = choices
return context | 35.238095 | 94 | 0.727027 | 190 | 1,480 | 5.557895 | 0.452632 | 0.075758 | 0.032197 | 0.039773 | 0.0625 | 0.0625 | 0.0625 | 0 | 0 | 0 | 0 | 0.006467 | 0.164189 | 1,480 | 42 | 95 | 35.238095 | 0.847211 | 0.07027 | 0 | 0 | 0 | 0 | 0.088857 | 0.016023 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.0625 | 0.28125 | 0.03125 | 0.59375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
bdd48e30c95ed6a4b55dab7b0d66015b288398e4 | 1,739 | py | Python | game_of_life.py | elisabethschiele/recurse-small-projects | 2d7fe3076e90883bc10a70316303a518adb5fa38 | [
"MIT"
] | null | null | null | game_of_life.py | elisabethschiele/recurse-small-projects | 2d7fe3076e90883bc10a70316303a518adb5fa38 | [
"MIT"
] | null | null | null | game_of_life.py | elisabethschiele/recurse-small-projects | 2d7fe3076e90883bc10a70316303a518adb5fa38 | [
"MIT"
] | null | null | null | '''
TODO:
main function
board initialization
functions for changing tile status
'''
import random
# Global game state: the grid (list of rows) and its square side length.
board = []
dimension = 5
def main():
    """Run one Game of Life generation: seed the board, show it, evolve, show again."""
    global board
    initialize_board()
    print_board(board)
    board = update_board(board)
    print_board(board)
def initialize_board():
    """Randomly populate the global *board* as a dimension x dimension grid.

    Each cell becomes 1 with probability 1/3 (``random.randint(0, 2) == 0``),
    otherwise 0.  Returns the board for convenience.
    """
    global board
    for _ in range(dimension):
        row_cells = []
        for _ in range(dimension):
            row_cells.append(1 if random.randint(0, 2) == 0 else 0)
        board.append(row_cells)
    return board
def print_board(board):
    """Pretty-print *board*: a blank line, one row per line, a blank line.

    Fix: iterate the rows the board actually has instead of the original
    hard-coded ``range(5)``, so boards of any dimension print correctly.
    """
    print("")
    for row in board:
        print(row)
    print("")
def update_board(board):
# Recompute every cell from its neighbours (one Game of Life step).
# NOTE(review): indentation was lost in this copy, so the exact nesting of
# the try/except/pass/continue statements below cannot be confirmed; the
# code is left byte-for-byte as found.
# NOTE(review): neighbours are counted with ``== 0`` while cells are set to
# 1 when "alive" elsewhere — looks inverted; confirm which value means alive.
for row in range(dimension):
for column in range(dimension):
##count alive neighbour cells
# Row above: board[row-1][i-1] for i in 0..2 — presumably meant to be the
# three cells above (column-1..column+1); verify the index arithmetic.
alive_cells = 0
try:
for i in range(3):
if board[row-1][i-1] == 0:
alive_cells += 1
except IndexError:
pass
continue
# Left neighbour.
try:
if board[row][column-1] == 0:
alive_cells += 1
except IndexError:
pass
continue
# Right neighbour.
try:
if board[row][column+1] == 0:
alive_cells += 1
except IndexError:
pass
continue
# Row below, same caveat as the row above.
try:
for i in range(3):
if board[row+1][i-1] == 0:
alive_cells += 1
except IndexError:
pass
continue
## change status: birth on exactly 3, death on <=1 or >=4 neighbours.
if alive_cells == 3:
board[row][column]=1
else:
if alive_cells <= 1:
board[row][column]= 0
elif alive_cells >= 4:
board[row][column]= 0
return(board)
if __name__ == '__main__':
    # Script entry point: run a single demo generation.
    main()
| 20.22093 | 42 | 0.509488 | 202 | 1,739 | 4.252475 | 0.232673 | 0.093132 | 0.064028 | 0.055879 | 0.419092 | 0.360885 | 0.298021 | 0.298021 | 0.298021 | 0.298021 | 0 | 0.028222 | 0.388729 | 1,739 | 85 | 43 | 20.458824 | 0.779868 | 0.06728 | 0 | 0.553846 | 0 | 0 | 0.004966 | 0 | 0 | 0 | 0 | 0.011765 | 0 | 1 | 0.061538 | false | 0.061538 | 0.015385 | 0 | 0.092308 | 0.092308 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
bdd9c4be9330634a2bff79dcf7693c24bd787c5c | 803 | py | Python | database/movies.py | frank-nguyen-vd/casting-agency | 48ec116b557d672725cfa1d4d422eea2000db951 | [
"MIT"
] | null | null | null | database/movies.py | frank-nguyen-vd/casting-agency | 48ec116b557d672725cfa1d4d422eea2000db951 | [
"MIT"
] | null | null | null | database/movies.py | frank-nguyen-vd/casting-agency | 48ec116b557d672725cfa1d4d422eea2000db951 | [
"MIT"
] | null | null | null | from database import db
class Movies(db.Model):
    """Model for the ``movies`` table with basic CRUD helpers."""

    __tablename__ = "movies"

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String)
    # NOTE(review): DateTime's first positional argument is ``timezone``;
    # 120 looks like a copied String length — confirm the intent.
    release_date = db.Column(db.DateTime(120))

    def __init__(self, title, release_date):
        self.title = title
        self.release_date = release_date

    def insert(self):
        """Persist this movie as a new row."""
        db.session.add(self)
        db.session.commit()

    def update(self):
        """Flush pending attribute changes to the database."""
        db.session.commit()

    def delete(self):
        """Remove this movie from the database."""
        db.session.delete(self)
        db.session.commit()

    def format(self):
        """Return a plain-dict representation of this movie."""
        return {
            "id": self.id,
            "title": self.title,
            "release_date": self.release_date,
        }

    def __repr__(self):
        return f"<Movies: {self.id}, {self.title}, {self.release_date}>"
| 23.617647 | 84 | 0.58655 | 98 | 803 | 4.602041 | 0.326531 | 0.170732 | 0.144124 | 0.126386 | 0.252772 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005181 | 0.278954 | 803 | 33 | 85 | 24.333333 | 0.773748 | 0 | 0 | 0.12 | 0 | 0 | 0.05604 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.24 | false | 0 | 0.04 | 0.08 | 0.56 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
bdde841cf8145d4c92eaa454c747a7f342eef014 | 523 | py | Python | ymir/backend/src/ymir_viz/tests/conftest.py | Zhang-SJ930104/ymir | dd6481be6f229ade4cf8fba64ef44a15357430c4 | [
"Apache-2.0"
] | 64 | 2021-11-15T03:48:00.000Z | 2022-03-25T07:08:46.000Z | ymir/backend/src/ymir_viz/tests/conftest.py | Zhang-SJ930104/ymir | dd6481be6f229ade4cf8fba64ef44a15357430c4 | [
"Apache-2.0"
] | 35 | 2021-11-23T04:14:35.000Z | 2022-03-26T09:03:43.000Z | ymir/backend/src/ymir_viz/tests/conftest.py | Aryalfrat/ymir | d4617ed00ef67a77ab4e1944763f608bface4be6 | [
"Apache-2.0"
] | 57 | 2021-11-11T10:15:40.000Z | 2022-03-29T07:27:54.000Z | # -*- coding: utf-8 -*-
import pytest
from src.app import create_connexion_app
# Module-level singleton so the connexion app is built only once per test run.
connexion_app_cache = None
def get_app():
    """Return the shared connexion application, creating it on first use."""
    global connexion_app_cache
    if not connexion_app_cache:
        connexion_app_cache = create_connexion_app(dict())
    return connexion_app_cache
@pytest.fixture(autouse=True)
def core_app():
    """Yield the Flask app with an app context pushed; pop it afterwards."""
    app = get_app().app
    ctx = app.app_context()
    ctx.push()
    try:
        yield app
    finally:
        ctx.pop()
| 18.034483 | 58 | 0.676864 | 68 | 523 | 4.911765 | 0.441176 | 0.323353 | 0.254491 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0025 | 0.235182 | 523 | 28 | 59 | 18.678571 | 0.8325 | 0.040153 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0 | 0.105263 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
bdde8bd6ab45d7ec48a6252298b2e61cd49cfe55 | 185 | py | Python | HarvardX/CS50W/flask/classes.py | mohammedelzanaty/myRoad2BeFullStack | eea3a5edb6c6a999136b04fdaea6ce0c81137a58 | [
"MIT"
] | 2 | 2021-04-21T12:05:01.000Z | 2022-01-19T09:58:38.000Z | HarvardX/CS50W/flask/classes.py | mohammedelzanaty/myRoad2BeFullStack | eea3a5edb6c6a999136b04fdaea6ce0c81137a58 | [
"MIT"
] | 34 | 2019-12-26T11:21:42.000Z | 2022-02-27T19:55:10.000Z | HarvardX/CS50W/flask/classes.py | mohammedelzanaty/myRoad2BeFullStack | eea3a5edb6c6a999136b04fdaea6ce0c81137a58 | [
"MIT"
] | 2 | 2021-08-15T07:59:36.000Z | 2022-01-16T06:17:32.000Z | class Point:
def __init__(self, x, y):
    """Store the point's coordinates on the instance."""
    self.x, self.y = x, y
# Demo: build a sample Point and print its coordinates.
point = Point(3, 5)
print(f"The Point x value is {point.x}")
print(f"The Point y value is {point.y}")
| 18.5 | 40 | 0.578378 | 35 | 185 | 2.942857 | 0.4 | 0.097087 | 0.174757 | 0.271845 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014815 | 0.27027 | 185 | 9 | 41 | 20.555556 | 0.748148 | 0 | 0 | 0 | 0 | 0 | 0.324324 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.285714 | 0.285714 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
da03205fc5eef40b01a01fcba8ec99c04f60def3 | 772 | py | Python | SpringSemester2021/05_AssociationAnalysis/Ex05_01_Sol.py | KretschiGL/DataScienceLecture | e6bbb3efd531b08aa4757fb6e89d12e959678a44 | [
"MIT"
] | 1 | 2021-05-09T11:02:35.000Z | 2021-05-09T11:02:35.000Z | SpringSemester2021/05_AssociationAnalysis/Ex05_01_Sol.py | KretschiGL/DataScienceLecture | e6bbb3efd531b08aa4757fb6e89d12e959678a44 | [
"MIT"
] | null | null | null | SpringSemester2021/05_AssociationAnalysis/Ex05_01_Sol.py | KretschiGL/DataScienceLecture | e6bbb3efd531b08aa4757fb6e89d12e959678a44 | [
"MIT"
] | 1 | 2020-05-26T15:35:40.000Z | 2020-05-26T15:35:40.000Z | # Init Solution
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline
import seaborn as sns
sns.set()
from IPython.display import display, Markdown
from mlxtend.frequent_patterns import apriori
from mlxtend.frequent_patterns import association_rules
# Init Solution completed
# Load the transaction data (semicolon-separated CSV) and preview it.
display(Markdown("##### Loading"))
data = pd.read_csv("./Ex05_01_Data.csv", sep=";")
display(data.head(5))
# Preprocess: turn "X" markers into 1/0 integer flags as apriori expects.
display(Markdown("##### Preprocessing"))
data = data.replace("X", 1).fillna(0).astype(int)
display(data.head(5))
# Mine frequent item sets with minimum support 0.3.
display(Markdown("##### Item sets"))
itemset = apriori(data, min_support=.3, use_colnames=True)
display(itemset)
# Derive association rules with confidence >= 0.85.
display(Markdown("##### Rules"))
rules = association_rules(itemset, metric="confidence", min_threshold=.85)
display(rules) | 27.571429 | 74 | 0.756477 | 106 | 772 | 5.415094 | 0.528302 | 0.130662 | 0.066202 | 0.094077 | 0.222997 | 0.108014 | 0 | 0 | 0 | 0 | 0 | 0.015759 | 0.095855 | 772 | 28 | 75 | 27.571429 | 0.80659 | 0.047927 | 0 | 0.095238 | 0 | 0 | 0.120055 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.333333 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
da0e93d07d2dc2d78f51f369e076b1e9a7ff9b5e | 990 | py | Python | exchangelib/services/send_notification.py | RossK1/exchangelib | 5550c2fbcc064943e3b4e150f74a724e0bd0a9f3 | [
"BSD-2-Clause"
] | 1,006 | 2016-07-18T16:42:55.000Z | 2022-03-31T10:43:50.000Z | exchangelib/services/send_notification.py | RossK1/exchangelib | 5550c2fbcc064943e3b4e150f74a724e0bd0a9f3 | [
"BSD-2-Clause"
] | 966 | 2016-05-13T18:55:43.000Z | 2022-03-31T15:24:56.000Z | exchangelib/services/send_notification.py | RossK1/exchangelib | 5550c2fbcc064943e3b4e150f74a724e0bd0a9f3 | [
"BSD-2-Clause"
] | 272 | 2016-04-05T02:17:10.000Z | 2022-03-24T08:15:57.000Z | from .common import EWSService
from ..properties import Notification
from ..util import MNS
class SendNotification(EWSService):
    """MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/sendnotification

    Not a callable EWS service; only used to parse the XML body of push
    notifications the server sends to us.
    """

    SERVICE_NAME = 'SendNotification'

    def call(self):
        # Parsing-only pseudo-service — there is nothing to invoke.
        raise NotImplementedError()

    def _elems_to_objs(self, elems):
        """Yield Notification objects parsed from XML elements, passing
        Exception instances through unchanged."""
        for elem in elems:
            if isinstance(elem, Exception):
                yield elem
            else:
                yield Notification.from_xml(elem=elem, account=None)

    @classmethod
    def _response_tag(cls):
        """Return the name of the element containing the service response."""
        return '{%s}%s' % (MNS, cls.SERVICE_NAME)

    @classmethod
    def _get_elements_in_container(cls, container):
        """Return all notification elements found inside *container*."""
        return container.findall(Notification.response_tag())
| 30.9375 | 111 | 0.679798 | 120 | 990 | 5.5 | 0.608333 | 0.048485 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.232323 | 990 | 31 | 112 | 31.935484 | 0.868421 | 0.278788 | 0 | 0.105263 | 0 | 0 | 0.0317 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.210526 | false | 0 | 0.157895 | 0.052632 | 0.578947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
da3e0ddeef19e74b0a270a9a24eda9a95933a6a4 | 1,042 | py | Python | Ignatov_Mikhail_dz_10/exercise_3.py | HellFrozenRain/GB_Homework_Python_1 | b0baea6b7efecc6bd649618d0aeba93be57a389a | [
"MIT"
] | null | null | null | Ignatov_Mikhail_dz_10/exercise_3.py | HellFrozenRain/GB_Homework_Python_1 | b0baea6b7efecc6bd649618d0aeba93be57a389a | [
"MIT"
] | null | null | null | Ignatov_Mikhail_dz_10/exercise_3.py | HellFrozenRain/GB_Homework_Python_1 | b0baea6b7efecc6bd649618d0aeba93be57a389a | [
"MIT"
] | null | null | null | class Cell:
def __init__(self, number):
self.number = number
def __add__(self, other):
return Cell(self.number + other.number)
def __sub__(self, other):
result = self.number - other.number
if result > 0:
return Cell(result)
else:
print('в первой клетке мало ячеек')
return Cell(None)
def __mul__(self, other):
return Cell(self.number * other.number)
def __floordiv__(self, other):
return Cell(self.number // other.number)
def make_order(self, rows):
return '\\n'.join(['*' * rows for i in range(self.number // rows)]) + '\\n' + '*' * (self.number % rows)
# Demo: exercise every Cell operator on two sample cells.
cell_1 = Cell(17)
cell_2 = Cell(12)
print(f' Сумма: {(cell_1 + cell_2).number}')
print(f' Разность: {(cell_1 - cell_2).number}')
print(f' Разность: {(cell_2 - cell_1).number}')
print(f' Произведение: {(cell_1 * cell_2).number}')
print(f' Деление: {(cell_1 // cell_2).number}')
print(f' Деление: {(cell_2 // cell_1).number}')
print(cell_2.make_order(5))
| 27.421053 | 112 | 0.604607 | 146 | 1,042 | 4.061644 | 0.287671 | 0.134907 | 0.075885 | 0.141653 | 0.487352 | 0.487352 | 0.430017 | 0.430017 | 0.430017 | 0 | 0 | 0.026283 | 0.233205 | 1,042 | 37 | 113 | 28.162162 | 0.715895 | 0 | 0 | 0 | 0 | 0 | 0.247115 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0 | 0.148148 | 0.481481 | 0.296296 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
da4199ab676b8deb287dd46d9e98683fefaed5d6 | 74,598 | py | Python | pysnmp/WWP-LEOS-PORT-STATS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/WWP-LEOS-PORT-STATS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/WWP-LEOS-PORT-STATS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module WWP-LEOS-PORT-STATS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/WWP-LEOS-PORT-STATS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:31:30 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
TimeTicks, Bits, NotificationType, IpAddress, iso, Counter64, Integer32, Gauge32, ModuleIdentity, Unsigned32, ObjectIdentity, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "Bits", "NotificationType", "IpAddress", "iso", "Counter64", "Integer32", "Gauge32", "ModuleIdentity", "Unsigned32", "ObjectIdentity", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
wwpModulesLeos, wwpModules = mibBuilder.importSymbols("WWP-SMI", "wwpModulesLeos", "wwpModules")
wwpLeosPortStatsMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3))
wwpLeosPortStatsMIB.setRevisions(('2012-11-16 00:00', '2010-02-12 00:00', '2001-04-03 17:00',))
if mibBuilder.loadTexts: wwpLeosPortStatsMIB.setLastUpdated('201211160000Z')
if mibBuilder.loadTexts: wwpLeosPortStatsMIB.setOrganization('Ciena, Inc')
wwpLeosPortStatsMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1))
wwpLeosPortStats = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1))
wwpLeosPortStatsMIBNotificationPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 2))
wwpLeosPortStatsMIBNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 2, 0))
wwpLeosPortStatsMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 3))
wwpLeosPortStatsMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 3, 1))
wwpLeosPortStatsMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 3, 2))
wwpLeosPortStatsReset = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("none", 0), ("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPortStatsReset.setStatus('current')
wwpLeosPortStatsTable = MibTable((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2), )
if mibBuilder.loadTexts: wwpLeosPortStatsTable.setStatus('current')
wwpLeosPortStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1), ).setIndexNames((0, "WWP-LEOS-PORT-STATS-MIB", "wwpLeosPortStatsPortId"))
if mibBuilder.loadTexts: wwpLeosPortStatsEntry.setStatus('current')
wwpLeosPortStatsPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsPortId.setStatus('current')
wwpLeosPortStatsRxBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxBytes.setStatus('current')
wwpLeosPortStatsRxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxPkts.setStatus('current')
wwpLeosPortStatsRxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxCrcErrorPkts.setStatus('current')
wwpLeosPortStatsRxBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxBcastPkts.setStatus('current')
wwpLeosPortStatsUndersizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsUndersizePkts.setStatus('current')
wwpLeosPortStatsOversizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsOversizePkts.setStatus('current')
wwpLeosPortStatsFragmentPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsFragmentPkts.setStatus('current')
wwpLeosPortStatsJabberPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsJabberPkts.setStatus('current')
wwpLeosPortStats64BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats64BytePkts.setStatus('current')
wwpLeosPortStats65To127BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats65To127BytePkts.setStatus('current')
wwpLeosPortStats128To255BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats128To255BytePkts.setStatus('current')
wwpLeosPortStats256To511BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats256To511BytePkts.setStatus('current')
wwpLeosPortStats512To1023BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats512To1023BytePkts.setStatus('current')
wwpLeosPortStats1024To1518BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats1024To1518BytePkts.setStatus('current')
wwpLeosPortStatsTxBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxBytes.setStatus('current')
wwpLeosPortStatsTxTBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxTBytes.setStatus('deprecated')
wwpLeosPortStatsTxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxPkts.setStatus('current')
wwpLeosPortStatsTxExDeferPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxExDeferPkts.setStatus('current')
wwpLeosPortStatsTxGiantPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxGiantPkts.setStatus('current')
wwpLeosPortStatsTxUnderRunPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxUnderRunPkts.setStatus('current')
wwpLeosPortStatsTxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxCrcErrorPkts.setStatus('current')
wwpLeosPortStatsTxLCheckErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxLCheckErrorPkts.setStatus('current')
wwpLeosPortStatsTxLOutRangePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxLOutRangePkts.setStatus('current')
wwpLeosPortStatsTxLateCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxLateCollPkts.setStatus('current')
wwpLeosPortStatsTxExCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxExCollPkts.setStatus('current')
wwpLeosPortStatsTxSingleCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxSingleCollPkts.setStatus('current')
wwpLeosPortStatsTxCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxCollPkts.setStatus('current')
wwpLeosPortStatsTxPausePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxPausePkts.setStatus('current')
wwpLeosPortStatsTxMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxMcastPkts.setStatus('current')
wwpLeosPortStatsTxBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxBcastPkts.setStatus('current')
wwpLeosPortStatsPortReset = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("none", 0), ("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPortStatsPortReset.setStatus('current')
wwpLeosPortStatsRxMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxMcastPkts.setStatus('current')
wwpLeosPortStatsRxPausePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxPausePkts.setStatus('current')
wwpLeosPortStats1519To2047BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats1519To2047BytePkts.setStatus('current')
wwpLeosPortStats2048To4095BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats2048To4095BytePkts.setStatus('current')
wwpLeosPortStats4096To9216BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStats4096To9216BytePkts.setStatus('current')
wwpLeosPortStatsTxDeferPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxDeferPkts.setStatus('current')
wwpLeosPortStatsTx64BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx64BytePkts.setStatus('current')
wwpLeosPortStatsTx65To127BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx65To127BytePkts.setStatus('current')
wwpLeosPortStatsTx128To255BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx128To255BytePkts.setStatus('current')
wwpLeosPortStatsTx256To511BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx256To511BytePkts.setStatus('current')
wwpLeosPortStatsTx512To1023BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx512To1023BytePkts.setStatus('current')
wwpLeosPortStatsTx1024To1518BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx1024To1518BytePkts.setStatus('current')
wwpLeosPortStatsTx1519To2047BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx1519To2047BytePkts.setStatus('current')
wwpLeosPortStatsTx2048To4095BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx2048To4095BytePkts.setStatus('current')
wwpLeosPortStatsTx4096To9216BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTx4096To9216BytePkts.setStatus('current')
wwpLeosPortStatsRxFpgaDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxFpgaDropPkts.setStatus('current')
wwpLeosPortStatsPortLinkUp = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsPortLinkUp.setStatus('current')
wwpLeosPortStatsPortLinkDown = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsPortLinkDown.setStatus('current')
wwpLeosPortStatsPortLinkFlap = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsPortLinkFlap.setStatus('current')
wwpLeosPortStatsRxUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxUcastPkts.setStatus('current')
wwpLeosPortStatsTxUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 53), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxUcastPkts.setStatus('current')
wwpLeosPortStatsRxDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 54), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxDropPkts.setStatus('current')
wwpLeosPortStatsRxDiscardPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 55), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxDiscardPkts.setStatus('current')
wwpLeosPortStatsRxLOutRangePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 56), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxLOutRangePkts.setStatus('current')
wwpLeosPortStatsRxFpgaBufferDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 57), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxFpgaBufferDropPkts.setStatus('current')
wwpLeosPortStatsTxFpgaBufferDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 58), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsTxFpgaBufferDropPkts.setStatus('current')
wwpLeosPortStatsFpgaVlanPriFilterDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 59), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsFpgaVlanPriFilterDropPkts.setStatus('current')
wwpLeosPortStatsFpgaRxErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 60), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsFpgaRxErrorPkts.setStatus('current')
wwpLeosPortStatsFpgaRxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 61), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsFpgaRxCrcErrorPkts.setStatus('current')
wwpLeosPortStatsFpgaRxIpCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 62), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsFpgaRxIpCrcErrorPkts.setStatus('current')
wwpLeosPortStatsRxInErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 2, 1, 63), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortStatsRxInErrorPkts.setStatus('current')
# ---------------------------------------------------------------------------
# wwpLeosPortTotalStatsTable (...60.3.1.1.3): per-port "total" statistics,
# indexed by wwpLeosPortTotalStatsPortId (Integer32 1..65535).  Presumably
# cumulative since boot/reset — confirm against the MIB DESCRIPTION clauses,
# which are not loaded in this generated module.  All counter columns are
# read-only Counter32; the two exceptions are the index column (.1) and
# PortReset (.32), a read-write none(0)/reset(1) control.  TxTBytes (.17)
# is marked deprecated in the source MIB.
# ---------------------------------------------------------------------------
wwpLeosPortTotalStatsTable = MibTable((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3), )
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTable.setStatus('current')
wwpLeosPortTotalStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1), ).setIndexNames((0, "WWP-LEOS-PORT-STATS-MIB", "wwpLeosPortTotalStatsPortId"))
if mibBuilder.loadTexts: wwpLeosPortTotalStatsEntry.setStatus('current')
# Index column (.1).
wwpLeosPortTotalStatsPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsPortId.setStatus('current')
# Rx counters and error/size-distribution columns (.2-.15).
wwpLeosPortTotalStatsRxBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxBytes.setStatus('current')
wwpLeosPortTotalStatsRxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxPkts.setStatus('current')
wwpLeosPortTotalStatsRxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxCrcErrorPkts.setStatus('current')
wwpLeosPortTotalStatsRxBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxBcastPkts.setStatus('current')
wwpLeosPortTotalStatsUndersizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsUndersizePkts.setStatus('current')
wwpLeosPortTotalStatsOversizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsOversizePkts.setStatus('current')
wwpLeosPortTotalStatsFragmentPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsFragmentPkts.setStatus('current')
wwpLeosPortTotalStatsJabberPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsJabberPkts.setStatus('current')
wwpLeosPortTotalStats64BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats64BytePkts.setStatus('current')
wwpLeosPortTotalStats65To127BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats65To127BytePkts.setStatus('current')
wwpLeosPortTotalStats128To255BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats128To255BytePkts.setStatus('current')
wwpLeosPortTotalStats256To511BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats256To511BytePkts.setStatus('current')
wwpLeosPortTotalStats512To1023BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats512To1023BytePkts.setStatus('current')
wwpLeosPortTotalStats1024To1518BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats1024To1518BytePkts.setStatus('current')
# Tx counters, error and collision columns (.16-.31).  TxTBytes (.17) is
# deprecated in the MIB but still registered for backward compatibility.
wwpLeosPortTotalStatsTxBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxBytes.setStatus('current')
wwpLeosPortTotalStatsTxTBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxTBytes.setStatus('deprecated')
wwpLeosPortTotalStatsTxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxPkts.setStatus('current')
wwpLeosPortTotalStatsTxExDeferPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxExDeferPkts.setStatus('current')
wwpLeosPortTotalStatsTxGiantPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxGiantPkts.setStatus('current')
wwpLeosPortTotalStatsTxUnderRunPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxUnderRunPkts.setStatus('current')
wwpLeosPortTotalStatsTxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxCrcErrorPkts.setStatus('current')
wwpLeosPortTotalStatsTxLCheckErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxLCheckErrorPkts.setStatus('current')
wwpLeosPortTotalStatsTxLOutRangePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxLOutRangePkts.setStatus('current')
wwpLeosPortTotalStatsTxLateCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxLateCollPkts.setStatus('current')
wwpLeosPortTotalStatsTxExCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxExCollPkts.setStatus('current')
wwpLeosPortTotalStatsTxSingleCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxSingleCollPkts.setStatus('current')
wwpLeosPortTotalStatsTxCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxCollPkts.setStatus('current')
wwpLeosPortTotalStatsTxPausePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxPausePkts.setStatus('current')
wwpLeosPortTotalStatsTxMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxMcastPkts.setStatus('current')
wwpLeosPortTotalStatsTxBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxBcastPkts.setStatus('current')
# PortReset (.32): the only writable column — enumerated none(0)/reset(1).
wwpLeosPortTotalStatsPortReset = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("none", 0), ("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsPortReset.setStatus('current')
# Additional Rx/Tx counters appended over later MIB revisions (.33-.63).
wwpLeosPortTotalStatsRxMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxMcastPkts.setStatus('current')
wwpLeosPortTotalStatsRxPausePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxPausePkts.setStatus('current')
wwpLeosPortTotalStats1519To2047BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats1519To2047BytePkts.setStatus('current')
wwpLeosPortTotalStats2048To4095BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats2048To4095BytePkts.setStatus('current')
wwpLeosPortTotalStats4096To9216BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStats4096To9216BytePkts.setStatus('current')
wwpLeosPortTotalStatsTxDeferPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxDeferPkts.setStatus('current')
wwpLeosPortTotalStatsTx64BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx64BytePkts.setStatus('current')
wwpLeosPortTotalStatsTx65To127BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx65To127BytePkts.setStatus('current')
wwpLeosPortTotalStatsTx128To255BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx128To255BytePkts.setStatus('current')
wwpLeosPortTotalStatsTx256To511BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx256To511BytePkts.setStatus('current')
wwpLeosPortTotalStatsTx512To1023BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx512To1023BytePkts.setStatus('current')
wwpLeosPortTotalStatsTx1024To1518BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx1024To1518BytePkts.setStatus('current')
wwpLeosPortTotalStatsTx1519To2047BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx1519To2047BytePkts.setStatus('current')
wwpLeosPortTotalStatsTx2048To4095BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx2048To4095BytePkts.setStatus('current')
wwpLeosPortTotalStatsTx4096To9216BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTx4096To9216BytePkts.setStatus('current')
wwpLeosPortTotalStatsRxFpgaDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxFpgaDropPkts.setStatus('current')
wwpLeosPortTotalStatsPortLinkUp = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsPortLinkUp.setStatus('current')
wwpLeosPortTotalStatsPortLinkDown = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsPortLinkDown.setStatus('current')
wwpLeosPortTotalStatsPortLinkFlap = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 51), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsPortLinkFlap.setStatus('current')
wwpLeosPortTotalStatsRxUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 52), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxUcastPkts.setStatus('current')
wwpLeosPortTotalStatsTxUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 53), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxUcastPkts.setStatus('current')
wwpLeosPortTotalStatsRxDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 54), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxDropPkts.setStatus('current')
wwpLeosPortTotalStatsRxDiscardPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 55), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxDiscardPkts.setStatus('current')
wwpLeosPortTotalStatsRxLOutRangePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 56), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxLOutRangePkts.setStatus('current')
wwpLeosPortTotalStatsRxFpgaBufferDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 57), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxFpgaBufferDropPkts.setStatus('current')
wwpLeosPortTotalStatsTxFpgaBufferDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 58), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsTxFpgaBufferDropPkts.setStatus('current')
wwpLeosPortTotalStatsFpgaVlanPriFilterDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 59), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsFpgaVlanPriFilterDropPkts.setStatus('current')
wwpLeosPortTotalStatsFpgaRxErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 60), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsFpgaRxErrorPkts.setStatus('current')
wwpLeosPortTotalStatsFpgaRxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 61), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsFpgaRxCrcErrorPkts.setStatus('current')
wwpLeosPortTotalStatsFpgaRxIpCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 62), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsFpgaRxIpCrcErrorPkts.setStatus('current')
wwpLeosPortTotalStatsRxInErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 3, 1, 63), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalStatsRxInErrorPkts.setStatus('current')
# ---------------------------------------------------------------------------
# wwpLeosPortHCStatsTable (...60.3.1.1.4): high-capacity (64-bit) variant of
# the per-port statistics, indexed by wwpLeosPortHCStatsPortId.  Columns
# mirror the 32-bit tables but use Counter64 (HC counters, SMIv2/RFC 2578),
# with two trailing TimeTicks columns (LastRefresh .54, LastChange .55).
# Writable exception: PortReset (.32, none(0)/reset(1)); TxTBytes (.17) is
# deprecated.
# ---------------------------------------------------------------------------
wwpLeosPortHCStatsTable = MibTable((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4), )
if mibBuilder.loadTexts: wwpLeosPortHCStatsTable.setStatus('current')
wwpLeosPortHCStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1), ).setIndexNames((0, "WWP-LEOS-PORT-STATS-MIB", "wwpLeosPortHCStatsPortId"))
if mibBuilder.loadTexts: wwpLeosPortHCStatsEntry.setStatus('current')
# Index column (.1).
wwpLeosPortHCStatsPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsPortId.setStatus('current')
# Rx counters and size-distribution columns (.2-.15), 64-bit.
wwpLeosPortHCStatsRxBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxBytes.setStatus('current')
wwpLeosPortHCStatsRxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxPkts.setStatus('current')
wwpLeosPortHCStatsRxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxCrcErrorPkts.setStatus('current')
wwpLeosPortHCStatsRxBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxBcastPkts.setStatus('current')
wwpLeosPortHCStatsUndersizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsUndersizePkts.setStatus('current')
wwpLeosPortHCStatsOversizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsOversizePkts.setStatus('current')
wwpLeosPortHCStatsFragmentPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsFragmentPkts.setStatus('current')
wwpLeosPortHCStatsJabberPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsJabberPkts.setStatus('current')
wwpLeosPortHCStats64BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats64BytePkts.setStatus('current')
wwpLeosPortHCStats65To127BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats65To127BytePkts.setStatus('current')
wwpLeosPortHCStats128To255BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats128To255BytePkts.setStatus('current')
wwpLeosPortHCStats256To511BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats256To511BytePkts.setStatus('current')
wwpLeosPortHCStats512To1023BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats512To1023BytePkts.setStatus('current')
wwpLeosPortHCStats1024To1518BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats1024To1518BytePkts.setStatus('current')
# Tx counters, error and collision columns (.16-.31); TxTBytes (.17)
# deprecated but kept registered.
wwpLeosPortHCStatsTxBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxBytes.setStatus('current')
wwpLeosPortHCStatsTxTBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxTBytes.setStatus('deprecated')
wwpLeosPortHCStatsTxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxPkts.setStatus('current')
wwpLeosPortHCStatsTxExDeferPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxExDeferPkts.setStatus('current')
wwpLeosPortHCStatsTxGiantPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxGiantPkts.setStatus('current')
wwpLeosPortHCStatsTxUnderRunPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxUnderRunPkts.setStatus('current')
wwpLeosPortHCStatsTxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxCrcErrorPkts.setStatus('current')
wwpLeosPortHCStatsTxLCheckErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 23), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxLCheckErrorPkts.setStatus('current')
wwpLeosPortHCStatsTxLOutRangePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 24), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxLOutRangePkts.setStatus('current')
wwpLeosPortHCStatsTxLateCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 25), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxLateCollPkts.setStatus('current')
wwpLeosPortHCStatsTxExCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxExCollPkts.setStatus('current')
wwpLeosPortHCStatsTxSingleCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 27), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxSingleCollPkts.setStatus('current')
wwpLeosPortHCStatsTxCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 28), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxCollPkts.setStatus('current')
wwpLeosPortHCStatsTxPausePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 29), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxPausePkts.setStatus('current')
wwpLeosPortHCStatsTxMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxMcastPkts.setStatus('current')
wwpLeosPortHCStatsTxBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxBcastPkts.setStatus('current')
# PortReset (.32): the only writable column — enumerated none(0)/reset(1).
wwpLeosPortHCStatsPortReset = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("none", 0), ("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPortHCStatsPortReset.setStatus('current')
# Later additions (.33-.53) plus the two timestamp columns (.54-.55).
wwpLeosPortHCStatsRxMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxMcastPkts.setStatus('current')
wwpLeosPortHCStatsRxPausePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxPausePkts.setStatus('current')
wwpLeosPortHCStats1519To2047BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 35), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats1519To2047BytePkts.setStatus('current')
wwpLeosPortHCStats2048To4095BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats2048To4095BytePkts.setStatus('current')
wwpLeosPortHCStats4096To9216BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStats4096To9216BytePkts.setStatus('current')
wwpLeosPortHCStatsTxDeferPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxDeferPkts.setStatus('current')
wwpLeosPortHCStatsTx64BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx64BytePkts.setStatus('current')
wwpLeosPortHCStatsTx65To127BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 40), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx65To127BytePkts.setStatus('current')
wwpLeosPortHCStatsTx128To255BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 41), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx128To255BytePkts.setStatus('current')
wwpLeosPortHCStatsTx256To511BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 42), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx256To511BytePkts.setStatus('current')
wwpLeosPortHCStatsTx512To1023BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 43), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx512To1023BytePkts.setStatus('current')
wwpLeosPortHCStatsTx1024To1518BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 44), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx1024To1518BytePkts.setStatus('current')
wwpLeosPortHCStatsTx1519To2047BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 45), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx1519To2047BytePkts.setStatus('current')
wwpLeosPortHCStatsTx2048To4095BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 46), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx2048To4095BytePkts.setStatus('current')
wwpLeosPortHCStatsTx4096To9216BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 47), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTx4096To9216BytePkts.setStatus('current')
wwpLeosPortHCStatsRxUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 48), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxUcastPkts.setStatus('current')
wwpLeosPortHCStatsTxUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 49), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsTxUcastPkts.setStatus('current')
wwpLeosPortHCStatsRxDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 50), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxDropPkts.setStatus('current')
wwpLeosPortHCStatsRxDiscardPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 51), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxDiscardPkts.setStatus('current')
wwpLeosPortHCStatsRxLOutRangePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 52), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxLOutRangePkts.setStatus('current')
wwpLeosPortHCStatsRxInErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 53), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsRxInErrorPkts.setStatus('current')
wwpLeosPortHCStatsLastRefresh = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 54), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsLastRefresh.setStatus('current')
wwpLeosPortHCStatsLastChange = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 4, 1, 55), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortHCStatsLastChange.setStatus('current')
# ---------------------------------------------------------------------------
# wwpLeosPortTotalHCStatsTable (...60.3.1.1.5): 64-bit (high-capacity)
# "total" statistics per port, indexed by wwpLeosPortTotalHCStatsPortId.
# Same column layout as the other tables in this module: read-only Counter64
# counters, a read-write PortReset control (.32, none(0)/reset(1)), and a
# deprecated TxTBytes (.17).  Additional columns of this table are registered
# further down in the file.
# ---------------------------------------------------------------------------
wwpLeosPortTotalHCStatsTable = MibTable((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5), )
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTable.setStatus('current')
wwpLeosPortTotalHCStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1), ).setIndexNames((0, "WWP-LEOS-PORT-STATS-MIB", "wwpLeosPortTotalHCStatsPortId"))
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsEntry.setStatus('current')
# Index column (.1).
wwpLeosPortTotalHCStatsPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsPortId.setStatus('current')
# Rx counters and size-distribution columns (.2-.15), 64-bit.
wwpLeosPortTotalHCStatsRxBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxBytes.setStatus('current')
wwpLeosPortTotalHCStatsRxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxPkts.setStatus('current')
wwpLeosPortTotalHCStatsRxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxCrcErrorPkts.setStatus('current')
wwpLeosPortTotalHCStatsRxBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxBcastPkts.setStatus('current')
wwpLeosPortTotalHCStatsUndersizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsUndersizePkts.setStatus('current')
wwpLeosPortTotalHCStatsOversizePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsOversizePkts.setStatus('current')
wwpLeosPortTotalHCStatsFragmentPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsFragmentPkts.setStatus('current')
wwpLeosPortTotalHCStatsJabberPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsJabberPkts.setStatus('current')
wwpLeosPortTotalHCStats64BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats64BytePkts.setStatus('current')
wwpLeosPortTotalHCStats65To127BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats65To127BytePkts.setStatus('current')
wwpLeosPortTotalHCStats128To255BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats128To255BytePkts.setStatus('current')
wwpLeosPortTotalHCStats256To511BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats256To511BytePkts.setStatus('current')
wwpLeosPortTotalHCStats512To1023BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats512To1023BytePkts.setStatus('current')
wwpLeosPortTotalHCStats1024To1518BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats1024To1518BytePkts.setStatus('current')
# Tx counters, error and collision columns (.16-.31); TxTBytes (.17)
# deprecated but kept registered.
wwpLeosPortTotalHCStatsTxBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxBytes.setStatus('current')
wwpLeosPortTotalHCStatsTxTBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxTBytes.setStatus('deprecated')
wwpLeosPortTotalHCStatsTxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxExDeferPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxExDeferPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxGiantPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxGiantPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxUnderRunPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxUnderRunPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxCrcErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxCrcErrorPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxLCheckErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 23), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxLCheckErrorPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxLOutRangePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 24), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxLOutRangePkts.setStatus('current')
wwpLeosPortTotalHCStatsTxLateCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 25), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxLateCollPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxExCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxExCollPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxSingleCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 27), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxSingleCollPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxCollPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 28), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxCollPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxPausePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 29), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxPausePkts.setStatus('current')
wwpLeosPortTotalHCStatsTxMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxMcastPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxBcastPkts.setStatus('current')
# PortReset (.32): the only writable column — enumerated none(0)/reset(1).
wwpLeosPortTotalHCStatsPortReset = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("none", 0), ("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsPortReset.setStatus('current')
wwpLeosPortTotalHCStatsRxMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxMcastPkts.setStatus('current')
wwpLeosPortTotalHCStatsRxPausePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxPausePkts.setStatus('current')
wwpLeosPortTotalHCStats1519To2047BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 35), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats1519To2047BytePkts.setStatus('current')
wwpLeosPortTotalHCStats2048To4095BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats2048To4095BytePkts.setStatus('current')
wwpLeosPortTotalHCStats4096To9216BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStats4096To9216BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTxDeferPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxDeferPkts.setStatus('current')
wwpLeosPortTotalHCStatsTx64BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx64BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTx65To127BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 40), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx65To127BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTx128To255BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 41), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx128To255BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTx256To511BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 42), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx256To511BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTx512To1023BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 43), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx512To1023BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTx1024To1518BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 44), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx1024To1518BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTx1519To2047BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 45), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx1519To2047BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTx2048To4095BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 46), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx2048To4095BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsTx4096To9216BytePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 47), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTx4096To9216BytePkts.setStatus('current')
wwpLeosPortTotalHCStatsRxUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 48), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxUcastPkts.setStatus('current')
wwpLeosPortTotalHCStatsTxUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 49), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsTxUcastPkts.setStatus('current')
wwpLeosPortTotalHCStatsRxDropPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 50), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxDropPkts.setStatus('current')
wwpLeosPortTotalHCStatsRxDiscardPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 51), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxDiscardPkts.setStatus('current')
wwpLeosPortTotalHCStatsRxLOutRangePkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 52), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxLOutRangePkts.setStatus('current')
wwpLeosPortTotalHCStatsRxInErrorPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 53), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsRxInErrorPkts.setStatus('current')
wwpLeosPortTotalHCStatsLastRefresh = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 54), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsLastRefresh.setStatus('current')
wwpLeosPortTotalHCStatsLastChange = MibTableColumn((1, 3, 6, 1, 4, 1, 6141, 2, 60, 3, 1, 1, 5, 1, 55), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPortTotalHCStatsLastChange.setStatus('current')
# ---------------------------------------------------------------------------
# Auto-generated export boilerplate: registers every managed object defined by
# this module under the "WWP-LEOS-PORT-STATS-MIB" module name so that other
# compiled MIB modules can import them through mibBuilder.  PYSNMP_MODULE_ID
# identifies the module's MODULE-IDENTITY object.  Do not edit by hand.
# ---------------------------------------------------------------------------
mibBuilder.exportSymbols("WWP-LEOS-PORT-STATS-MIB", wwpLeosPortStatsRxFpgaBufferDropPkts=wwpLeosPortStatsRxFpgaBufferDropPkts, wwpLeosPortTotalStatsTxTBytes=wwpLeosPortTotalStatsTxTBytes, wwpLeosPortTotalHCStatsRxMcastPkts=wwpLeosPortTotalHCStatsRxMcastPkts, wwpLeosPortTotalHCStatsTxUcastPkts=wwpLeosPortTotalHCStatsTxUcastPkts, wwpLeosPortStatsTx1024To1518BytePkts=wwpLeosPortStatsTx1024To1518BytePkts, wwpLeosPortTotalStatsTxBytes=wwpLeosPortTotalStatsTxBytes, wwpLeosPortHCStatsTxBytes=wwpLeosPortHCStatsTxBytes, wwpLeosPortTotalStatsRxInErrorPkts=wwpLeosPortTotalStatsRxInErrorPkts, wwpLeosPortTotalStatsTxCrcErrorPkts=wwpLeosPortTotalStatsTxCrcErrorPkts, wwpLeosPortStatsRxBcastPkts=wwpLeosPortStatsRxBcastPkts, wwpLeosPortHCStatsTxLCheckErrorPkts=wwpLeosPortHCStatsTxLCheckErrorPkts, wwpLeosPortHCStats4096To9216BytePkts=wwpLeosPortHCStats4096To9216BytePkts, wwpLeosPortStatsPortReset=wwpLeosPortStatsPortReset, wwpLeosPortStatsPortId=wwpLeosPortStatsPortId, wwpLeosPortTotalStats256To511BytePkts=wwpLeosPortTotalStats256To511BytePkts, wwpLeosPortHCStatsTxCollPkts=wwpLeosPortHCStatsTxCollPkts, wwpLeosPortTotalStatsPortReset=wwpLeosPortTotalStatsPortReset, wwpLeosPortTotalHCStatsTxBcastPkts=wwpLeosPortTotalHCStatsTxBcastPkts, wwpLeosPortHCStatsTx64BytePkts=wwpLeosPortHCStatsTx64BytePkts, wwpLeosPortStatsFpgaRxErrorPkts=wwpLeosPortStatsFpgaRxErrorPkts, wwpLeosPortHCStatsTable=wwpLeosPortHCStatsTable, wwpLeosPortStatsRxDiscardPkts=wwpLeosPortStatsRxDiscardPkts, wwpLeosPortStatsFpgaRxCrcErrorPkts=wwpLeosPortStatsFpgaRxCrcErrorPkts, wwpLeosPortTotalStatsTx64BytePkts=wwpLeosPortTotalStatsTx64BytePkts, wwpLeosPortTotalHCStats4096To9216BytePkts=wwpLeosPortTotalHCStats4096To9216BytePkts, wwpLeosPortTotalStats4096To9216BytePkts=wwpLeosPortTotalStats4096To9216BytePkts, wwpLeosPortStatsRxFpgaDropPkts=wwpLeosPortStatsRxFpgaDropPkts, wwpLeosPortTotalStatsRxBcastPkts=wwpLeosPortTotalStatsRxBcastPkts, wwpLeosPortTotalStatsJabberPkts=wwpLeosPortTotalStatsJabberPkts, 
wwpLeosPortHCStatsTxMcastPkts=wwpLeosPortHCStatsTxMcastPkts, wwpLeosPortTotalHCStats128To255BytePkts=wwpLeosPortTotalHCStats128To255BytePkts, wwpLeosPortTotalHCStatsTxPkts=wwpLeosPortTotalHCStatsTxPkts, wwpLeosPortHCStatsRxUcastPkts=wwpLeosPortHCStatsRxUcastPkts, wwpLeosPortTotalStatsTx2048To4095BytePkts=wwpLeosPortTotalStatsTx2048To4095BytePkts, wwpLeosPortHCStatsRxBcastPkts=wwpLeosPortHCStatsRxBcastPkts, wwpLeosPortStatsTxMcastPkts=wwpLeosPortStatsTxMcastPkts, wwpLeosPortHCStatsPortReset=wwpLeosPortHCStatsPortReset, wwpLeosPortTotalHCStatsUndersizePkts=wwpLeosPortTotalHCStatsUndersizePkts, wwpLeosPortTotalStatsTxLOutRangePkts=wwpLeosPortTotalStatsTxLOutRangePkts, wwpLeosPortStatsTxBcastPkts=wwpLeosPortStatsTxBcastPkts, wwpLeosPortHCStatsUndersizePkts=wwpLeosPortHCStatsUndersizePkts, wwpLeosPortTotalStatsPortId=wwpLeosPortTotalStatsPortId, wwpLeosPortHCStatsTxSingleCollPkts=wwpLeosPortHCStatsTxSingleCollPkts, wwpLeosPortTotalStatsPortLinkFlap=wwpLeosPortTotalStatsPortLinkFlap, wwpLeosPortStatsTxLOutRangePkts=wwpLeosPortStatsTxLOutRangePkts, wwpLeosPortHCStatsTxTBytes=wwpLeosPortHCStatsTxTBytes, wwpLeosPortHCStatsTxUnderRunPkts=wwpLeosPortHCStatsTxUnderRunPkts, wwpLeosPortHCStatsRxLOutRangePkts=wwpLeosPortHCStatsRxLOutRangePkts, wwpLeosPortStats256To511BytePkts=wwpLeosPortStats256To511BytePkts, wwpLeosPortHCStatsTxExDeferPkts=wwpLeosPortHCStatsTxExDeferPkts, wwpLeosPortStats65To127BytePkts=wwpLeosPortStats65To127BytePkts, wwpLeosPortTotalHCStatsRxPkts=wwpLeosPortTotalHCStatsRxPkts, wwpLeosPortStatsFragmentPkts=wwpLeosPortStatsFragmentPkts, wwpLeosPortTotalStatsTxPausePkts=wwpLeosPortTotalStatsTxPausePkts, wwpLeosPortTotalHCStatsRxBcastPkts=wwpLeosPortTotalHCStatsRxBcastPkts, wwpLeosPortTotalStatsTx128To255BytePkts=wwpLeosPortTotalStatsTx128To255BytePkts, wwpLeosPortStats2048To4095BytePkts=wwpLeosPortStats2048To4095BytePkts, wwpLeosPortTotalStatsFpgaVlanPriFilterDropPkts=wwpLeosPortTotalStatsFpgaVlanPriFilterDropPkts, 
wwpLeosPortTotalStatsRxLOutRangePkts=wwpLeosPortTotalStatsRxLOutRangePkts, wwpLeosPortStatsMIB=wwpLeosPortStatsMIB, wwpLeosPortTotalStatsTx256To511BytePkts=wwpLeosPortTotalStatsTx256To511BytePkts, wwpLeosPortHCStatsLastRefresh=wwpLeosPortHCStatsLastRefresh, wwpLeosPortTotalStatsTable=wwpLeosPortTotalStatsTable, wwpLeosPortTotalHCStatsRxUcastPkts=wwpLeosPortTotalHCStatsRxUcastPkts, wwpLeosPortTotalStatsPortLinkDown=wwpLeosPortTotalStatsPortLinkDown, wwpLeosPortStatsTx65To127BytePkts=wwpLeosPortStatsTx65To127BytePkts, wwpLeosPortTotalStats1024To1518BytePkts=wwpLeosPortTotalStats1024To1518BytePkts, wwpLeosPortHCStatsTx128To255BytePkts=wwpLeosPortHCStatsTx128To255BytePkts, wwpLeosPortHCStatsJabberPkts=wwpLeosPortHCStatsJabberPkts, wwpLeosPortTotalStatsTx512To1023BytePkts=wwpLeosPortTotalStatsTx512To1023BytePkts, wwpLeosPortTotalStatsTx1024To1518BytePkts=wwpLeosPortTotalStatsTx1024To1518BytePkts, wwpLeosPortHCStats2048To4095BytePkts=wwpLeosPortHCStats2048To4095BytePkts, wwpLeosPortHCStatsRxDropPkts=wwpLeosPortHCStatsRxDropPkts, wwpLeosPortStatsTxPausePkts=wwpLeosPortStatsTxPausePkts, wwpLeosPortStatsRxPausePkts=wwpLeosPortStatsRxPausePkts, wwpLeosPortTotalStatsRxFpgaDropPkts=wwpLeosPortTotalStatsRxFpgaDropPkts, wwpLeosPortTotalHCStatsRxPausePkts=wwpLeosPortTotalHCStatsRxPausePkts, wwpLeosPortStatsRxBytes=wwpLeosPortStatsRxBytes, wwpLeosPortTotalStatsTx1519To2047BytePkts=wwpLeosPortTotalStatsTx1519To2047BytePkts, wwpLeosPortTotalStatsTxDeferPkts=wwpLeosPortTotalStatsTxDeferPkts, wwpLeosPortHCStatsTxLateCollPkts=wwpLeosPortHCStatsTxLateCollPkts, wwpLeosPortTotalStats512To1023BytePkts=wwpLeosPortTotalStats512To1023BytePkts, wwpLeosPortStatsMIBNotifications=wwpLeosPortStatsMIBNotifications, wwpLeosPortHCStatsTx65To127BytePkts=wwpLeosPortHCStatsTx65To127BytePkts, wwpLeosPortTotalStatsTxCollPkts=wwpLeosPortTotalStatsTxCollPkts, wwpLeosPortTotalHCStatsRxLOutRangePkts=wwpLeosPortTotalHCStatsRxLOutRangePkts, wwpLeosPortHCStatsTxPausePkts=wwpLeosPortHCStatsTxPausePkts, 
wwpLeosPortTotalStatsTxMcastPkts=wwpLeosPortTotalStatsTxMcastPkts, wwpLeosPortTotalHCStatsTx64BytePkts=wwpLeosPortTotalHCStatsTx64BytePkts, wwpLeosPortTotalStatsTx4096To9216BytePkts=wwpLeosPortTotalStatsTx4096To9216BytePkts, wwpLeosPortTotalStatsTxGiantPkts=wwpLeosPortTotalStatsTxGiantPkts, wwpLeosPortStatsReset=wwpLeosPortStatsReset, wwpLeosPortTotalStatsTxPkts=wwpLeosPortTotalStatsTxPkts, wwpLeosPortTotalStatsTxSingleCollPkts=wwpLeosPortTotalStatsTxSingleCollPkts, wwpLeosPortStatsPortLinkUp=wwpLeosPortStatsPortLinkUp, wwpLeosPortTotalHCStatsTxLCheckErrorPkts=wwpLeosPortTotalHCStatsTxLCheckErrorPkts, wwpLeosPortTotalStats1519To2047BytePkts=wwpLeosPortTotalStats1519To2047BytePkts, wwpLeosPortTotalHCStatsEntry=wwpLeosPortTotalHCStatsEntry, wwpLeosPortTotalHCStatsTxBytes=wwpLeosPortTotalHCStatsTxBytes, wwpLeosPortStatsTxTBytes=wwpLeosPortStatsTxTBytes, wwpLeosPortStatsRxLOutRangePkts=wwpLeosPortStatsRxLOutRangePkts, wwpLeosPortHCStatsTxDeferPkts=wwpLeosPortHCStatsTxDeferPkts, wwpLeosPortTotalHCStatsRxCrcErrorPkts=wwpLeosPortTotalHCStatsRxCrcErrorPkts, wwpLeosPortTotalStatsFpgaRxCrcErrorPkts=wwpLeosPortTotalStatsFpgaRxCrcErrorPkts, wwpLeosPortTotalStatsFpgaRxIpCrcErrorPkts=wwpLeosPortTotalStatsFpgaRxIpCrcErrorPkts, wwpLeosPortStatsRxUcastPkts=wwpLeosPortStatsRxUcastPkts, wwpLeosPortTotalStatsRxBytes=wwpLeosPortTotalStatsRxBytes, wwpLeosPortHCStatsRxCrcErrorPkts=wwpLeosPortHCStatsRxCrcErrorPkts, wwpLeosPortHCStatsTxCrcErrorPkts=wwpLeosPortHCStatsTxCrcErrorPkts, wwpLeosPortTotalHCStats256To511BytePkts=wwpLeosPortTotalHCStats256To511BytePkts, wwpLeosPortHCStats128To255BytePkts=wwpLeosPortHCStats128To255BytePkts, wwpLeosPortTotalHCStatsLastRefresh=wwpLeosPortTotalHCStatsLastRefresh, wwpLeosPortStatsPortLinkFlap=wwpLeosPortStatsPortLinkFlap, wwpLeosPortStats64BytePkts=wwpLeosPortStats64BytePkts, wwpLeosPortStatsMIBObjects=wwpLeosPortStatsMIBObjects, wwpLeosPortTotalHCStatsTxExDeferPkts=wwpLeosPortTotalHCStatsTxExDeferPkts, 
wwpLeosPortTotalHCStatsTxUnderRunPkts=wwpLeosPortTotalHCStatsTxUnderRunPkts, wwpLeosPortHCStats1024To1518BytePkts=wwpLeosPortHCStats1024To1518BytePkts, wwpLeosPortHCStatsRxPkts=wwpLeosPortHCStatsRxPkts, wwpLeosPortHCStatsTx4096To9216BytePkts=wwpLeosPortHCStatsTx4096To9216BytePkts, wwpLeosPortTotalStatsTxUcastPkts=wwpLeosPortTotalStatsTxUcastPkts, wwpLeosPortTotalStatsRxDiscardPkts=wwpLeosPortTotalStatsRxDiscardPkts, wwpLeosPortStatsTxPkts=wwpLeosPortStatsTxPkts, wwpLeosPortStatsTxUnderRunPkts=wwpLeosPortStatsTxUnderRunPkts, wwpLeosPortTotalStats2048To4095BytePkts=wwpLeosPortTotalStats2048To4095BytePkts, wwpLeosPortHCStatsTxLOutRangePkts=wwpLeosPortHCStatsTxLOutRangePkts, wwpLeosPortHCStatsEntry=wwpLeosPortHCStatsEntry, wwpLeosPortStatsTx256To511BytePkts=wwpLeosPortStatsTx256To511BytePkts, wwpLeosPortTotalHCStatsTxCrcErrorPkts=wwpLeosPortTotalHCStatsTxCrcErrorPkts, wwpLeosPortStats1519To2047BytePkts=wwpLeosPortStats1519To2047BytePkts, wwpLeosPortTotalStats65To127BytePkts=wwpLeosPortTotalStats65To127BytePkts, wwpLeosPortStatsJabberPkts=wwpLeosPortStatsJabberPkts, wwpLeosPortHCStatsTx512To1023BytePkts=wwpLeosPortHCStatsTx512To1023BytePkts, wwpLeosPortStatsTxLateCollPkts=wwpLeosPortStatsTxLateCollPkts, wwpLeosPortStatsRxDropPkts=wwpLeosPortStatsRxDropPkts, wwpLeosPortHCStatsTx1519To2047BytePkts=wwpLeosPortHCStatsTx1519To2047BytePkts, wwpLeosPortHCStatsRxMcastPkts=wwpLeosPortHCStatsRxMcastPkts, wwpLeosPortTotalHCStatsTable=wwpLeosPortTotalHCStatsTable, wwpLeosPortTotalStatsTxUnderRunPkts=wwpLeosPortTotalStatsTxUnderRunPkts, wwpLeosPortTotalStatsTxExDeferPkts=wwpLeosPortTotalStatsTxExDeferPkts, wwpLeosPortStatsRxCrcErrorPkts=wwpLeosPortStatsRxCrcErrorPkts, wwpLeosPortStatsTxDeferPkts=wwpLeosPortStatsTxDeferPkts, wwpLeosPortStatsTx512To1023BytePkts=wwpLeosPortStatsTx512To1023BytePkts, wwpLeosPortTotalStatsTxLCheckErrorPkts=wwpLeosPortTotalStatsTxLCheckErrorPkts, wwpLeosPortTotalHCStats65To127BytePkts=wwpLeosPortTotalHCStats65To127BytePkts, 
wwpLeosPortTotalHCStats1519To2047BytePkts=wwpLeosPortTotalHCStats1519To2047BytePkts, wwpLeosPortStatsTable=wwpLeosPortStatsTable, wwpLeosPortTotalHCStatsRxDiscardPkts=wwpLeosPortTotalHCStatsRxDiscardPkts, wwpLeosPortStats4096To9216BytePkts=wwpLeosPortStats4096To9216BytePkts, wwpLeosPortTotalHCStatsTx512To1023BytePkts=wwpLeosPortTotalHCStatsTx512To1023BytePkts, wwpLeosPortStatsMIBNotificationPrefix=wwpLeosPortStatsMIBNotificationPrefix, wwpLeosPortHCStatsTxGiantPkts=wwpLeosPortHCStatsTxGiantPkts, wwpLeosPortTotalStatsRxUcastPkts=wwpLeosPortTotalStatsRxUcastPkts, wwpLeosPortHCStats64BytePkts=wwpLeosPortHCStats64BytePkts, wwpLeosPortHCStatsRxInErrorPkts=wwpLeosPortHCStatsRxInErrorPkts, wwpLeosPortTotalHCStatsRxBytes=wwpLeosPortTotalHCStatsRxBytes, wwpLeosPortTotalStats64BytePkts=wwpLeosPortTotalStats64BytePkts, wwpLeosPortStatsTxExDeferPkts=wwpLeosPortStatsTxExDeferPkts, wwpLeosPortStatsRxPkts=wwpLeosPortStatsRxPkts, wwpLeosPortTotalHCStatsTxTBytes=wwpLeosPortTotalHCStatsTxTBytes, wwpLeosPortStatsMIBGroups=wwpLeosPortStatsMIBGroups, wwpLeosPortStatsTxCrcErrorPkts=wwpLeosPortStatsTxCrcErrorPkts, wwpLeosPortTotalStatsEntry=wwpLeosPortTotalStatsEntry, wwpLeosPortTotalHCStatsFragmentPkts=wwpLeosPortTotalHCStatsFragmentPkts, wwpLeosPortTotalStatsUndersizePkts=wwpLeosPortTotalStatsUndersizePkts, wwpLeosPortTotalStatsTxFpgaBufferDropPkts=wwpLeosPortTotalStatsTxFpgaBufferDropPkts, wwpLeosPortHCStatsTxPkts=wwpLeosPortHCStatsTxPkts, wwpLeosPortTotalHCStats2048To4095BytePkts=wwpLeosPortTotalHCStats2048To4095BytePkts, wwpLeosPortTotalHCStatsTx65To127BytePkts=wwpLeosPortTotalHCStatsTx65To127BytePkts, wwpLeosPortTotalStatsFpgaRxErrorPkts=wwpLeosPortTotalStatsFpgaRxErrorPkts, wwpLeosPortTotalHCStatsPortId=wwpLeosPortTotalHCStatsPortId, wwpLeosPortTotalStatsFragmentPkts=wwpLeosPortTotalStatsFragmentPkts, wwpLeosPortStats512To1023BytePkts=wwpLeosPortStats512To1023BytePkts, wwpLeosPortStats=wwpLeosPortStats, wwpLeosPortHCStatsTx256To511BytePkts=wwpLeosPortHCStatsTx256To511BytePkts, 
wwpLeosPortTotalHCStatsJabberPkts=wwpLeosPortTotalHCStatsJabberPkts, wwpLeosPortTotalHCStatsTx1024To1518BytePkts=wwpLeosPortTotalHCStatsTx1024To1518BytePkts, wwpLeosPortHCStatsRxBytes=wwpLeosPortHCStatsRxBytes, wwpLeosPortTotalHCStats64BytePkts=wwpLeosPortTotalHCStats64BytePkts, wwpLeosPortStatsTxExCollPkts=wwpLeosPortStatsTxExCollPkts, wwpLeosPortStatsTxGiantPkts=wwpLeosPortStatsTxGiantPkts, wwpLeosPortHCStats65To127BytePkts=wwpLeosPortHCStats65To127BytePkts, wwpLeosPortStats1024To1518BytePkts=wwpLeosPortStats1024To1518BytePkts, wwpLeosPortHCStatsTxUcastPkts=wwpLeosPortHCStatsTxUcastPkts, wwpLeosPortTotalHCStatsTxCollPkts=wwpLeosPortTotalHCStatsTxCollPkts, wwpLeosPortTotalStatsTx65To127BytePkts=wwpLeosPortTotalStatsTx65To127BytePkts, wwpLeosPortTotalStatsRxFpgaBufferDropPkts=wwpLeosPortTotalStatsRxFpgaBufferDropPkts, wwpLeosPortTotalHCStatsTxLOutRangePkts=wwpLeosPortTotalHCStatsTxLOutRangePkts, wwpLeosPortTotalHCStatsTxPausePkts=wwpLeosPortTotalHCStatsTxPausePkts, wwpLeosPortHCStatsOversizePkts=wwpLeosPortHCStatsOversizePkts, wwpLeosPortTotalStatsTxBcastPkts=wwpLeosPortTotalStatsTxBcastPkts, wwpLeosPortStatsTxBytes=wwpLeosPortStatsTxBytes, wwpLeosPortTotalHCStatsTxGiantPkts=wwpLeosPortTotalHCStatsTxGiantPkts, wwpLeosPortTotalHCStatsTxExCollPkts=wwpLeosPortTotalHCStatsTxExCollPkts, wwpLeosPortStatsTx1519To2047BytePkts=wwpLeosPortStatsTx1519To2047BytePkts, PYSNMP_MODULE_ID=wwpLeosPortStatsMIB, wwpLeosPortTotalStatsOversizePkts=wwpLeosPortTotalStatsOversizePkts, wwpLeosPortTotalHCStatsTxMcastPkts=wwpLeosPortTotalHCStatsTxMcastPkts, wwpLeosPortStatsTxLCheckErrorPkts=wwpLeosPortStatsTxLCheckErrorPkts, wwpLeosPortHCStats256To511BytePkts=wwpLeosPortHCStats256To511BytePkts, wwpLeosPortHCStatsTxExCollPkts=wwpLeosPortHCStatsTxExCollPkts, wwpLeosPortHCStatsTxBcastPkts=wwpLeosPortHCStatsTxBcastPkts, wwpLeosPortTotalHCStatsTx4096To9216BytePkts=wwpLeosPortTotalHCStatsTx4096To9216BytePkts, wwpLeosPortHCStatsTx2048To4095BytePkts=wwpLeosPortHCStatsTx2048To4095BytePkts, 
wwpLeosPortTotalStats128To255BytePkts=wwpLeosPortTotalStats128To255BytePkts, wwpLeosPortStatsTx128To255BytePkts=wwpLeosPortStatsTx128To255BytePkts, wwpLeosPortTotalStatsTxLateCollPkts=wwpLeosPortTotalStatsTxLateCollPkts, wwpLeosPortStatsFpgaVlanPriFilterDropPkts=wwpLeosPortStatsFpgaVlanPriFilterDropPkts, wwpLeosPortStatsTxFpgaBufferDropPkts=wwpLeosPortStatsTxFpgaBufferDropPkts, wwpLeosPortTotalStatsRxPkts=wwpLeosPortTotalStatsRxPkts, wwpLeosPortTotalHCStatsTx2048To4095BytePkts=wwpLeosPortTotalHCStatsTx2048To4095BytePkts, wwpLeosPortHCStatsLastChange=wwpLeosPortHCStatsLastChange, wwpLeosPortHCStats512To1023BytePkts=wwpLeosPortHCStats512To1023BytePkts, wwpLeosPortTotalHCStats512To1023BytePkts=wwpLeosPortTotalHCStats512To1023BytePkts, wwpLeosPortTotalHCStatsTxLateCollPkts=wwpLeosPortTotalHCStatsTxLateCollPkts, wwpLeosPortStatsFpgaRxIpCrcErrorPkts=wwpLeosPortStatsFpgaRxIpCrcErrorPkts, wwpLeosPortStatsRxInErrorPkts=wwpLeosPortStatsRxInErrorPkts, wwpLeosPortStatsUndersizePkts=wwpLeosPortStatsUndersizePkts, wwpLeosPortHCStatsTx1024To1518BytePkts=wwpLeosPortHCStatsTx1024To1518BytePkts, wwpLeosPortTotalHCStatsPortReset=wwpLeosPortTotalHCStatsPortReset, wwpLeosPortTotalStatsTxExCollPkts=wwpLeosPortTotalStatsTxExCollPkts, wwpLeosPortStatsOversizePkts=wwpLeosPortStatsOversizePkts, wwpLeosPortStatsTxSingleCollPkts=wwpLeosPortStatsTxSingleCollPkts, wwpLeosPortTotalStatsRxPausePkts=wwpLeosPortTotalStatsRxPausePkts, wwpLeosPortHCStatsRxPausePkts=wwpLeosPortHCStatsRxPausePkts, wwpLeosPortTotalHCStatsRxInErrorPkts=wwpLeosPortTotalHCStatsRxInErrorPkts, wwpLeosPortTotalHCStatsTx256To511BytePkts=wwpLeosPortTotalHCStatsTx256To511BytePkts, wwpLeosPortTotalStatsRxCrcErrorPkts=wwpLeosPortTotalStatsRxCrcErrorPkts, wwpLeosPortTotalHCStatsLastChange=wwpLeosPortTotalHCStatsLastChange, wwpLeosPortTotalStatsRxMcastPkts=wwpLeosPortTotalStatsRxMcastPkts, wwpLeosPortHCStats1519To2047BytePkts=wwpLeosPortHCStats1519To2047BytePkts, wwpLeosPortStatsMIBConformance=wwpLeosPortStatsMIBConformance, 
wwpLeosPortStatsTxUcastPkts=wwpLeosPortStatsTxUcastPkts, wwpLeosPortHCStatsRxDiscardPkts=wwpLeosPortHCStatsRxDiscardPkts, wwpLeosPortTotalHCStatsRxDropPkts=wwpLeosPortTotalHCStatsRxDropPkts, wwpLeosPortStats128To255BytePkts=wwpLeosPortStats128To255BytePkts, wwpLeosPortStatsMIBCompliances=wwpLeosPortStatsMIBCompliances, wwpLeosPortTotalStatsPortLinkUp=wwpLeosPortTotalStatsPortLinkUp, wwpLeosPortStatsEntry=wwpLeosPortStatsEntry, wwpLeosPortStatsTxCollPkts=wwpLeosPortStatsTxCollPkts, wwpLeosPortStatsTx2048To4095BytePkts=wwpLeosPortStatsTx2048To4095BytePkts, wwpLeosPortHCStatsFragmentPkts=wwpLeosPortHCStatsFragmentPkts, wwpLeosPortTotalHCStatsOversizePkts=wwpLeosPortTotalHCStatsOversizePkts, wwpLeosPortTotalStatsRxDropPkts=wwpLeosPortTotalStatsRxDropPkts, wwpLeosPortTotalHCStats1024To1518BytePkts=wwpLeosPortTotalHCStats1024To1518BytePkts, wwpLeosPortTotalHCStatsTxDeferPkts=wwpLeosPortTotalHCStatsTxDeferPkts, wwpLeosPortTotalHCStatsTx128To255BytePkts=wwpLeosPortTotalHCStatsTx128To255BytePkts, wwpLeosPortTotalHCStatsTx1519To2047BytePkts=wwpLeosPortTotalHCStatsTx1519To2047BytePkts, wwpLeosPortStatsTx4096To9216BytePkts=wwpLeosPortStatsTx4096To9216BytePkts, wwpLeosPortHCStatsPortId=wwpLeosPortHCStatsPortId, wwpLeosPortStatsPortLinkDown=wwpLeosPortStatsPortLinkDown, wwpLeosPortTotalHCStatsTxSingleCollPkts=wwpLeosPortTotalHCStatsTxSingleCollPkts, wwpLeosPortStatsRxMcastPkts=wwpLeosPortStatsRxMcastPkts, wwpLeosPortStatsTx64BytePkts=wwpLeosPortStatsTx64BytePkts)
# Empty trailing exportSymbols() call — a pysmi code-generation artifact that
# exports nothing; harmless no-op, safe to ignore.
mibBuilder.exportSymbols("WWP-LEOS-PORT-STATS-MIB", )
| 144.011583 | 17,280 | 0.798265 | 7,270 | 74,598 | 8.190784 | 0.058597 | 0.010815 | 0.015568 | 0.016995 | 0.364855 | 0.360875 | 0.360875 | 0.359464 | 0.158681 | 0.158681 | 0 | 0.108086 | 0.0759 | 74,598 | 517 | 17,281 | 144.290135 | 0.755715 | 0.004558 | 0 | 0 | 0 | 0 | 0.060001 | 0.003825 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.013725 | 0 | 0.013725 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
da608514f3dffd4330f41af76b0891414175cdfe | 385 | py | Python | reformat/commands/clean.py | DjKuj/reformat | 442e643eb0db69ab89a8ea1c869d3bec0ab5b16f | [
"MIT"
] | null | null | null | reformat/commands/clean.py | DjKuj/reformat | 442e643eb0db69ab89a8ea1c869d3bec0ab5b16f | [
"MIT"
] | null | null | null | reformat/commands/clean.py | DjKuj/reformat | 442e643eb0db69ab89a8ea1c869d3bec0ab5b16f | [
"MIT"
] | null | null | null | import os.path
from reformat import output
from reformat.store import Store
from reformat.util import rmtree
def clean(store: Store) -> int:
    """Delete the store's directory and the legacy ``~/.reformat`` directory.

    Each directory that exists is removed recursively and a confirmation
    line is written to the output.  Directories that do not exist are
    silently skipped.

    :param store: the Store whose on-disk directory should be removed
    :return: 0 (exit status for the command)
    """
    targets = (store.directory, os.path.expanduser('~/.reformat'))
    for target in targets:
        # Guard clause: nothing to do for a path that isn't on disk.
        if not os.path.exists(target):
            continue
        rmtree(target)
        output.write_line(f'Cleaned {target}.')
    return 0
| 25.666667 | 54 | 0.688312 | 50 | 385 | 5.24 | 0.5 | 0.068702 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003289 | 0.21039 | 385 | 14 | 55 | 27.5 | 0.858553 | 0 | 0 | 0 | 0 | 0 | 0.080519 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.363636 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
da6883e021df65f19ce8e1c281ad8790a24be494 | 20,892 | py | Python | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Methods/Aerodynamics/AVL/translate_data.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Methods/Aerodynamics/AVL/translate_data.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Methods/Aerodynamics/AVL/translate_data.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | ## @ingroup Methods-Aerodynamics-AVL
#translate_data.py
#
# Created: Mar 2015, T. Momose
# Modified: Jan 2016, E. Botero
# Apr 2017, M. Clarke
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
import numpy as np
import SUAVE
from SUAVE.Core import Data, Units
from .Data.Cases import Run_Case
## @ingroup Methods-Aerodynamics-AVL
def translate_conditions_to_cases(avl ,conditions):
""" Takes SUAVE Conditions() data structure and translates to a Container of
avl Run_Case()s.
Assumptions:
None
Source:
Drela, M. and Youngren, H., AVL, http://web.mit.edu/drela/Public/web/avl
Inputs:
conditions.aerodynamics.angle_of_attack [radians]
conditions.freestream.mach_number [-]
conditions.freestream.density [kilograms per meters**3]
conditions.freestream.gravity [meters per second**2]
Outputs:
cases [data structur]
Properties Used:
N/A
"""
# set up aerodynamic Conditions object
aircraft = avl.geometry
cases = Run_Case.Container()
for i in range(len(conditions.aerodynamics.angle_of_attack)):
case = Run_Case()
case.tag = avl.settings.filenames.case_template.format(avl.current_status.batch_index,i+1)
case.mass = conditions.weights.total_mass
case.conditions.freestream.mach = conditions.freestream.mach_number
case.conditions.freestream.density = conditions.freestream.density
case.conditions.freestream.gravitational_acceleration = conditions.freestream.gravity
case.conditions.aerodynamics.angle_of_attack = conditions.aerodynamics.angle_of_attack[i]/Units.deg
case.conditions.aerodynamics.side_slip_angle = conditions.aerodynamics.side_slip_angle
# determine the number of wings
n_wings = 0
for wing in aircraft.wings:
n_wings += 1
if wing.symmetric == True:
n_wings += 1
case.num_wings = n_wings
case.n_sw = avl.settings.number_spanwise_vortices
cases.append_case(case)
return cases
def translate_results_to_conditions(cases,results):
    """ Takes avl results structure containing the results of each run case stored
    each in its own Data() object. Translates into the Conditions() data structure.

    Assumptions:
        None

    Source:
        Drela, M. and Youngren, H., AVL, http://web.mit.edu/drela/Public/web/avl

    Inputs:
        case_res = results

    Outputs:
        res    [data structure] aerodynamic and stability results, one row per case

    Properties Used:
        N/A
    """
    # case bookkeeping recorded by translate_conditions_to_cases
    num_wings = cases[0].num_wings
    n_sw      = cases[0].n_sw
    dim       = len(cases)   # number of run cases

    # set up aerodynamic Conditions object
    res = SUAVE.Analyses.Mission.Segments.Conditions.Aerodynamics()
    res.stability.static = Data()
    res.stability.dynamic = Data()

    # add missing entries; every array is (dim, 1), one row per case
    res.S_ref = np.zeros((dim,1))
    res.c_ref = np.zeros_like(res.S_ref)
    res.b_ref = np.zeros_like(res.S_ref)
    res.X_ref = np.zeros_like(res.S_ref)
    res.Y_ref = np.zeros_like(res.S_ref)
    res.Z_ref = np.zeros_like(res.S_ref)
    res.aerodynamics.AoA = np.zeros_like(res.S_ref)
    res.aerodynamics.CX = np.zeros_like(res.S_ref)
    res.aerodynamics.CY = np.zeros_like(res.S_ref)
    res.aerodynamics.CZ = np.zeros_like(res.S_ref)
    res.aerodynamics.Cltot = np.zeros_like(res.S_ref)
    res.aerodynamics.Cmtot = np.zeros_like(res.S_ref)
    res.aerodynamics.Cntot = np.zeros_like(res.S_ref)
    res.aerodynamics.roll_moment_coefficient = np.zeros_like(res.S_ref)
    res.aerodynamics.pitch_moment_coefficient = np.zeros_like(res.S_ref)
    res.aerodynamics.yaw_moment_coefficient = np.zeros_like(res.S_ref)
    res.aerodynamics.lift_coefficient = np.zeros_like(res.S_ref)
    res.aerodynamics.drag_breakdown.induced = SUAVE.Analyses.Mission.Segments.Conditions.Conditions()
    res.aerodynamics.drag_breakdown.induced.total = np.zeros_like(res.S_ref)
    res.aerodynamics.drag_breakdown.induced.efficiency_factor= np.zeros_like(res.S_ref)
    res.aerodynamics.oswald_efficiency = np.zeros_like(res.S_ref)

    # stability axis
    res.stability.static.CL_alpha = np.zeros_like(res.S_ref)
    res.stability.static.CY_alpha = np.zeros_like(res.S_ref)
    res.stability.static.Cl_alpha = np.zeros_like(res.S_ref)
    res.stability.static.Cm_alpha = np.zeros_like(res.S_ref)
    res.stability.static.Cn_alpha = np.zeros_like(res.S_ref)
    res.stability.static.CL_beta = np.zeros_like(res.S_ref)
    res.stability.static.CY_beta = np.zeros_like(res.S_ref)
    res.stability.static.Cl_beta = np.zeros_like(res.S_ref)
    res.stability.static.Cm_beta = np.zeros_like(res.S_ref)
    res.stability.static.Cn_beta = np.zeros_like(res.S_ref)
    res.stability.static.CL_p = np.zeros_like(res.S_ref)
    res.stability.static.CL_q = np.zeros_like(res.S_ref)
    res.stability.static.CL_r = np.zeros_like(res.S_ref)
    res.stability.static.CY_p = np.zeros_like(res.S_ref)
    res.stability.static.CY_q = np.zeros_like(res.S_ref)
    res.stability.static.CY_r = np.zeros_like(res.S_ref)
    res.stability.static.Cl_p = np.zeros_like(res.S_ref)
    res.stability.static.Cl_q = np.zeros_like(res.S_ref)
    res.stability.static.Cl_r = np.zeros_like(res.S_ref)
    res.stability.static.Cm_p = np.zeros_like(res.S_ref)
    res.stability.static.Cm_q = np.zeros_like(res.S_ref)
    res.stability.static.Cm_r = np.zeros_like(res.S_ref)
    res.stability.static.Cn_p = np.zeros_like(res.S_ref)
    res.stability.static.Cn_q = np.zeros_like(res.S_ref)
    res.stability.static.Cn_r = np.zeros_like(res.S_ref)

    # body axis derivatives
    # NOTE(review): several rate derivatives (CY_*, Cl_*, Cm_*, Cn_* w.r.t. p/q/r)
    # are re-initialized here and re-assigned in the loop below with the same
    # source values as the stability-axis section above — redundant but harmless.
    res.stability.static.CX_u = np.zeros_like(res.S_ref)
    res.stability.static.CX_v = np.zeros_like(res.S_ref)
    res.stability.static.CX_w = np.zeros_like(res.S_ref)
    res.stability.static.CY_u = np.zeros_like(res.S_ref)
    res.stability.static.CY_v = np.zeros_like(res.S_ref)
    res.stability.static.CY_w = np.zeros_like(res.S_ref)
    res.stability.static.CZ_u = np.zeros_like(res.S_ref)
    res.stability.static.CZ_v = np.zeros_like(res.S_ref)
    res.stability.static.CZ_w = np.zeros_like(res.S_ref)
    res.stability.static.Cl_u = np.zeros_like(res.S_ref)
    res.stability.static.Cl_v = np.zeros_like(res.S_ref)
    res.stability.static.Cl_w = np.zeros_like(res.S_ref)
    res.stability.static.Cm_u = np.zeros_like(res.S_ref)
    res.stability.static.Cm_v = np.zeros_like(res.S_ref)
    res.stability.static.Cm_w = np.zeros_like(res.S_ref)
    res.stability.static.Cn_u = np.zeros_like(res.S_ref)
    res.stability.static.Cn_v = np.zeros_like(res.S_ref)
    res.stability.static.Cn_w = np.zeros_like(res.S_ref)
    res.stability.static.CX_p = np.zeros_like(res.S_ref)
    res.stability.static.CX_q = np.zeros_like(res.S_ref)
    res.stability.static.CX_r = np.zeros_like(res.S_ref)
    res.stability.static.CY_p = np.zeros_like(res.S_ref)
    res.stability.static.CY_q = np.zeros_like(res.S_ref)
    res.stability.static.CY_r = np.zeros_like(res.S_ref)
    res.stability.static.CZ_p = np.zeros_like(res.S_ref)
    res.stability.static.CZ_q = np.zeros_like(res.S_ref)
    res.stability.static.CZ_r = np.zeros_like(res.S_ref)
    res.stability.static.Cl_p = np.zeros_like(res.S_ref)
    res.stability.static.Cl_q = np.zeros_like(res.S_ref)
    res.stability.static.Cl_r = np.zeros_like(res.S_ref)
    res.stability.static.Cm_p = np.zeros_like(res.S_ref)
    res.stability.static.Cm_q = np.zeros_like(res.S_ref)
    res.stability.static.Cm_r = np.zeros_like(res.S_ref)
    res.stability.static.Cn_p = np.zeros_like(res.S_ref)
    res.stability.static.Cn_q = np.zeros_like(res.S_ref)
    res.stability.static.Cn_r = np.zeros_like(res.S_ref)
    res.stability.static.neutral_point = np.zeros_like(res.S_ref)
    # NOTE(review): spiral_stability_condition is allocated but never filled in
    # the loop below — it stays zero unless populated by a caller.
    res.stability.static.spiral_stability_condition = np.zeros_like(res.S_ref)

    # aero results 1: total surface forces and coefficeints
    res.aerodynamics.wing_areas = np.zeros((dim,num_wings))
    res.aerodynamics.wing_CLs = np.zeros_like(res.aerodynamics.wing_areas)
    res.aerodynamics.wing_CDs = np.zeros_like(res.aerodynamics.wing_areas)

    # aero results 2 : sectional forces and coefficients
    res.aerodynamics.wing_local_spans = np.zeros((dim,num_wings,n_sw))
    res.aerodynamics.wing_section_chords = np.zeros_like(res.aerodynamics.wing_local_spans)
    res.aerodynamics.wing_section_cls = np.zeros_like(res.aerodynamics.wing_local_spans)
    res.aerodynamics.wing_section_induced_angle = np.zeros_like(res.aerodynamics.wing_local_spans)
    res.aerodynamics.wing_section_cds = np.zeros_like(res.aerodynamics.wing_local_spans)
    res.stability.static.control_surfaces_cases = {}

    # case tags are assumed to follow 'case_<mach>_<aoa>'; chars [5:7] give the
    # (shared) mach index of this batch
    mach_case = list(results.keys())[0][5:7]
    for i in range(len(results.keys())):
        aoa_case = '{:02d}'.format(i+1)   # reconstruct the i-th case tag
        tag = 'case_' + mach_case + '_' + aoa_case
        case_res = results[tag]

        # stability file
        res.S_ref[i][0] = case_res.S_ref
        res.c_ref[i][0] = case_res.c_ref
        res.b_ref[i][0] = case_res.b_ref
        res.X_ref[i][0] = case_res.X_ref
        res.Y_ref[i][0] = case_res.Y_ref
        res.Z_ref[i][0] = case_res.Z_ref
        res.aerodynamics.AoA[i][0] = case_res.aerodynamics.AoA
        res.aerodynamics.CX[i][0] = case_res.aerodynamics.CX
        res.aerodynamics.CY[i][0] = case_res.aerodynamics.CY
        res.aerodynamics.CZ[i][0] = case_res.aerodynamics.CZ
        res.aerodynamics.Cltot[i][0] = case_res.aerodynamics.Cltot
        res.aerodynamics.Cmtot[i][0] = case_res.aerodynamics.Cmtot
        res.aerodynamics.Cntot[i][0] = case_res.aerodynamics.Cntot
        res.aerodynamics.roll_moment_coefficient[i][0] = case_res.aerodynamics.roll_moment_coefficient
        res.aerodynamics.pitch_moment_coefficient[i][0] = case_res.aerodynamics.pitch_moment_coefficient
        res.aerodynamics.yaw_moment_coefficient[i][0] = case_res.aerodynamics.yaw_moment_coefficient
        res.aerodynamics.lift_coefficient[i][0] = case_res.aerodynamics.total_lift_coefficient
        res.aerodynamics.drag_breakdown.induced.total[i][0] = case_res.aerodynamics.induced_drag_coefficient
        res.aerodynamics.drag_breakdown.induced.efficiency_factor[i][0] = case_res.aerodynamics.oswald_efficiency
        res.aerodynamics.oswald_efficiency[i][0] = case_res.aerodynamics.oswald_efficiency

        # alpha/beta derivatives
        res.stability.static.CL_alpha[i][0] = case_res.stability.alpha_derivatives.lift_curve_slope
        res.stability.static.CY_alpha[i][0] = case_res.stability.alpha_derivatives.side_force_derivative
        res.stability.static.Cl_alpha[i][0] = case_res.stability.alpha_derivatives.roll_moment_derivative
        res.stability.static.Cm_alpha[i][0] = case_res.stability.alpha_derivatives.pitch_moment_derivative
        res.stability.static.Cn_alpha[i][0] = case_res.stability.alpha_derivatives.yaw_moment_derivative
        res.stability.static.CL_beta[i][0] = case_res.stability.beta_derivatives.lift_coefficient_derivative
        res.stability.static.CY_beta[i][0] = case_res.stability.beta_derivatives.side_force_derivative
        res.stability.static.Cl_beta[i][0] = case_res.stability.beta_derivatives.roll_moment_derivative
        res.stability.static.Cm_beta[i][0] = case_res.stability.beta_derivatives.pitch_moment_derivative
        res.stability.static.Cn_beta[i][0] = case_res.stability.beta_derivatives.yaw_moment_derivative

        # rate derivatives (stability axes)
        res.stability.static.CL_p[i][0] = case_res.stability.CL_p
        res.stability.static.CL_q[i][0] = case_res.stability.CL_q
        res.stability.static.CL_r[i][0] = case_res.stability.CL_r
        res.stability.static.CY_p[i][0] = case_res.stability.CY_p
        res.stability.static.CY_q[i][0] = case_res.stability.CY_q
        res.stability.static.CY_r[i][0] = case_res.stability.CY_r
        res.stability.static.Cl_p[i][0] = case_res.stability.Cl_p
        res.stability.static.Cl_q[i][0] = case_res.stability.Cl_q
        res.stability.static.Cl_r[i][0] = case_res.stability.Cl_r
        res.stability.static.Cm_p[i][0] = case_res.stability.Cm_p
        res.stability.static.Cm_q[i][0] = case_res.stability.Cm_q
        res.stability.static.Cm_r[i][0] = case_res.stability.Cm_r
        res.stability.static.Cn_p[i][0] = case_res.stability.Cn_p
        res.stability.static.Cn_q[i][0] = case_res.stability.Cn_q
        res.stability.static.Cn_r[i][0] = case_res.stability.Cn_r

        # body axis derivatives
        res.stability.static.CX_u[i][0] = case_res.stability.CX_u
        res.stability.static.CX_v[i][0] = case_res.stability.CX_v
        res.stability.static.CX_w[i][0] = case_res.stability.CX_w
        res.stability.static.CY_u[i][0] = case_res.stability.CY_u
        res.stability.static.CY_v[i][0] = case_res.stability.CY_v
        res.stability.static.CY_w[i][0] = case_res.stability.CY_w
        res.stability.static.CZ_u[i][0] = case_res.stability.CZ_u
        res.stability.static.CZ_v[i][0] = case_res.stability.CZ_v
        res.stability.static.CZ_w[i][0] = case_res.stability.CZ_w
        res.stability.static.Cl_u[i][0] = case_res.stability.Cl_u
        res.stability.static.Cl_v[i][0] = case_res.stability.Cl_v
        res.stability.static.Cl_w[i][0] = case_res.stability.Cl_w
        res.stability.static.Cm_u[i][0] = case_res.stability.Cm_u
        res.stability.static.Cm_v[i][0] = case_res.stability.Cm_v
        res.stability.static.Cm_w[i][0] = case_res.stability.Cm_w
        res.stability.static.Cn_u[i][0] = case_res.stability.Cn_u
        res.stability.static.Cn_v[i][0] = case_res.stability.Cn_v
        res.stability.static.Cn_w[i][0] = case_res.stability.Cn_w
        res.stability.static.CX_p[i][0] = case_res.stability.CX_p
        res.stability.static.CX_q[i][0] = case_res.stability.CX_q
        res.stability.static.CX_r[i][0] = case_res.stability.CX_r
        # duplicate assignments of the CY/Cl/Cm/Cn rate derivatives (same
        # sources as above) — kept for fidelity with the AVL output layout
        res.stability.static.CY_p[i][0] = case_res.stability.CY_p
        res.stability.static.CY_q[i][0] = case_res.stability.CY_q
        res.stability.static.CY_r[i][0] = case_res.stability.CY_r
        res.stability.static.CZ_p[i][0] = case_res.stability.CZ_p
        res.stability.static.CZ_q[i][0] = case_res.stability.CZ_q
        res.stability.static.CZ_r[i][0] = case_res.stability.CZ_r
        res.stability.static.Cl_p[i][0] = case_res.stability.Cl_p
        res.stability.static.Cl_q[i][0] = case_res.stability.Cl_q
        res.stability.static.Cl_r[i][0] = case_res.stability.Cl_r
        res.stability.static.Cm_p[i][0] = case_res.stability.Cm_p
        res.stability.static.Cm_q[i][0] = case_res.stability.Cm_q
        res.stability.static.Cm_r[i][0] = case_res.stability.Cm_r
        res.stability.static.Cn_p[i][0] = case_res.stability.Cn_p
        res.stability.static.Cn_q[i][0] = case_res.stability.Cn_q
        res.stability.static.Cn_r[i][0] = case_res.stability.Cn_r
        res.stability.static.neutral_point[i][0] = case_res.stability.neutral_point

        # aero surface forces file
        res.aerodynamics.wing_areas[i][:] = case_res.aerodynamics.wing_areas
        res.aerodynamics.wing_CLs[i][:] = case_res.aerodynamics.wing_CLs
        res.aerodynamics.wing_CDs[i][:] = case_res.aerodynamics.wing_CDs

        # aero sectional forces file
        res.aerodynamics.wing_local_spans[i][:] = case_res.aerodynamics.wing_local_spans
        res.aerodynamics.wing_section_chords[i][:] = case_res.aerodynamics.wing_section_chords
        res.aerodynamics.wing_section_cls[i][:] = case_res.aerodynamics.wing_section_cls
        res.aerodynamics.wing_section_induced_angle[i][:] = case_res.aerodynamics.wing_section_aoa_i
        res.aerodynamics.wing_section_cds[i][:] = case_res.aerodynamics.wing_section_cds

        # raw control surface results, keyed by case tag
        res.stability.static.control_surfaces_cases[tag] = case_res.stability.control_surfaces
    return res
| 68.27451 | 143 | 0.536569 | 2,498 | 20,892 | 4.233787 | 0.082466 | 0.217852 | 0.217852 | 0.11649 | 0.791887 | 0.64618 | 0.553423 | 0.522693 | 0.435798 | 0.422088 | 0 | 0.008367 | 0.36502 | 20,892 | 305 | 144 | 68.498361 | 0.788859 | 0.077542 | 0 | 0.225225 | 0 | 0 | 0.000628 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.009009 | false | 0 | 0.018018 | 0 | 0.036036 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
da736a3f3dd605089f843d57e38b1950920c3c89 | 116 | py | Python | Python/Algorithm/SameConditional.py | piovezan/SOpt | a5ec90796b7bdf98f0675457fc4bb99c8695bc40 | [
"MIT"
] | 148 | 2017-08-03T01:49:27.000Z | 2022-03-26T10:39:30.000Z | Python/Algorithm/SameConditional.py | piovezan/SOpt | a5ec90796b7bdf98f0675457fc4bb99c8695bc40 | [
"MIT"
] | 3 | 2017-11-23T19:52:05.000Z | 2020-04-01T00:44:40.000Z | Python/Algorithm/SameConditional.py | piovezan/SOpt | a5ec90796b7bdf98f0675457fc4bb99c8695bc40 | [
"MIT"
] | 59 | 2017-08-03T01:49:19.000Z | 2022-03-31T23:24:38.000Z | if m > 1:
b = 'metros equivalem'
else:
b = 'metro equivale'
#https://pt.stackoverflow.com/q/413280/101
| 16.571429 | 42 | 0.612069 | 17 | 116 | 4.176471 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.11236 | 0.232759 | 116 | 6 | 43 | 19.333333 | 0.685393 | 0.353448 | 0 | 0 | 0 | 0 | 0.405405 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
da7c5ec2949431ebe68423aab555dc96ec34b8aa | 678 | py | Python | blog/blog/posts/views.py | akiracadet/django-rest-sandbox | d5eb8667328b20b85b41b814e1071aad4627fac3 | [
"MIT"
] | null | null | null | blog/blog/posts/views.py | akiracadet/django-rest-sandbox | d5eb8667328b20b85b41b814e1071aad4627fac3 | [
"MIT"
] | 4 | 2021-04-08T19:39:29.000Z | 2021-09-22T19:33:36.000Z | blog/blog/posts/views.py | akiracadet/django-rest-sandbox | d5eb8667328b20b85b41b814e1071aad4627fac3 | [
"MIT"
] | null | null | null | from rest_framework import generics
from rest_framework import permissions
from posts.permissions import IsAuthor
from posts.serializers import PostSerializer
from posts.models import Post
class PostListCreateAPIView(generics.ListCreateAPIView):
    """List all posts, or create a new post."""
    serializer_class = PostSerializer
    queryset = Post.objects.all()
class PostRetrieveUpdateAPIView(generics.RetrieveUpdateAPIView):
    """Retrieve or update one post, guarded by the IsAuthor permission."""
    serializer_class = PostSerializer
    queryset = Post.objects.all()
    permission_classes = (IsAuthor, )
class PostRetrieveDestroyAPIView(generics.RetrieveDestroyAPIView):
    """Retrieve or delete one post, guarded by the IsAuthor permission."""
    serializer_class = PostSerializer
    queryset = Post.objects.all()
    permission_classes = (IsAuthor, )
| 27.12 | 66 | 0.803835 | 64 | 678 | 8.40625 | 0.390625 | 0.050186 | 0.105948 | 0.122677 | 0.395911 | 0.395911 | 0.395911 | 0.395911 | 0.282528 | 0.282528 | 0 | 0 | 0.135693 | 678 | 24 | 67 | 28.25 | 0.918089 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.3125 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
da7fc752c343e746a4bf14526a54e5436c49f950 | 175 | py | Python | lang/py/cookbook/v2/source/cb2_1_7_sol_2.py | ch1huizong/learning | 632267634a9fd84a5f5116de09ff1e2681a6cc85 | [
"MIT"
] | null | null | null | lang/py/cookbook/v2/source/cb2_1_7_sol_2.py | ch1huizong/learning | 632267634a9fd84a5f5116de09ff1e2681a6cc85 | [
"MIT"
] | null | null | null | lang/py/cookbook/v2/source/cb2_1_7_sol_2.py | ch1huizong/learning | 632267634a9fd84a5f5116de09ff1e2681a6cc85 | [
"MIT"
] | null | null | null | revwords = astring.split() # string -> list of words
revwords.reverse() # reverse the list in place
revwords = ' '.join(revwords) # list of strings -> string
| 43.75 | 58 | 0.64 | 21 | 175 | 5.333333 | 0.619048 | 0.107143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.245714 | 175 | 3 | 59 | 58.333333 | 0.848485 | 0.428571 | 0 | 0 | 0 | 0 | 0.010417 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
da86f2cd79fd556f281713005bf092a65cbabe82 | 647 | py | Python | radmc-3d/version_0.41/examples/run_simple_1_align/problem_plot.py | dlmatra/miao | 71799811b21a4249754390a8ec00972723edab99 | [
"MIT"
] | 1 | 2019-11-23T00:03:40.000Z | 2019-11-23T00:03:40.000Z | radmc-3d/version_0.41/examples/run_simple_1_align/problem_plot.py | dlmatra/miao | 71799811b21a4249754390a8ec00972723edab99 | [
"MIT"
] | 3 | 2021-05-26T12:54:50.000Z | 2021-05-27T10:58:48.000Z | radmc-3d/version_0.41/examples/run_simple_1_align/problem_plot.py | dlmatra/miao | 71799811b21a4249754390a8ec00972723edab99 | [
"MIT"
] | 1 | 2021-12-23T14:09:52.000Z | 2021-12-23T14:09:52.000Z | #
# First do, for example:
# radmc3d mctherm
# radmc3d image lambda 0.4e3 theta 45 phi 45 nostar
#
# or viewed at an angle:
#
# radmc3d image lambda 0.4e3 theta 45 phi 45 nostar
#
# Then:
#
# ipython --matplotlib
# %run problem_plot
#
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
import matplotlib.pyplot as plt
import math
import numpy as np
from radmc3dPy.image import *
from plotpoldir import *
au  = 1.49598e13     # Astronomical Unit       [cm]

# Read the radmc3d image and show it with polarization direction vectors.
a   = readImage()
plotImage(a,cmap=cm.hot,au=True,bunit='inu')
# NOTE(review): image[:,:,1,0] and image[:,:,2,0] presumably hold the Q and U
# Stokes components used by plotpoldir — confirm against plotpoldir's signature.
plotpoldir(a.x/au,a.y/au,a.image[:,:,0,0],a.image[:,:,1,0],a.image[:,:,2,0])
da8c75f586a4595ef3cbc889571c201fde3b358c | 1,566 | py | Python | Crawler/modify.py | codeup38/Codeup-OJ-Crawler | e51e1a90cca623b37c89e6236f4c79ca605867e8 | [
"MIT"
] | 3 | 2019-09-04T10:00:06.000Z | 2020-03-24T13:20:42.000Z | Crawler/modify.py | codeup38/Codeup-OJ-Crawler | e51e1a90cca623b37c89e6236f4c79ca605867e8 | [
"MIT"
] | null | null | null | Crawler/modify.py | codeup38/Codeup-OJ-Crawler | e51e1a90cca623b37c89e6236f4c79ca605867e8 | [
"MIT"
] | 3 | 2019-11-27T10:48:45.000Z | 2020-06-17T08:10:31.000Z | #
#
#
#
#
#
#
def modifyData():
    """Normalize crawler result rows from data_origin.txt into raw_data.txt.

    Each input row is whitespace-split.  Column index 3 holds the judge
    verdict, which the crawler emits as several whitespace-separated tokens;
    those tokens are fused back into a single token so every output row has
    exactly one token per logical column.  Rows whose verdict is still
    '컴파일 중' (compiling) are reported on stdout and skipped.

    Side effects: reads data_origin.txt, (over)writes raw_data.txt.
    """
    # 'with' guarantees both handles are closed even if a row is malformed
    with open("data_origin.txt", 'r') as file, open("raw_data.txt", 'w+') as printFile:
        while True:
            text = file.readline()
            if not text:
                break
            res = text.split()
            if not res:
                # robustness: skip blank lines instead of crashing on res[3]
                continue
            if res[3] == '실행' or res[3] == '출력':
                # runtime-error / output-limit verdicts span three tokens
                _write_row(printFile, res, merged=3)
            elif res[3] == '컴파일' and res[4] == '중':
                # still compiling: warn and leave the row out of the output
                print('데이터 내에 \'컴파일 중\'인 데이터가 있습니다.\n')
                print(res[0],'번을 확인해주세요. (크롤링에서 제외됩니다)\n')
            else:
                # ordinary verdicts span two tokens
                _write_row(printFile, res, merged=2)


def _write_row(out, tokens, merged):
    """Write one space-separated row, fusing `merged` tokens at index 3 into one."""
    total = len(tokens) - (merged - 1)   # columns after fusing the verdict
    for i in range(total):
        if i < 3:
            out.write(tokens[i])
        elif i == 3:
            out.write(''.join(tokens[3:3 + merged]))
        else:
            out.write(tokens[i + merged - 1])
        out.write(' ')
    out.write('\n')
if __name__ == '__main__':
    # CLI entry point: rewrite the crawl dump, then pause so a console
    # window stays open until the user presses a key.
    modifyData()
    print('Done!')
    input('Press Any Key ')
| 23.029412 | 66 | 0.380587 | 161 | 1,566 | 3.639752 | 0.385093 | 0.334471 | 0.174061 | 0.109215 | 0.498294 | 0.399317 | 0.341297 | 0.341297 | 0.341297 | 0.25256 | 0 | 0.022032 | 0.478289 | 1,566 | 67 | 67 | 23.373134 | 0.695226 | 0.018519 | 0 | 0.414634 | 0 | 0 | 0.08055 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02439 | false | 0 | 0 | 0 | 0.02439 | 0.463415 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
da932f753addb664945f5b1151c7313da2752088 | 410 | py | Python | tests/unit-tests/test_health_endpoint.py | Ycallaer/schema_registry_viz | dfd474445ff7f5188d92fca2091d3dcedf48106f | [
"MIT"
] | 1 | 2021-07-07T08:04:57.000Z | 2021-07-07T08:04:57.000Z | tests/unit-tests/test_health_endpoint.py | Ycallaer/schema_registry_viz | dfd474445ff7f5188d92fca2091d3dcedf48106f | [
"MIT"
] | null | null | null | tests/unit-tests/test_health_endpoint.py | Ycallaer/schema_registry_viz | dfd474445ff7f5188d92fca2091d3dcedf48106f | [
"MIT"
] | null | null | null | import os
from starlette.testclient import TestClient
from schema_reg_viz.main import app
import pytest
@pytest.fixture(scope="session")
def init_test_client(pytestconfig):
    """Session-scoped Starlette TestClient bound to the application."""
    client = TestClient(app)
    return client
def test_health(init_test_client):
    """The /health endpoint answers 200 with the expected app name."""
    resp = init_test_client.get("/health")
    payload = resp.json()
    assert resp.status_code == 200
    assert payload["app_name"] == "schema registry viz"
16f2df4aab4099e8e7dc48f83dfe34985eb61545 | 1,900 | py | Python | book_manage/forms.py | r-e-d-ant/Library-books-management | 37c7970737942956d14e8b0e2878981f1af4953e | [
"MIT"
] | 1 | 2021-03-14T19:22:33.000Z | 2021-03-14T19:22:33.000Z | book_manage/forms.py | r-e-d-ant/library-books-management | 37c7970737942956d14e8b0e2878981f1af4953e | [
"MIT"
] | 1 | 2021-12-16T11:32:36.000Z | 2021-12-17T07:51:31.000Z | book_manage/forms.py | r-e-d-ant/library-books-management | 37c7970737942956d14e8b0e2878981f1af4953e | [
"MIT"
] | 1 | 2021-08-12T15:22:11.000Z | 2021-08-12T15:22:11.000Z | from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, SubmitField, PasswordField
from wtforms.fields.html5 import DateField
from wtforms.validators import DataRequired, EqualTo, Email, Length, ValidationError
from book_manage.models import Admin
# Register forms
class RegistrationForm(FlaskForm):
    """Admin sign-up form with uniqueness checks on username and email."""
    username = StringField('Full name', validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email address', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    submit = SubmitField('Register')

    def validate_username(self, username):
        """Reject usernames already present in the Admin table."""
        existing = Admin.query.filter_by(username=username.data).first()
        if existing is not None:
            raise ValidationError('That username is taken. Please choose a different one')

    def validate_email(self, email):
        """Reject email addresses already present in the Admin table."""
        existing = Admin.query.filter_by(email=email.data).first()
        if existing is not None:
            raise ValidationError('That email is taken. Please choose a different one')
# Login forms
class LoginForm(FlaskForm):
    """Admin login form: email address plus password."""
    email = StringField('Email address', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    submit = SubmitField('Login')
# Upload forms
class Upload(FlaskForm):
    """Form for registering a new book; only the title is required."""
    title = StringField('Book title', validators=[DataRequired()])
    author = StringField('Book author')
    description = TextAreaField('Book description')
    submit = SubmitField('Register the book')
# Borrow forms
class Borrow(FlaskForm):
    """Form for recording a book loan: customer, book, and loan dates."""
    customer = StringField('Customer name', validators=[DataRequired()])
    title = StringField('Book title', validators=[DataRequired()])
    author = StringField('Book author')
    borrow_date = DateField('Borrow date')
    return_date = DateField('Return date')
    submit = SubmitField('Borrow')
| 35.849057 | 91 | 0.704737 | 196 | 1,900 | 6.790816 | 0.336735 | 0.132231 | 0.039068 | 0.042074 | 0.438768 | 0.404207 | 0.404207 | 0.296018 | 0.296018 | 0.296018 | 0 | 0.002579 | 0.183684 | 1,900 | 52 | 92 | 36.538462 | 0.855577 | 0.027368 | 0 | 0.294118 | 0 | 0 | 0.153554 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0.088235 | 0.147059 | 0 | 0.823529 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
16f968f0c1e43847d7c88922868b0b13c278eebd | 4,848 | py | Python | src/Utils.py | Charamba/Cross-Ratio-Arrays-Shape-Descriptor | b252814f54bb11c2519a1bd4d0be3f524faba901 | [
"Unlicense"
] | 2 | 2021-09-05T15:50:02.000Z | 2022-01-05T03:10:50.000Z | src/Utils.py | Charamba/Cross-Ratio-Arrays-Shape-Descriptor | b252814f54bb11c2519a1bd4d0be3f524faba901 | [
"Unlicense"
] | null | null | null | src/Utils.py | Charamba/Cross-Ratio-Arrays-Shape-Descriptor | b252814f54bb11c2519a1bd4d0be3f524faba901 | [
"Unlicense"
] | null | null | null | from Point import *
from scipy.spatial import ConvexHull
def unpackPoint(P):
    """Unpack a point into an (x, y, z) tuple; R2 points get z = 1."""
    if type(P) == P2_Point:
        # projective point already carries its own z coordinate
        (x, y, z) = P.toTuple()
        return (x, y, z)
    (x, y) = P.toTuple()
    return (x, y, 1)
def sortPoints(points, P0):
    """Return `points` sorted by ascending Euclidean distance from P0."""
    return sorted(points, key=lambda Pi: P0.euclideanDistance(Pi))
def removeDuplicates(seq):
    """Drop duplicate items from `seq`, keeping first occurrences in order.

    Items must be hashable (same contract as the original set-based version).
    """
    return list(dict.fromkeys(seq))
def pairsCombinations(elements):
    """All pairs (elements[i], elements[j]) with i < j, ordered by first index."""
    return [(first, second)
            for i, first in enumerate(elements)
            for second in elements[i + 1:]]
def inBoundingBox(P, P1, P2):
    """True when P lies inside the axis-aligned box spanned by P1 and P2.

    A small tolerance absorbs floating-point noise at the box edges.
    """
    (x, y, _) = unpackPoint(P)
    tol = 1E-9
    x_lo, x_hi = min(P1.x, P2.x), max(P1.x, P2.x)
    y_lo, y_hi = min(P1.y, P2.y), max(P1.y, P2.y)
    return (y_lo - tol <= y <= y_hi + tol) and (x_lo - tol <= x <= x_hi + tol)
def convex_hull_vertices(points, interpoints_sample=0):
    """Convex-hull vertices of `points`, optionally densified along the edges.

    When interpoints_sample > 0, each hull edge is sampled at that many
    evenly spaced points (the hull polygon is closed by repeating the first
    vertex); otherwise the bare hull vertices are returned.
    """
    coords = []
    for p in points:
        (px, py) = p.toTuple()
        coords.append([px, py])
    hull = ConvexHull(coords, incremental=True)

    vertices = [R2_Point(coords[idx][0], coords[idx][1]) for idx in hull.vertices]

    if interpoints_sample > 0:
        vertices = vertices + [vertices[0]]   # close the polygon
        hull_points = []
        for start, stop in zip(vertices[:-1], vertices[1:]):
            edge = stop - start
            for t in np.arange(0, 1, 1 / interpoints_sample):
                hull_points.append(start + t * edge)
        return hull_points
    return vertices
def convex_hull_vertices_add_points(points, additive_points_number=0):
    """Hull vertices plus ~`additive_points_number` points spread along the edges.

    A first pass measures which edges are shorter than one step (their length
    becomes 'residue'); the step size is then re-derived so that short edges
    still receive at least one sample in the second, emitting pass.
    """
    # build the hull from the raw (x, y) coordinates
    wPListofList = []
    for point in points:
        (x, y) = point.toTuple()
        wPListofList.append([x, y])
    hull = ConvexHull(wPListofList, incremental=True)
    vertices = []
    length_Hull = 0
    for vIndex in hull.vertices:
        [x, y] = wPListofList[vIndex]
        vertices.append(R2_Point(x, y))
    # perimeter of the hull polygon (edges between consecutive vertices ...)
    for i in range(0, len(vertices) - 1):
        p0 = vertices[i]
        pf = vertices[i+1]
        vd = pf - p0
        length_Hull += vd.length()
    # ... plus the closing edge last -> first
    p0 = vertices[-1]
    pf = vertices[0]
    vd = pf - p0
    length_Hull += vd.length()
    hull_points = []
    residue_length = 0
    partial_points_number = 0
    if additive_points_number > 0:
        step_size = length_Hull/additive_points_number
        vertices += [vertices[0]]   # close the polygon for edge iteration
        # pass 1: accumulate the length of edges shorter than one step
        for i in range(0, len(vertices) - 1):
            p0 = vertices[i]
            pf = vertices[i+1]
            vd = pf - p0
            seg_len = vd.length()
            #vd.r2Normalize()
            n_inter_points = seg_len/step_size
            partial_points_number += n_inter_points
            if n_inter_points <= 1.0:
                residue_length += seg_len
            # for l in np.arange(0, 1, 1/n_inter_points):
            #     pl = p0 + l*vd
            #     hull_points.append(pl)
        # shrink the step so short edges still get a sample
        residue_points_number = residue_length/step_size
        step_size = length_Hull/(additive_points_number + residue_points_number)
        # pass 2: emit the interpolated points along every edge
        for i in range(0, len(vertices) - 1):
            p0 = vertices[i]
            pf = vertices[i+1]
            vd = pf - p0
            seg_len = vd.length()
            #vd.r2Normalize()
            # NOTE(review): a zero-length edge makes n_inter_points 0 and
            # 1/n_inter_points divide by zero — assumes distinct hull vertices.
            n_inter_points = seg_len/step_size
            for l in np.arange(0, 1, 1/n_inter_points):
                pl = p0 + l*vd
                hull_points.append(pl)
    else:
        hull_points = vertices
    #print("residue_length = ", residue_length)
    return hull_points
def convex_hull_vertices_add_points_new(points, additive_points_number=0):
    """Hull vertices with interpolated edge points (variant without residue pass).

    NOTE(review): the emitting edge loop below appears twice verbatim, so every
    interpolated point is generated two times — looks like a copy-paste; confirm
    whether the duplication is intentional before relying on the point count.
    """
    # build the hull from the raw (x, y) coordinates
    wPListofList = []
    for point in points:
        (x, y) = point.toTuple()
        wPListofList.append([x, y])
    hull = ConvexHull(wPListofList, incremental=True)
    vertices = []
    length_Hull = 0
    for vIndex in hull.vertices:
        [x, y] = wPListofList[vIndex]
        vertices.append(R2_Point(x, y))
    # perimeter of the hull polygon (consecutive edges ...)
    for i in range(0, len(vertices) - 1):
        p0 = vertices[i]
        pf = vertices[i+1]
        vd = pf - p0
        length_Hull += vd.length()
    # ... plus the closing edge last -> first
    p0 = vertices[-1]
    pf = vertices[0]
    vd = pf - p0
    length_Hull += vd.length()
    hull_points = []
    if additive_points_number > 0:
        step_size = length_Hull/additive_points_number
        vertices += [vertices[0]]   # close the polygon for edge iteration
        for i in range(0, len(vertices) - 1):
            p0 = vertices[i]
            pf = vertices[i+1]
            vd = pf - p0
            seg_len = vd.length()
            #vd.r2Normalize()
            n_inter_points = seg_len/step_size
            for l in np.arange(0, 1, 1/n_inter_points):
                pl = p0 + l*vd
                hull_points.append(pl)
        # second, identical loop — see NOTE(review) in the docstring
        for i in range(0, len(vertices) - 1):
            p0 = vertices[i]
            pf = vertices[i+1]
            vd = pf - p0
            seg_len = vd.length()
            #vd.r2Normalize()
            n_inter_points = seg_len/step_size
            for l in np.arange(0, 1, 1/n_inter_points):
                pl = p0 + l*vd
                hull_points.append(pl)
    else:
        hull_points = vertices
    return hull_points
| 23.533981 | 117 | 0.646452 | 770 | 4,848 | 3.92987 | 0.123377 | 0.010575 | 0.039656 | 0.029081 | 0.679775 | 0.659947 | 0.65499 | 0.642432 | 0.636814 | 0.611699 | 0 | 0.029061 | 0.205033 | 4,848 | 205 | 118 | 23.64878 | 0.756098 | 0.123969 | 0 | 0.731034 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055172 | false | 0 | 0.013793 | 0.006897 | 0.124138 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e50aa3fd4b4f98e21447bfb7e93e53d460ce1d0a | 919 | py | Python | test/examples/test_get_yield_from_iter.py | pmp-p/x-python | e5bdc15af1bf9cf696b2d9a8e1a02a4863b1fb8a | [
"MIT"
] | null | null | null | test/examples/test_get_yield_from_iter.py | pmp-p/x-python | e5bdc15af1bf9cf696b2d9a8e1a02a4863b1fb8a | [
"MIT"
] | null | null | null | test/examples/test_get_yield_from_iter.py | pmp-p/x-python | e5bdc15af1bf9cf696b2d9a8e1a02a4863b1fb8a | [
"MIT"
] | null | null | null | # Test of 3.5+ GET_YIELD_FROM_ITER
# Code is from https://stackoverflow.com/questions/41136410/python-yield-from-or-return-a-generator
def add(a, b):
    """Return the sum a + b."""
    total = a + b
    return total
def sqrt(a):
    """Return the square root of a (computed as a to the power 0.5)."""
    return pow(a, 0.5)
# Argument tuples produced by zip: [(1,), (2,)]
data1 = list(zip(range(1, 3)))
# A "job": the callable plus its argument tuples.
job1 = (sqrt, data1)
def gen_factory(func, seq):
    """Generator factory returning a generator."""
    # Work here runs eagerly, at call time — before any next().
    print("build generator & return")
    return (func(*call_args) for call_args in seq)
def gen_generator(func, seq):
    """Generator function that delegates to an inner generator expression."""
    # Being a generator function, nothing (including the print) runs until
    # the first next() call on the returned generator.
    print("build generator & yield")
    inner = (func(*call_args) for call_args in seq)
    yield from inner
# Demonstration: the factory variant does its work eagerly at call time.
gen_fac = gen_factory(*job1)
print(gen_fac)
# build generator & return <-- printed immediately
print(next(gen_fac))  # start
# Out: 1.0
print([*gen_fac])  # deplete rest of generator
# Out: [1.4142135623730951]
| 27.848485 | 99 | 0.67247 | 137 | 919 | 4.437956 | 0.437956 | 0.039474 | 0.052632 | 0.065789 | 0.161184 | 0.065789 | 0 | 0 | 0 | 0 | 0 | 0.051862 | 0.181719 | 919 | 32 | 100 | 28.71875 | 0.756649 | 0.484222 | 0 | 0 | 0 | 0 | 0.103524 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0.125 | 0.4375 | 0.3125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
e51257ee73fdfeff05c04df9cd6363317b10c3aa | 156 | py | Python | custom_components/nintendo_wishlist/const.py | custom-components/sensor.nintendo_wishlis | 6709a5c1b6e323494e7449fa1ac24e61100fc302 | [
"Apache-2.0"
] | 13 | 2020-05-07T21:31:51.000Z | 2022-02-09T01:53:53.000Z | custom_components/nintendo_wishlist/const.py | custom-components/sensor.nintendo_wishlis | 6709a5c1b6e323494e7449fa1ac24e61100fc302 | [
"Apache-2.0"
] | 19 | 2019-07-24T08:10:06.000Z | 2022-02-05T04:09:34.000Z | custom_components/nintendo_wishlist/const.py | custom-components/sensor.nintendo_wishlis | 6709a5c1b6e323494e7449fa1ac24e61100fc302 | [
"Apache-2.0"
] | 5 | 2019-12-13T17:48:52.000Z | 2020-07-06T07:45:31.000Z | from datetime import timedelta
CONF_COUNTRY = "country"
CONF_WISHLIST = "wishlist"
DOMAIN = "nintendo_wishlist"
DEFAULT_SCAN_INTERVAL = timedelta(hours=1)
| 22.285714 | 42 | 0.807692 | 19 | 156 | 6.368421 | 0.736842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007194 | 0.108974 | 156 | 6 | 43 | 26 | 0.863309 | 0 | 0 | 0 | 0 | 0 | 0.205128 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e522120203f44a7abbb01fa435f4de2a21243828 | 2,213 | py | Python | var/spack/repos/builtin/packages/visionary-dls/package.py | electronicvisions/spack | d6121eb35b4948f7d8aef7ec7a305a5123a7439e | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2019-02-10T13:47:48.000Z | 2019-04-17T13:05:17.000Z | var/spack/repos/builtin/packages/visionary-dls/package.py | einc-eu/spack | 15468b92ed21d970c0111ae19144e85e66746433 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 8 | 2021-05-28T06:39:59.000Z | 2022-03-30T15:12:35.000Z | var/spack/repos/builtin/packages/visionary-dls/package.py | einc-eu/spack | 15468b92ed21d970c0111ae19144e85e66746433 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2018-04-06T09:04:11.000Z | 2020-01-24T12:52:12.000Z | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class VisionaryDls(Package):
    """Visionary Meta Package - software needed for various experiments running
    on DLS (be it spiking or hagen mode)
    """

    homepage = ''
    # some random tarball, to make `spack fetch --dependencies visionary-defaults` work
    url = 'https://github.com/electronicvisions/spack/archive/v0.8.tar.gz'

    # This is only a dummy tarball (see difference between version numbers)
    # TODO: as soon as a MetaPackage-concept has been merged, please update this package
    version('1.0', '372ce038842f20bf0ae02de50c26e85d', url='https://github.com/electronicvisions/spack/archive/v0.8.tar.gz')

    variant("dev", default=True, description="With visionary-dev-tools")

    # Core DLS software stack, plus optional developer tooling.
    depends_on("visionary-dls-core")
    depends_on("visionary-dev-tools", when="+dev")

    # Python ecosystem needed by the experiments (analysis, docs, notebooks, ML).
    depends_on('py-breathe')
    depends_on('py-brian2')
    depends_on('py-flask')
    depends_on('py-h5py')
    depends_on('py-ipycanvas')
    depends_on('py-ipywidgets')
    depends_on('py-lxml') # collab tests
    depends_on('py-myst-parser')
    depends_on('py-notebook')
    depends_on('py-numba')
    depends_on('py-pandas')
    depends_on('py-python-socketio')
    depends_on('py-pytorch-ignite')
    depends_on('py-pytorch-lightning')
    depends_on('py-sacred')
    depends_on('py-scikit-learn')
    depends_on('py-seaborn')
    depends_on('py-soundfile')
    depends_on('py-sphinx')
    depends_on('py-sphinx-rtd-theme')
    depends_on('py-sphinxcontrib-jupyter')
    depends_on('py-sqlalchemy')
    depends_on('py-tqdm')
    depends_on('py-torchvision')
    depends_on('py-wfdb-python')
    depends_on('py-yccp@1.0.0:')
    depends_on('xerces-c')
    # TODO Re-enable once https://github.com/spack/spack/pull/13112 is merged
    # depends_on('tensorflow')

    def install(self, spec, prefix):
        # A meta package installs no software of its own; it only records itself.
        mkdirp(prefix.etc)
        # store a copy of this package.
        install(__file__, join_path(prefix.etc, spec.name + '.py'))
        # we could create some filesystem view here?
| 34.578125 | 124 | 0.691821 | 304 | 2,213 | 4.921053 | 0.526316 | 0.180481 | 0.191176 | 0.022727 | 0.072193 | 0.072193 | 0.072193 | 0.072193 | 0.072193 | 0.072193 | 0 | 0.025123 | 0.172616 | 2,213 | 63 | 125 | 35.126984 | 0.791917 | 0.324446 | 0 | 0 | 0 | 0 | 0.381892 | 0.038121 | 0 | 0 | 0 | 0.015873 | 0 | 1 | 0.026316 | false | 0 | 0.026316 | 0 | 0.131579 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e52546857ee101ecd5ea1303a3507de57faa021b | 793 | py | Python | formulaic/types.py | jzaleski/simpleforms | 898576920845595927e764d14022cd0c95a793c4 | [
"MIT"
] | 2 | 2017-07-11T13:42:43.000Z | 2017-12-09T01:17:08.000Z | formulaic/types.py | jzaleski/simpleforms | 898576920845595927e764d14022cd0c95a793c4 | [
"MIT"
] | null | null | null | formulaic/types.py | jzaleski/simpleforms | 898576920845595927e764d14022cd0c95a793c4 | [
"MIT"
] | null | null | null | __all__ = ('Type',)
import six
if six.PY3:
long = int
class Type(object):
    """
    Class providing type-mapping constants

    Class Attributes:
        BOOLEAN (callable): the `bool[ean]` type-mapping
        DICTIONARY (callable): the `dict` type-mapping
        FLOAT (callable): the `float` type-mapping
        INTEGER (tuple): the `int[eger]` type-mapping(s) (`six.integer_types`)
        LIST (callable): the `list` type-mapping
        LONG (callable): the `long` type-mapping
        STRING (tuple): the `str[ing]` type-mapping(s) (`six.string_types`)
        TEXT (callable): the `text` type-mapping
        UUID (tuple): the `uuid` type-mapping(s) (UUIDs are handled as strings)
    """
    BOOLEAN = bool
    DICTIONARY = dict
    FLOAT = float
    INTEGER = six.integer_types
    LIST = list
    LONG = long
    STRING = six.string_types
    TEXT = six.text_type
    UUID = six.string_types
| 23.323529 | 56 | 0.62169 | 96 | 793 | 5.052083 | 0.322917 | 0.204124 | 0.057732 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001727 | 0.269861 | 793 | 33 | 57 | 24.030303 | 0.835924 | 0.562421 | 0 | 0 | 0 | 0 | 0.013559 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.785714 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
e554189487b8c3a2cf0d0797293dec855884a0a0 | 3,666 | py | Python | pgoapi/protos/POGOProtos/Networking/Responses/DownloadSettingsResponse_pb2.py | PogoHop/pgoapi-hsvr | b5761159e0240bbb81ef6c257fe2eb1bc1ce2d47 | [
"MIT"
] | null | null | null | pgoapi/protos/POGOProtos/Networking/Responses/DownloadSettingsResponse_pb2.py | PogoHop/pgoapi-hsvr | b5761159e0240bbb81ef6c257fe2eb1bc1ce2d47 | [
"MIT"
] | null | null | null | pgoapi/protos/POGOProtos/Networking/Responses/DownloadSettingsResponse_pb2.py | PogoHop/pgoapi-hsvr | b5761159e0240bbb81ef6c257fe2eb1bc1ce2d47 | [
"MIT"
] | null | null | null | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Networking/Responses/DownloadSettingsResponse.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from POGOProtos.Settings import GlobalSettings_pb2 as POGOProtos_dot_Settings_dot_GlobalSettings__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Networking/Responses/DownloadSettingsResponse.proto',
package='POGOProtos.Networking.Responses',
syntax='proto3',
serialized_pb=_b('\n>POGOProtos/Networking/Responses/DownloadSettingsResponse.proto\x12\x1fPOGOProtos.Networking.Responses\x1a(POGOProtos/Settings/GlobalSettings.proto\"n\n\x18\x44ownloadSettingsResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x0c\n\x04hash\x18\x02 \x01(\t\x12\x35\n\x08settings\x18\x03 \x01(\x0b\x32#.POGOProtos.Settings.GlobalSettingsb\x06proto3')
,
dependencies=[POGOProtos_dot_Settings_dot_GlobalSettings__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_DOWNLOADSETTINGSRESPONSE = _descriptor.Descriptor(
name='DownloadSettingsResponse',
full_name='POGOProtos.Networking.Responses.DownloadSettingsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='error', full_name='POGOProtos.Networking.Responses.DownloadSettingsResponse.error', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hash', full_name='POGOProtos.Networking.Responses.DownloadSettingsResponse.hash', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='settings', full_name='POGOProtos.Networking.Responses.DownloadSettingsResponse.settings', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=141,
serialized_end=251,
)
_DOWNLOADSETTINGSRESPONSE.fields_by_name['settings'].message_type = POGOProtos_dot_Settings_dot_GlobalSettings__pb2._GLOBALSETTINGS
DESCRIPTOR.message_types_by_name['DownloadSettingsResponse'] = _DOWNLOADSETTINGSRESPONSE
DownloadSettingsResponse = _reflection.GeneratedProtocolMessageType('DownloadSettingsResponse', (_message.Message,), dict(
DESCRIPTOR = _DOWNLOADSETTINGSRESPONSE,
__module__ = 'POGOProtos.Networking.Responses.DownloadSettingsResponse_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Networking.Responses.DownloadSettingsResponse)
))
_sym_db.RegisterMessage(DownloadSettingsResponse)
# @@protoc_insertion_point(module_scope)
| 42.137931 | 369 | 0.772777 | 408 | 3,666 | 6.676471 | 0.301471 | 0.076725 | 0.106461 | 0.17511 | 0.412261 | 0.321953 | 0.217327 | 0.179883 | 0.165932 | 0.165932 | 0 | 0.02643 | 0.12275 | 3,666 | 86 | 370 | 42.627907 | 0.820585 | 0.080469 | 0 | 0.298507 | 1 | 0.014925 | 0.264715 | 0.247636 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.104478 | 0 | 0.104478 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e56841a06121195a9d3fdeda8e70e12801c41262 | 1,803 | py | Python | pysmapi/interfaces/Image_Delete_DM.py | lllucius/pysmapi | c0d802edb58e835e4d48cb9c28ccfccfe5b5c686 | [
"Apache-2.0"
] | null | null | null | pysmapi/interfaces/Image_Delete_DM.py | lllucius/pysmapi | c0d802edb58e835e4d48cb9c28ccfccfe5b5c686 | [
"Apache-2.0"
] | null | null | null | pysmapi/interfaces/Image_Delete_DM.py | lllucius/pysmapi | c0d802edb58e835e4d48cb9c28ccfccfe5b5c686 | [
"Apache-2.0"
] | null | null | null |
# Copyright 2018-2019 Leland Lucius
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
from pysmapi.smapi import *
class Image_Delete_DM(Request):
# Data security erase
UNSPECIFIED = 0
NOERASE = 1
ERASE = 2
data_security_erasure_names = {UNSPECIFIED: "UNSPECIFIED", NOERASE: "NOERASE", ERASE: "ERASE"}
def __init__(self,
data_security_erase = UNSPECIFIED,
**kwargs):
super(Image_Delete_DM, self).__init__(**kwargs)
# Request parameters
self._data_security_erase = data_security_erase
# Response values
self._operation_id = 0
@property
def data_security_erase(self):
return self._data_security_erase
@data_security_erase.setter
def data_security_erase(self, value):
self._data_security_erase = value
@property
def operation_id(self):
return self._operation_id
@operation_id.setter
def operation_id(self, value):
self._operation_id = value
def pack(self):
# data_security_erase (int1)
buf = struct.pack("B", self._data_security_erase)
return buf
def unpack(self, buf):
# operation_id (int4; range -1-2147483647)
self._operation_id, = struct.unpack("!I", buf[:4])
| 28.171875 | 98 | 0.681642 | 233 | 1,803 | 5.055794 | 0.454936 | 0.122241 | 0.158744 | 0.106961 | 0.105263 | 0.064516 | 0.064516 | 0 | 0 | 0 | 0 | 0.021834 | 0.237937 | 1,803 | 63 | 99 | 28.619048 | 0.835517 | 0.374931 | 0 | 0.066667 | 0 | 0 | 0.023529 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.233333 | false | 0 | 0.066667 | 0.066667 | 0.566667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
e5b1467b225a1034e82ffffd60517fc125f3f32f | 394 | py | Python | handler/HomeHandler.py | animal-breeding-zoo/typhoon-weather | 762e2971889668236afef9b966c67ae1a4bbf532 | [
"BSD-3-Clause"
] | null | null | null | handler/HomeHandler.py | animal-breeding-zoo/typhoon-weather | 762e2971889668236afef9b966c67ae1a4bbf532 | [
"BSD-3-Clause"
] | null | null | null | handler/HomeHandler.py | animal-breeding-zoo/typhoon-weather | 762e2971889668236afef9b966c67ae1a4bbf532 | [
"BSD-3-Clause"
] | null | null | null | # encoding: utf-8
from tornado import gen, web
from .BaseHandler import BaseHandler
class HomeHandler(BaseHandler):
    """Handler for the site root: permanently redirects to the default city page."""

    @gen.coroutine
    def get(self):
        """Redirect GET / to the propagation page for Beijing (北京)."""
        # NOTE(review): the original parked an earlier demo implementation
        # (delayed JSON response) as dead code inside the docstring; removed.
        self.redirect("/propagation/北京", permanent=True)
| 26.266667 | 56 | 0.609137 | 46 | 394 | 5.195652 | 0.717391 | 0.075314 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010101 | 0.246193 | 394 | 14 | 57 | 28.142857 | 0.794613 | 0.360406 | 0 | 0 | 0 | 0 | 0.073529 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
e5b717669e1d29e7b56da69adb84036c8874d734 | 609 | py | Python | dal/oracle/era/run.py | arrayexpress/ae_auto | 78e50cc31997cb5a69d0d74258b6b1a089ba387a | [
"Apache-2.0"
] | null | null | null | dal/oracle/era/run.py | arrayexpress/ae_auto | 78e50cc31997cb5a69d0d74258b6b1a089ba387a | [
"Apache-2.0"
] | 4 | 2020-06-05T19:26:42.000Z | 2022-03-29T21:55:14.000Z | dal/oracle/era/run.py | arrayexpress/ae_auto | 78e50cc31997cb5a69d0d74258b6b1a089ba387a | [
"Apache-2.0"
] | 1 | 2019-03-27T13:15:37.000Z | 2019-03-27T13:15:37.000Z | from dal.oracle.common import execute_select
from dal.oracle.era import db
__author__ = 'Ahmed G. Ali'
def retrieve_run_by_acc(acc):
    """Fetch all RUN rows whose RUN_ID equals *acc* from the ERA database.

    SECURITY NOTE(review): *acc* is interpolated directly into the SQL text;
    if it can originate from untrusted input this is an SQL-injection risk --
    prefer bind variables if `execute_select` supports them.
    """
    sql = """SELECT * FROM RUN WHERE RUN_ID = '%s'""" % acc
    return execute_select(sql, db)
def retrieve_runs_by_submission_acc(submission_acc):
    """Fetch all RUN rows belonging to the submission *submission_acc*.

    SECURITY NOTE(review): string interpolation into SQL -- injection risk if
    *submission_acc* is untrusted; prefer bind variables if available.
    """
    sql = """SELECT * FROM RUN WHERE SUBMISSION_ID = '%s'""" % submission_acc
    return execute_select(sql, db)
def retrieve_runs_by_experiment_acc(exp_acc):
    """Fetch all RUN rows belonging to the experiment *exp_acc*.

    SECURITY NOTE(review): string interpolation into SQL -- injection risk if
    *exp_acc* is untrusted; prefer bind variables if available.
    """
    sql = """SELECT * FROM RUN WHERE EXPERIMENT_ID = '%s'""" % exp_acc
    return execute_select(sql, db)
| 27.681818 | 77 | 0.684729 | 89 | 609 | 4.393258 | 0.325843 | 0.132992 | 0.092072 | 0.122762 | 0.531969 | 0.531969 | 0.347826 | 0.347826 | 0.347826 | 0 | 0 | 0 | 0.195402 | 609 | 21 | 78 | 29 | 0.797959 | 0.054187 | 0 | 0.25 | 0 | 0 | 0.23993 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.166667 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
e5c2c0557ebb90c9528fda14b447bd1656378862 | 2,845 | py | Python | pydocspec/test/test_visitors.py | tristanlatr/pydocspec | 25965310c7f576d2f2e877f8a1b2984bb77725b9 | [
"MIT"
] | null | null | null | pydocspec/test/test_visitors.py | tristanlatr/pydocspec | 25965310c7f576d2f2e877f8a1b2984bb77725b9 | [
"MIT"
] | 5 | 2021-08-28T14:33:13.000Z | 2022-02-27T23:51:48.000Z | pydocspec/test/test_visitors.py | tristanlatr/pydocspec | 25965310c7f576d2f2e877f8a1b2984bb77725b9 | [
"MIT"
] | 1 | 2022-02-10T01:55:14.000Z | 2022-02-10T01:55:14.000Z | import pydocspec
from pydocspec.visitors import PrintVisitor, FilterVisitor
from .fixtures import root1, root3, root4
from . import CapSys
def test_visitors(capsys:CapSys, root1: pydocspec.TreeRoot) -> None:
    """PrintVisitor dumps the module tree; a FilterVisitor whose predicate
    rejects Data objects then prunes every Data node in place."""
    module = root1.root_modules[0]
    visitor = PrintVisitor(colorize=False)

    module.walk(visitor)
    captured = capsys.readouterr().out
    # Expected dump must match byte-for-byte (pipes encode tree depth).
    assert captured == """:0 - Module: a
| :1 - Indirection: Union
| :2 - Class: foo
| | :4 - Data: val
| | :5 - Data: alias
| | :6 - Function: __init__
| :8 - Data: saila
"""

    predicate = lambda ob: not isinstance(ob, pydocspec.Data) # removes any Data entries
    filter_visitor = FilterVisitor(predicate)
    module.walk(filter_visitor)

    # Re-dump: all Data nodes must be gone after the filtering walk.
    module.walk(visitor)
    captured = capsys.readouterr().out
    assert captured == """:0 - Module: a
| :1 - Indirection: Union
| :2 - Class: foo
| | :6 - Function: __init__
"""
def test_visitors2(capsys: CapSys, root3: pydocspec.TreeRoot) -> None:
    """FilterVisitor with a name-based predicate removes single-underscore
    (private) members while keeping dunder members like __init__."""
    module = root3.root_modules[0]
    visitor = PrintVisitor(colorize=False)

    module.walk(visitor)
    captured = capsys.readouterr().out
    assert captured == """:0 - Module: a
| :1 - Indirection: Union
| :2 - Class: foo
| | :4 - Data: _val
| | :5 - Data: _alias
| | :6 - Function: __init__
| :8 - Data: saila
"""

    # removes entries starting by one underscore that are not dunder methods, aka private API.
    # (Precedence: reads as `not startswith("_") or (startswith("__") and endswith("__"))`.)
    predicate = lambda ob: not ob.name.startswith("_") or ob.name.startswith("__") and ob.name.endswith("__")
    filter_visitor = FilterVisitor(predicate)
    module.walk(filter_visitor)

    module.walk(visitor)
    captured = capsys.readouterr().out
    assert captured == """:0 - Module: a
| :1 - Indirection: Union
| :2 - Class: foo
| | :6 - Function: __init__
| :8 - Data: saila
"""
def test_visitors3(capsys: CapSys, root1: pydocspec.TreeRoot, root4:pydocspec.TreeRoot) -> None:
    """Removing a class changes name resolution, and add_siblings() grafts an
    object from another tree into the walk output."""
    module = root1.root_modules[0]
    visitor = PrintVisitor(colorize=False)

    module.walk(visitor)
    captured = capsys.readouterr().out
    assert captured == """:0 - Module: a
| :1 - Indirection: Union
| :2 - Class: foo
| | :4 - Data: val
| | :5 - Data: alias
| | :6 - Function: __init__
| :8 - Data: saila
"""
    # While `foo` exists, `saila` resolves through it to the concrete target.
    assert module.expand_name('saila') == 'a.foo.val'

    # removes the foo class
    predicate = lambda ob: False if ob.name=="foo" else True
    filter_visitor = FilterVisitor(predicate)
    module.walk(filter_visitor)

    module.walk(visitor)
    captured = capsys.readouterr().out
    assert captured == """:0 - Module: a
| :1 - Indirection: Union
| :8 - Data: saila
"""
    # With `foo` gone, resolution stops at the unresolvable alias chain.
    assert module.expand_name('saila') == 'foo.alias'

    # Graft `a.f` from the root4 tree next to `saila`; it appears in the dump
    # with line number -1 (no source line in this tree).
    module.get_member('saila').add_siblings(root4.all_objects['a.f'])
    module.walk(visitor)
    captured = capsys.readouterr().out
    assert captured == """:0 - Module: a
| :1 - Indirection: Union
| :8 - Data: saila
| :-1 - Function: f
"""
""" | 29.947368 | 109 | 0.661863 | 353 | 2,845 | 5.209632 | 0.235127 | 0.054377 | 0.064709 | 0.09516 | 0.69603 | 0.668298 | 0.66286 | 0.66286 | 0.633496 | 0.633496 | 0 | 0.023054 | 0.191916 | 2,845 | 95 | 110 | 29.947368 | 0.77686 | 0.047452 | 0 | 0.795181 | 0 | 0 | 0.30181 | 0 | 0 | 0 | 0 | 0 | 0.108434 | 1 | 0.036145 | false | 0 | 0.048193 | 0 | 0.084337 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e5de6985beac654bb327c4266e63e08d8a566ff3 | 613 | py | Python | python/strings/score-of-parenthesis.py | prakashsellathurai/a-grim-loth | 656e6eea8e6c1761f2705519ea05d6ddb1d4beb7 | [
"MIT"
] | 4 | 2021-06-26T17:18:47.000Z | 2022-02-02T15:02:27.000Z | python/strings/score-of-parenthesis.py | prakashsellathurai/a-grim-loth | 656e6eea8e6c1761f2705519ea05d6ddb1d4beb7 | [
"MIT"
] | 8 | 2021-06-29T07:00:32.000Z | 2021-12-01T11:26:22.000Z | python/strings/score-of-parenthesis.py | prakashsellathurai/a-grim-loth | 656e6eea8e6c1761f2705519ea05d6ddb1d4beb7 | [
"MIT"
] | 3 | 2021-07-14T14:42:08.000Z | 2021-12-07T19:36:53.000Z | import unittest
def scoreOfParentheses(s: str) -> int:
    """Score a balanced parenthesis string: "()" scores 1, "AB" scores A+B,
    and "(A)" scores 2*A."""
    score = 0
    depth = 0
    for idx, ch in enumerate(s):
        if ch == "(":
            depth += 1
            continue
        # Closing paren: a "()" pair at nesting level `depth` contributes 2**depth.
        depth -= 1
        if s[idx - 1] == "(":
            score += 1 << depth
    return score
class TestStringMethods(unittest.TestCase):
    """Regression cases for scoreOfParentheses."""

    def test_all_cases(self):
        cases = (("()", 1), ("(())", 2), ("()()", 2), ("(()(()))", 6))
        for text, expected in cases:
            self.assertEqual(scoreOfParentheses(text), expected)
if __name__ == "__main__":
unittest.main()
| 22.703704 | 59 | 0.533442 | 62 | 613 | 5.112903 | 0.5 | 0.189274 | 0.416404 | 0.214511 | 0.318612 | 0.318612 | 0.318612 | 0 | 0 | 0 | 0 | 0.021028 | 0.301794 | 613 | 26 | 60 | 23.576923 | 0.719626 | 0 | 0 | 0 | 0 | 0 | 0.045677 | 0 | 0 | 0 | 0 | 0 | 0.210526 | 1 | 0.105263 | false | 0 | 0.052632 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e5e4e280931c5bb689f34b2d610fb5d37df29632 | 536 | py | Python | Problem Set 0 - Student Version/locator.py | MuhammeedAlaa/MI-Assignemnets | c90deb44c609d55ac5f7be343fed93d32e44c1e8 | [
"MIT"
] | null | null | null | Problem Set 0 - Student Version/locator.py | MuhammeedAlaa/MI-Assignemnets | c90deb44c609d55ac5f7be343fed93d32e44c1e8 | [
"MIT"
] | null | null | null | Problem Set 0 - Student Version/locator.py | MuhammeedAlaa/MI-Assignemnets | c90deb44c609d55ac5f7be343fed93d32e44c1e8 | [
"MIT"
] | null | null | null | from typing import Any, Set, Tuple
from grid import Grid
import utils
def locate(grid: Grid, item: Any) -> Set[Tuple[int, int]]:
    '''
    This function takes a 2D grid and an item
    It should return a set of (x, y) coordinates that specify the locations that contain the given item
    To know how to use the Grid class, see the file "grid.py"
    '''
    # Direct set comprehension: index the grid idiomatically (grid[x, y] calls
    # __getitem__((x, y))) instead of invoking grid.__getitem__ explicitly,
    # and avoid materializing the intermediate list of all coordinates.
    return {(x, y)
            for y in range(grid.height)
            for x in range(grid.width)
            if grid[x, y] == item}
| 38.285714 | 142 | 0.682836 | 93 | 536 | 3.892473 | 0.591398 | 0.033149 | 0.060773 | 0.066298 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007212 | 0.223881 | 536 | 13 | 143 | 41.230769 | 0.862981 | 0.425373 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 0 | 1 | 0.2 | false | 0 | 0.6 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
e5e6bf091128afc444b824ffacf11b102e11b951 | 5,425 | py | Python | webshotter.py | blazeinfosec/webshotter | dbbc9e0740feb10c4270ed9e91916ae7b260d3fa | [
"Apache-2.0"
] | 17 | 2016-05-19T17:51:28.000Z | 2019-12-07T18:33:18.000Z | webshotter.py | blazeinfosec/webshotter | dbbc9e0740feb10c4270ed9e91916ae7b260d3fa | [
"Apache-2.0"
] | 2 | 2017-08-29T22:36:48.000Z | 2020-07-12T17:57:18.000Z | webshotter.py | blazeinfosec/webshotter | dbbc9e0740feb10c4270ed9e91916ae7b260d3fa | [
"Apache-2.0"
] | 12 | 2016-05-19T21:28:54.000Z | 2019-12-07T18:33:11.000Z | #!/usr/bin/python
# webshotter.py - create web page screenshots
#
# with code from http://stackoverflow.com/questions/18067021/how-do-i-generate-a-png-file-w-selenium-phantomjs-from-a-string
#
# by Julio Cesar Fort, Wildfire Labs /// Blaze Information Security
#
# Copyright 2015-2016, Blaze Information Security
# https://www.blazeinfosec.com
import sys
import datetime
import os
try:
from selenium import webdriver, common
except ImportError:
print("[!] Error importing Selenium web driver. Consult the README.md file for installation instructions.")
sys.exit(-1)
import argparse
import threading
try:
import Queue
except ImportError: # Python 3 in use Queue is now queue
import queue as Queue
# define global variables
queue = Queue.Queue()   # shared work queue of URLs consumed by screenshot threads
height = 0              # browser window height (0 = PhantomJS default size)
width = 0               # browser window width (0 = PhantomJS default size)
VERBOSE = False

# Test for PhantomJS and exit if not found.
# NOTE(review): uses the builtin exit() rather than sys.exit(); fine for a
# script, but sys.exit(1) would be the conventional choice.
try:
    driver = webdriver.PhantomJS()
    driver = None
except (NameError, common.exceptions.WebDriverException):
    print("[!] PhantomJS has not been detected. Consult the README.md file for installation instructions.")
    exit(1)
class ThreadScreenshotter(threading.Thread):
    """Worker thread: pulls URLs from the shared queue and screenshots each one.

    Runs forever; main() starts these as daemon threads so they die with the
    process once queue.join() returns.
    """

    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue  # shared Queue of URLs to process

    def run(self):
        # Screenshot dimensions are module-level globals set by main().
        global height
        global width
        while True:
            url = self.queue.get()  # blocks until a URL is available
            print("Taking screenshot of " + url)
            if VERBOSE:
                print(self.getName() + " received argument: " + url)
            take_screenshot(url, height, width)
            self.queue.task_done() # notify the end of the task
def main():
    """Parse CLI arguments, spawn screenshot worker threads and feed them URLs.

    Command line: urllist [-x HEIGHT] [-y WIDTH] [-t THREADS] [-v] [-l DIR].
    Exits with a non-zero status if the URL list file cannot be read.
    """
    global VERBOSE
    global height
    global width
    global location

    argparser = argparse.ArgumentParser(description='Webshotter - Create web page screenshots')
    argparser.add_argument('urllist', help='file containing a list of URLs. Screenshots will be made of each URL') # mandatory argument
    argparser.add_argument('-x', '--height', help='height of the headless browser, and the screenshot')
    argparser.add_argument('-y', '--width', help='width of the headless browser, and the screenshot')
    argparser.add_argument('-t', '--threads', help='number of concurrent threads (default: 1)')
    argparser.add_argument('-v', '--verbose', action='store_true', help='toggle verbose mode')
    argparser.add_argument('-l', '--location', help='save all screenshots into a particular directory')
    args = argparser.parse_args()

    url_list = args.urllist
    num_threads = 1  # default value

    # Setup threads and their queues.
    if args.threads and int(args.threads) > 0:
        num_threads = int(args.threads)

    if args.verbose:
        VERBOSE = True

    if args.location:
        location = str(args.location)
        if not os.path.exists(location):  # create a new dir if it does not exist
            os.makedirs(location)
    else:
        location = os.getcwd()
    if not location.endswith("/"):
        location = location + "/"

    if VERBOSE:
        print("Starting with " + str(num_threads) + " threads")

    # Parse the window size once -- it is loop-invariant (the original code
    # re-parsed it on every thread-spawn iteration).
    if args.height and args.width and int(args.height) > 0 and int(args.width) > 0:
        height = int(args.height)
        width = int(args.width)

    for _ in range(num_threads):
        t = ThreadScreenshotter(queue)
        t.setDaemon(True)
        t.start()

    # 'with' guarantees the file is closed even if reading fails; exit with a
    # non-zero status on error (the original exited with 0, masking failures).
    try:
        with open(url_list, "r") as fd:
            urls = fd.read().splitlines()
    except IOError as err:
        print("[!] Error opening URL list file: %s" % str(err))
        sys.exit(1)

    # Read URLs from the file and put them in the queue used by the threads.
    for url in urls:
        if not url.startswith("http"):
            url = "http://" + url
        queue.put(url)

    queue.join() # wait for the queue to process everything
def take_screenshot(url, height, width):
    ''' Takes screenshot using PhantomJS's webdriver and saves the file on disk
        This function gets called by the threaded screenshot class.

        The browser is always shut down via `finally` -- the original leaked
        the PhantomJS process whenever an exception fired before driver.quit().
    '''
    date_hour = get_date_hour()
    save_file = parse_filename(url + '-screenshot-' + date_hour + '.png')
    save_file = str(location) + save_file

    driver = None
    try:
        driver = webdriver.PhantomJS("phantomjs", service_args=['--ignore-ssl-errors=true'])
        if height > 0 and width > 0:
            driver.set_window_size(height, width)
        driver.implicitly_wait(2)
        driver.set_page_load_timeout(2)

        viable_screenshot = True
        try:
            driver.get(url)
        except common.exceptions.TimeoutException:
            print("Timeout on " + str(url))
            viable_screenshot = False

        if viable_screenshot:
            driver.save_screenshot(save_file)
    except common.exceptions.WebDriverException as e:
        print("Error in PhantomJS: " + str(e))
    finally:
        # Always release the browser process, even on failure.
        if driver is not None:
            driver.quit()
def get_date_hour():
    ''' Returns date and hour in a friendly format for the filename
        (e.g. "2024-01-02_030405").
    '''
    # strftime produces the same "YYYY-MM-DD_HHMMSS" string that the original
    # built via str()/replace()/find(".") truncation, in a single step.
    return datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S")
def parse_filename(url):
    ''' This function sanitizes the URL to a filename to be saved in the filesystem '''
    # Single translation pass mapping every ':' and '/' to '_'.
    return url.translate(str.maketrans(":/", "__"))
if __name__ == "__main__":
main()
| 31.540698 | 138 | 0.650507 | 696 | 5,425 | 4.972701 | 0.337644 | 0.025426 | 0.034672 | 0.013869 | 0.095926 | 0.060676 | 0.060676 | 0.060676 | 0.032361 | 0.032361 | 0 | 0.00775 | 0.238894 | 5,425 | 171 | 139 | 31.725146 | 0.830467 | 0.112995 | 0 | 0.117647 | 1 | 0 | 0.179183 | 0.005478 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.092437 | null | null | 0.067227 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e5ea03884d399c6f4afc4559e409add279266978 | 2,577 | py | Python | seekr2/tests/test_common_base.py | astokely/seekr2 | 2fd8496dc885339437678a729b1f97a4b0bf9cfd | [
"MIT"
] | null | null | null | seekr2/tests/test_common_base.py | astokely/seekr2 | 2fd8496dc885339437678a729b1f97a4b0bf9cfd | [
"MIT"
] | null | null | null | seekr2/tests/test_common_base.py | astokely/seekr2 | 2fd8496dc885339437678a729b1f97a4b0bf9cfd | [
"MIT"
] | null | null | null | """
test_common_base.py
"""
import pytest
import random
import numpy as np
from parmed import unit
import seekr2.modules.common_base as base
def test_strBool():
    """strBool maps case-insensitive 'true'/'false' strings to booleans
    and raises on anything else."""
    for truthy in ('True', 'true', 'TRUE'):
        assert base.strBool(truthy) == True
    for falsy in ('False', 'false', 'FALSE'):
        assert base.strBool(falsy) == False
    with pytest.raises(Exception):
        base.strBool('balderdash')
    return
def test_order_files_numerically():
    """order_files_numerically must restore numeric path order after a shuffle.

    NOTE(review): relies on random.shuffle without a fixed seed; the outcome is
    still deterministic because only the sorted result is compared.
    """
    string_list = ["/path/to/anchor0/output0_0", "/path/to/anchor0/output0_1",
                   "/path/to/anchor0/output0_2", "/path/to/anchor0/output1_0",
                   "/path/to/anchor0/output1_1", "/path/to/anchor0/output1_2",
                   "/path/to/anchor1/output0_0", "/path/to/anchor1/output0_1",
                   "/path/to/anchor1/output2_0", "/path/to/anchor1/output10_0"]
    desired_list = string_list[:]
    random.shuffle(string_list)
    ordered_list = base.order_files_numerically(string_list)
    for item1, item2 in zip(ordered_list, desired_list):
        assert item1==item2
    return
def test_box_vectors():
    """Round-trip a triclinic box through Box_vectors and back to a Quantity."""
    box_vector_q = unit.Quantity(
        [[64.0, 0.0, 0.0],
         [-21.0, 61.0, 0.0],
         [-21.0, -30.0, 53.0]],
        unit=unit.angstrom)
    box_vector = base.Box_vectors()
    box_vector.from_quantity(box_vector_q)

    # Same values as the original element-by-element asserts, in nanometers.
    expected_nm = [[6.4, 0.0, 0.0],
                   [-2.1, 6.1, 0.0],
                   [-2.1, -3.0, 5.3]]

    # Attributes ax..cz correspond to rows a/b/c and axes x/y/z.
    for row_name, row in zip("abc", expected_nm):
        for axis_name, value in zip("xyz", row):
            assert np.isclose(getattr(box_vector, row_name + axis_name), value)

    box_vector_q2 = box_vector.to_quantity()
    values = box_vector_q2.value_in_unit(unit.nanometers)
    for i in range(3):
        for j in range(3):
            assert np.isclose(values[i][j], expected_nm[i][j])
| 39.045455 | 79 | 0.670935 | 406 | 2,577 | 4.046798 | 0.189655 | 0.131467 | 0.164334 | 0.1972 | 0.578211 | 0.518564 | 0.518564 | 0.389531 | 0.389531 | 0.389531 | 0 | 0.05874 | 0.18083 | 2,577 | 66 | 80 | 39.045455 | 0.719564 | 0.007373 | 0 | 0.037037 | 0 | 0 | 0.116817 | 0.102313 | 0 | 0 | 0 | 0 | 0.462963 | 1 | 0.055556 | false | 0 | 0.092593 | 0 | 0.185185 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e5f2b2cfecb472476c1298a8e4bbdd26fe160c31 | 4,606 | py | Python | gw_bot/api/commands/OSS_Bot_Commands.py | filetrust/GW-Bot | af90e083cc91f1a22dfe5a927882c2cae228f943 | [
"Apache-2.0"
] | 1 | 2020-01-06T11:48:20.000Z | 2020-01-06T11:48:20.000Z | gw_bot/api/commands/OSS_Bot_Commands.py | filetrust/GW-Bot | af90e083cc91f1a22dfe5a927882c2cae228f943 | [
"Apache-2.0"
] | 5 | 2020-02-01T19:18:03.000Z | 2020-03-09T23:40:13.000Z | gw_bot/api/commands/OSS_Bot_Commands.py | filetrust/GW-Bot | af90e083cc91f1a22dfe5a927882c2cae228f943 | [
"Apache-2.0"
] | 1 | 2020-02-27T18:43:11.000Z | 2020-02-27T18:43:11.000Z | from osbot_aws.apis.Lambda import Lambda
from gw_bot.api.Slack_Commands_Helper import Slack_Commands_Helper
from gw_bot.api.commands.Maps_Commands import Maps_Commands
from osbot_utils.utils import Misc
def use_command_class(slack_event, params, target_class):
    """Dispatch *params* to ``target_class`` via Slack_Commands_Helper.

    :param slack_event: raw Slack event dict (channel/user are read from it)
    :param params: command parameters forwarded to the helper
    :param target_class: commands class handed to Slack_Commands_Helper
    :return: (None, None) when a channel was present in the event,
             otherwise the (text, attachments) pair produced by the helper
    """
    event_channel = Misc.get_value(slack_event, 'channel')
    event_user = Misc.get_value(slack_event, 'user')
    text, attachments = Slack_Commands_Helper(target_class).invoke(
        team_id=event_user, channel=event_channel, params=params)
    # With a known channel the reply text is discarded by the caller.
    return (None, None) if event_channel else (text, attachments)
class OSS_Bot_Commands:  # TODO: move to separate class
    """Slack bot command handlers.

    Each command returns a ``(text, attachments)`` pair for the Slack reply,
    or ``(None, None)`` when the work is delegated asynchronously to a
    lambda.  Disabled commands (dev, site, faq, participant, schedule,
    sessions) used ``use_command_class`` and can be restored the same way
    as ``maps``.
    """

    gsbot_version = 'v0.40 (GW Bot)'

    @staticmethod
    def aws(slack_event=None, params=None):
        """Delegate the ``aws`` command to its lambda (fire and forget)."""
        Lambda('gw_bot.lambdas.aws.commands').invoke_async({'params': params, 'data': slack_event})
        return None, None

    @staticmethod
    def browser(slack_event=None, params=None):
        """Delegate the ``browser`` command to the browser lambda."""
        Lambda('osbot_browser.lambdas.lambda_browser').invoke_async({'params': params, 'data': slack_event})
        return None, None

    @staticmethod
    def docs(slack_event=None, params=None):
        """Delegate the ``docs`` command to the gdocs lambda."""
        Lambda('osbot_gsuite.lambdas.gdocs').invoke_async({'params': params, 'data': slack_event})
        return None, None

    @staticmethod
    def gw(slack_event=None, params=None):
        """Delegate the ``gw`` command to its lambda."""
        # return use_command_class(slack_event, params, GW_Commands)
        Lambda('gw_bot.lambdas.gw.commands').invoke_async({'params': params, 'data': slack_event})
        return None, None

    @staticmethod
    def graph(slack_event, params=None):
        """Delegate the ``graph`` command to the jira graph lambda."""
        Lambda('osbot_jira.lambdas.graph').invoke_async({'params': params, 'data': slack_event})
        return None, None

    @staticmethod
    def jira(slack_event, params=None):
        """Delegate the ``jira`` command; forwards user/channel/team ids."""
        Lambda('osbot_jira.lambdas.jira').invoke_async({"params": params,
                                                       "user": slack_event.get('user'),
                                                       "channel": slack_event.get('channel'),
                                                       'team_id': slack_event.get('team_id')})
        return None, None

    @staticmethod
    def jp(slack_event=None, params=None):
        """Alias for :meth:`jupyter`."""
        return OSS_Bot_Commands.jupyter(slack_event, params)

    @staticmethod
    def jupyter(slack_event=None, params=None):
        """Delegate the ``jupyter`` command to the osbot lambda."""
        Lambda('osbot_jupyter.lambdas.osbot').invoke_async({'params': params, 'data': slack_event})
        return None, None

    @staticmethod
    def hello(slack_event=None, params=None):
        """Greet the user who sent the event."""
        user = Misc.get_value(slack_event, 'user')
        return 'Hello <@{0}>, how can I help you?'.format(user), []

    @staticmethod
    def help(*params):
        """Return a title plus one attachment listing all public commands."""
        commands = [func for func in dir(OSS_Bot_Commands)
                    if callable(getattr(OSS_Bot_Commands, func)) and not func.startswith("__")]
        title = "*Here are the commands available*"
        attachment_text = ""
        for command in commands:
            # BUG FIX: was `command is not 'bad_cmd'` — identity comparison
            # with a str literal is implementation-dependent; use equality.
            if command != 'bad_cmd':
                attachment_text += " • {0}\n".format(command)
        return title, [{'text': attachment_text, 'color': 'good'}]

    @staticmethod
    def screenshot(slack_event=None, params=None):
        """Delegate a screenshot request to the browser lambda."""
        # Robustness fix: with the default params=None, insert() raised
        # AttributeError before the lambda was even invoked.
        if params is None:
            params = []
        params.insert(0, 'screenshot')
        Lambda('osbot_browser.lambdas.lambda_browser').invoke_async({'params': params, 'data': slack_event})
        return None, None

    @staticmethod
    def store(slack_event=None, params=None):
        """Delegate the ``store`` command to its lambda."""
        Lambda('gw_bot.lambdas.gw.store.commands').invoke_async({'params': params, 'data': slack_event})
        return None, None

    @staticmethod
    def maps(slack_event=None, params=None):
        """Handle the ``maps`` command synchronously via Maps_Commands."""
        return use_command_class(slack_event, params, Maps_Commands)

    @staticmethod
    def version(*params):
        """Return the bot version string and empty attachments."""
        return OSS_Bot_Commands.gsbot_version, []
| 38.066116 | 146 | 0.661528 | 558 | 4,606 | 5.236559 | 0.166667 | 0.143737 | 0.07666 | 0.109514 | 0.554415 | 0.513005 | 0.492129 | 0.439425 | 0.410678 | 0.38193 | 0 | 0.001669 | 0.219496 | 4,606 | 120 | 147 | 38.383333 | 0.810848 | 0.178897 | 0 | 0.383562 | 0 | 0 | 0.136884 | 0.068442 | 0 | 0 | 0 | 0 | 0 | 1 | 0.205479 | false | 0 | 0.054795 | 0.041096 | 0.506849 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
e5f58b94a0c004b47b88bf44aa5c8eada8bda423 | 197 | py | Python | codencounter2/eval.py | saisankargochhayat/algo_quest | a24f9a22c019ab31d56bd5a7ca5ba790d54ce5dc | [
"Apache-2.0"
] | 3 | 2017-02-15T20:55:04.000Z | 2018-09-26T18:48:24.000Z | codencounter2/eval.py | saisankargochhayat/algo_quest | a24f9a22c019ab31d56bd5a7ca5ba790d54ce5dc | [
"Apache-2.0"
] | 4 | 2017-10-07T18:59:20.000Z | 2019-10-08T05:43:25.000Z | codencounter2/eval.py | saisankargochhayat/algo_quest | a24f9a22c019ab31d56bd5a7ca5ba790d54ce5dc | [
"Apache-2.0"
# Read the number of expressions, then evaluate each input line as an
# arithmetic expression, printing the integer result.
n = int(input())
for _ in range(n):
    expression = input()
    try:
        # SECURITY NOTE: eval() on raw user input executes arbitrary code;
        # acceptable only in a trusted contest-judge/CLI context.
        print(int(eval(expression)))
    except ZeroDivisionError:
        print("Division by zero encountered")
| 21.888889 | 45 | 0.588832 | 24 | 197 | 4.833333 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.28934 | 197 | 8 | 46 | 24.625 | 0.828571 | 0 | 0 | 0 | 0 | 0 | 0.142132 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e5fd93e642568bd55df8836bf0a10ba0d3ddce97 | 332 | py | Python | dbLogin.py | shashikarsiddharth/trainee-management-system | de1b917dfda2de31a5d66e9a81954ff46593d838 | [
"Apache-2.0"
] | null | null | null | dbLogin.py | shashikarsiddharth/trainee-management-system | de1b917dfda2de31a5d66e9a81954ff46593d838 | [
"Apache-2.0"
] | null | null | null | dbLogin.py | shashikarsiddharth/trainee-management-system | de1b917dfda2de31a5d66e9a81954ff46593d838 | [
"Apache-2.0"
] | null | null | null | import getpass
def get_db_login_details():
    """Prompt interactively for database login credentials.

    The password prompt is hidden via :func:`getpass.getpass`.

    :return: a ``(username, password, database)`` tuple of strings
    """
    print("Enter Login Credentials for Database Connectivity")
    # input() already returns str; the previous str(...) wrappers were redundant.
    username = input("Enter username:")
    password = getpass.getpass()
    database = input("Enter database:")
    return (username, password, database)
| 33.2 | 62 | 0.710843 | 37 | 332 | 6.297297 | 0.567568 | 0.137339 | 0.111588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.174699 | 332 | 9 | 63 | 36.888889 | 0.850365 | 0.141566 | 0 | 0 | 0 | 0 | 0.285199 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0.428571 | 0.142857 | 0 | 0.428571 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
f9061078d35c1ef06d1cfce2512bda0d5a2b074a | 160 | py | Python | src/app/utils/consts/langs.py | serious-notreally/cappa | 993a8df35ca6c3b22f3ca811937fd29c07fc71aa | [
"MIT"
] | 9 | 2020-04-05T07:35:55.000Z | 2021-08-03T05:50:05.000Z | src/app/utils/consts/langs.py | serious-notreally/cappa | 993a8df35ca6c3b22f3ca811937fd29c07fc71aa | [
"MIT"
] | 89 | 2020-01-26T11:50:06.000Z | 2022-03-31T07:14:18.000Z | src/app/utils/consts/langs.py | serious-notreally/cappa | 993a8df35ca6c3b22f3ca811937fd29c07fc71aa | [
"MIT"
] | 13 | 2020-03-10T14:45:07.000Z | 2021-07-31T02:43:40.000Z |
PYTHON = "python"
CPP = "cpp"
CSHARP = "csharp"
JAVA = 'java'
CHOICES = (
(PYTHON, 'Python3'),
(CPP, 'C++'),
(CSHARP, 'C#'),
(JAVA, 'Java')
)
| 12.307692 | 24 | 0.48125 | 17 | 160 | 4.529412 | 0.411765 | 0.207792 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008475 | 0.2625 | 160 | 12 | 25 | 13.333333 | 0.644068 | 0 | 0 | 0 | 0 | 0 | 0.220126 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f90b526b5cca62f752ff84cb1c84d5a96960251e | 614 | py | Python | services/server/routers/comments.py | 0x0elliot/prep-project-4.1.2 | c6cd02813bba709e9896e9a78a4b8bd93386e924 | [
"MIT"
] | 1 | 2021-10-12T05:08:20.000Z | 2021-10-12T05:08:20.000Z | services/server/routers/comments.py | 0x0elliot/prep-project-4.1.2 | c6cd02813bba709e9896e9a78a4b8bd93386e924 | [
"MIT"
] | 20 | 2021-10-08T23:35:10.000Z | 2021-10-31T07:17:20.000Z | services/server/routers/comments.py | 0x0elliot/prep-project-4.1.2 | c6cd02813bba709e9896e9a78a4b8bd93386e924 | [
"MIT"
] | 9 | 2021-10-09T21:46:26.000Z | 2021-10-31T06:00:19.000Z | from fastapi import APIRouter, Depends, HTTPException
from db.crud import get_db
import schemas
from sqlalchemy.orm import Session
from db import crud, models
from verify import get_current_user
router = APIRouter()
@router.post('/', response_model=schemas.Comment)
async def email_subscribe(comment: schemas.CommentBase, db: Session = Depends(get_db), user: models.User = Depends(get_current_user)):
    """Create a comment on a place for the authenticated user.

    Raises HTTP 404 when ``comment.place_id`` matches no place.
    """
    target_place = crud.get_place_by_id(db, comment.place_id)
    if target_place is None:
        raise HTTPException(status_code=404, detail="Place invalid")
    return crud.create_comment(db, user, target_place, comment.body)
| 32.315789 | 134 | 0.773616 | 88 | 614 | 5.238636 | 0.477273 | 0.02603 | 0.060738 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00566 | 0.136808 | 614 | 18 | 135 | 34.111111 | 0.864151 | 0 | 0 | 0 | 0 | 0 | 0.022801 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.461538 | 0 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
f90be812a9d16768674575cd17d0d77d34a76e73 | 10,097 | py | Python | Minor Project1/Mind_Space_Server/restmindapi/emotion.py | manas-16/MindSpace | e6cee8c8d16d97003ebda1913f186bd4e21f68e4 | [
"MIT"
] | null | null | null | Minor Project1/Mind_Space_Server/restmindapi/emotion.py | manas-16/MindSpace | e6cee8c8d16d97003ebda1913f186bd4e21f68e4 | [
"MIT"
] | null | null | null | Minor Project1/Mind_Space_Server/restmindapi/emotion.py | manas-16/MindSpace | e6cee8c8d16d97003ebda1913f186bd4e21f68e4 | [
"MIT"
] | null | null | null | """import nltk
import re
#import numpy as np
#import pandas as pd
from nltk.corpus import stopwords
from nltk.stem import *
#from textblob.classifiers import NaiveBayesClassifier
#from sklearn.cross_validation import KFold
from nltk.classify.naivebayes import NaiveBayesClassifier
#from llda import LLDA
#from word_prob_dist import word_distribution
from optparse import OptionParser
# In[3]:
'''
Reading the Dataset (ISEAR Dataset)
'''
Data = pd.read_csv('my_table.csv',header=None)
'''
36 - Class Label
40 - Sentence
'''
# In[4]:
'''
Emotion Labels
'''
emotion_labels = ['joy', 'fear', 'anger', 'sadness', 'disgust', 'shame', 'guilt']
# In[5]:
'''
Negation words
'''
negation_words = ['not', 'neither', 'nor', 'but', 'however', 'although', 'nonetheless', 'despite', 'except', 'even though', 'yet']
# In[6]:
'''
Returns a list of all corresponding class labels
'''
def class_labels(emotions):
labels = []
labelset = []
for e in emotions:
labels.append(e)
labelset.append([e])
return labels, labelset
# In[7]:
'''
Removes unnecessary characters from sentences
'''
def removal(sentences):
sentence_list = []
count = 0
for sen in sentences:
count += 1
# print count
# print sen
# print type(sen)
s = nltk.word_tokenize(sen)
characters = ["á", "\xc3", "\xa1", "\n", ",", "."]
new = ' '.join([i for i in s if not [e for e in characters if e in i]])
sentence_list.append(new)
return sentence_list
# In[8]:
'''
POS-TAGGER, returns NAVA words
'''
def pos_tag(sentences):
tags = [] #have the pos tag included
nava_sen = []
for s in sentences:
s_token = nltk.word_tokenize(s)
pt = nltk.pos_tag(s_token)
nava = []
nava_words = []
for t in pt:
if t[1].startswith('NN') or t[1].startswith('JJ') or t[1].startswith('VB') or t[1].startswith('RB'):
nava.append(t)
nava_words.append(t[0])
tags.append(nava)
nava_sen.append(nava_words)
return tags, nava_sen
# In[9]:
'''
Performs stemming
'''
def stemming(sentences):
sentence_list = []
sen_string = []
sen_token = []
stemmer = PorterStemmer()
# i = 0
for sen in sentences:
# print i,
# i += 1
st = ""
for word in sen:
word_l = word.lower()
if len(word_l) >= 3:
st += stemmer.stem(word_l) + " "
sen_string.append(st)
w_set = nltk.word_tokenize(st)
sen_token.append(w_set)
w_text = nltk.Text(w_set)
sentence_list.append(w_text)
return sentence_list, sen_string, sen_token
# In[10]:
'''
Write to file
'''
def write_to_file(filename, text):
o = open(filename,'w')
o.write(str(text))
o.close()
# In[11]:
'''
Creating the dataframe
'''
def create_frame(Data):
emotions = Data[36]
sit = Data[40]
labels, labelset = class_labels(emotions[1:])
sent = removal(sit[1:])
nava, sent_pt = pos_tag(sent)
sentences, sen_string, sen_token = stemming(sent_pt)
frame = pd.DataFrame({0 : labels,
1 : sentences,
2 : sen_string,
3 : sen_token,
4 : labelset})
return frame
# In[12]:
c = create_frame(Data)
# In[20]:
'''
Reads the emotion representative words file
'''
def readfile(filename):
f = open(filename,'r')
representative_words = []
for line in f.readlines():
characters = ["\n", " ", "\r", "\t"]
new = ''.join([i for i in line if not [e for e in characters if e in i]])
representative_words.append(new)
return representative_words
# In[21]:
'''
Makes a list of all words semantically related to an emotion and Stemming
'''
def affect_wordlist(words):
affect_words = []
stemmer = PorterStemmer()
for w in words:
w_l = w.lower()
word_stem = stemmer.stem(w_l)
if word_stem not in affect_words:
affect_words.append(word_stem)
return affect_words
# In[22]:
'''
Creating an emotion wordnet
'''
def emotion_word_set(emotions):
word_set = {}
for e in emotions:
representative_words = readfile(e)
wordlist = affect_wordlist(representative_words)
word_set[e] = wordlist
return word_set
# In[23]:
'''
Lexicon based approach - Check for lexicons
'''
def lexicon_based(sentences, word_set):
text_vector = []
for sen in sentences:
s_vector = []
for word in sen:
w_vector = {}
for emo in word_set:
if word in word_set[emo]:
# print word
try:
if emo not in w_vector[word]:
w_vector[word].append(emo)
except KeyError:
w_vector[word] = [emo]
if w_vector:
s_vector.append(w_vector)
if not s_vector:
text_vector.append(s_vector)
else:
text_vector.append(s_vector)
return text_vector
# In[24]:
'''
Lexicon based approach - Classify based on lexicons
'''
def classify_lexicon(text_vector, labels, emotion_labels):
count = 0
total = 0
for j in range(len(text_vector)):
sen = text_vector[j]
sen_emo = np.empty(len(emotion_labels))
sen_emo.fill(0)
if sen:
total += 1
w_emo = []
for word in sen:
emotions = word.values()[0][0]
# print emotions, type(emotions), j
w_emo.append(emotions)
i = emotion_labels.index(emotions)
sen_emo[i] += 1
# print sen_emo
winner = np.argwhere(sen_emo == np.amax(sen_emo))
indices = winner.flatten().tolist()
for i in indices:
if emotion_labels[i] == labels[j]:
count += 1
break
# else:
# print j, text_vector[j]
accuracy = count/len(text_vector)
tot_accuracy = count/total
return accuracy, tot_accuracy
# In[25]:
e = emotion_word_set(emotion_labels)
l = lexicon_based(c[1],e)
a, b = classify_lexicon(l, c[0], emotion_labels)
# In[26]:
'''
Calculate pmi
'''
def pmi(x, y, sentences):
count_x = 1
count_y = 1
count_xy = 1
for sen in sentences:
if x and y in sentences:
count_xy += 1
count_x += 1
count_y += 1
if x in sentences:
count_x += 1
if y in sentences:
count_y += 1
result = count_xy/(count_x * count_y)
return result
# In[27]:
print a*100, '%'
print b*100, "%"
# In[ ]:
# In[20]:
'''
Getting synonyms from wordnet synsets
'''
from nltk.corpus import wordnet as wn
jw = wn.synsets('shame')
for s in jw:
v = s.name()
print wn.synset(v).lemma_names()
# In[28]:
'''
Creating training/testing set for Naive Bayes classifier TextBlob
'''
def create_dataset_textblob(sentences, emotions):
train = []
sen = []
emo = []
for s in sentences:
sen.append(s)
for e in emotions:
emo.append(e)
for i in range(len(sen)):
s = sen[i]
e = emo[i]
train.append((str(s), e))
return train
# In[29]:
'''
Testing for Naive Bayes Classifier
'''
def testing(cl, test):
print cl.classify('angry')
for s, e in test:
r = cl.classify(s)
print s, e, r
if r == e:
print "*"
# In[30]:
'''
Create dataset for nltk Naive Bayes
'''
def create_data(sentence, emotion):
data = []
for i in range(len(sentence)):
sen = []
for s in sentence[i]:
sen.append(str(s))
emo = emotion[i]
data.append((sen, emo))
return data
# In[31]:
'''
Get all words in dataset
'''
def get_words_in_dataset(dataset):
all_words = []
for (words, sentiment) in dataset:
all_words.extend(words)
return all_words
# In[32]:
'''
Getting frequency dist of words
'''
def get_word_features(wordlist):
wordlist = nltk.FreqDist(wordlist)
word_features = wordlist.keys()
return word_features
# In[33]:
'''
Extacting features
'''
def extract_features(document):
document_words = set(document)
features = {}
for word in word_features:
features['contains(%s)' % word] = (word in document_words)
return features
# In[34]:
'''
Create test data
'''
def create_test(sentence, emotion):
data = []
sen = []
emo = []
for s in sentence:
sen.append(str(s))
for e in emotion:
emo.append(e)
for i in range(len(sen)):
temp = []
temp.append(sen[i])
temp.append(emo[i])
data.append(temp)
return data
# In[35]:
'''
Classifier
'''
def classify_dataset(data):
return classifier.classify(extract_features(nltk.word_tokenize(data)))
# In[36]:
'''
Get accuracy
'''
def get_accuracy(test_data, classifier):
total = accuracy = float(len(test_data))
for data in test_data:
if classify_dataset(data[0]) != data[1]:
accuracy -= 1
print('Total accuracy: %f%% (%d/20).' % (accuracy / total * 100, accuracy))
# # In[37]:
# # Create training and testing data
# sen = c[3]
# emo = c[0]
# l = len(c[3])
# limit = (9*l)//10
# sente = c[2]
# Data = create_data(sen[:limit], emo[:limit])
# test_data = create_test(sente[limit:], emo[limit:])
# # In[38]:
# # extract the word features out from the training data
# word_features = get_word_features( get_words_in_dataset(Data))
# # In[39]:
# # get the training set and train the Naive Bayes Classifier
# training_set = nltk.classify.util.apply_features(extract_features, Data)
# classifier = NaiveBayesClassifier.train(training_set)
# # In[40]:
# get_accuracy(test_data, classifier)
# In[19]:
b = word_distribution(emotion_labels,c[1],c[0])
o = open('emotion_words.txt','w')
o.write(str(b))
o.close()
# In[ ]:
""" | 20.522358 | 130 | 0.575914 | 1,331 | 10,097 | 4.236664 | 0.199098 | 0.020748 | 0.006384 | 0.012059 | 0.071289 | 0.039723 | 0.019507 | 0.019507 | 0.019507 | 0.009931 | 0 | 0.017832 | 0.294642 | 10,097 | 492 | 131 | 20.522358 | 0.77394 | 0.998217 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f9127d6cb2b0629a4da0bb03629602256b80458a | 3,149 | py | Python | app/models.py | nickobrad/thoughts | 128e5648a167e7f2dd9cd18391a48624fc79d2db | [
"MIT"
] | null | null | null | app/models.py | nickobrad/thoughts | 128e5648a167e7f2dd9cd18391a48624fc79d2db | [
"MIT"
] | null | null | null | app/models.py | nickobrad/thoughts | 128e5648a167e7f2dd9cd18391a48624fc79d2db | [
"MIT"
] | null | null | null | from . import db
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from . import login_manager
from datetime import datetime, date
@login_manager.user_loader
def load_user(userName):
    # Flask-Login callback: reload a user from the id stored in the session.
    # User's primary key is the username (see User below), so a plain
    # primary-key lookup by the (stringified) username works here.
    return User.query.get(str(userName))
class User(UserMixin, db.Model):
    """Registered user account; ``username`` is the primary key."""

    def get_id(self):
        # Flask-Login hook: the value stored in the session to identify
        # this user (the username, since it is the primary key).
        return (self.username)

    __tablename__ = 'users'
    username = db.Column(db.String(255),unique = True, primary_key = True)
    firstname = db.Column(db.String(255))
    secondname = db.Column(db.String(255))
    email = db.Column(db.String(255), unique = True, index = True)
    profile_picture = db.Column(db.String())
    profile_bio = db.Column(db.String(255))
    # Hashed password only — see the write-only `password` property below.
    secured_password = db.Column(db.String(255))
    pitches = db.relationship('Pitch', backref = 'moto', lazy = 'dynamic')
    # NOTE(review): backref name 'userzs' looks like a typo — confirm
    # nothing depends on it before renaming.
    commentsByMe = db.relationship('PitchComment', backref = 'userzs', lazy = 'dynamic')

    @property
    def password(self):
        # The plain-text password is write-only; reading it is an error.
        raise AttributeError('You cannot view a users password')

    @password.setter
    def password(self, password):
        # Store only a salted hash, never the plain text.
        self.secured_password = generate_password_hash(password)

    def verify_password(self, password):
        # Compare a candidate password against the stored hash.
        return check_password_hash(self.secured_password, password)
class Category(db.Model):
    """Pitch category lookup table."""
    __tablename__ = 'cats'
    id = db.Column(db.Integer, primary_key = True)
    category_name = db.Column(db.String(255))
    # Pitches in this category (linked via Pitch.categoryOfPitch -> cats.id).
    pitch = db.relationship('Pitch', backref = 'categ', lazy = "dynamic")
class Pitch(db.Model):
    """A pitch posted by a user, with vote counters and comments."""
    __tablename__ = 'pitch'
    id = db.Column(db.Integer, primary_key = True)
    pitch = db.Column(db.String)
    categoryOfPitch = db.Column(db.Integer, db.ForeignKey("cats.id"))
    date_posted = db.Column(db.DateTime, default = date.today)
    user = db.Column(db.String, db.ForeignKey("users.username"))
    upvote = db.Column(db.Integer, default = 0)
    downvote = db.Column(db.Integer, default = 0)
    comments = db.relationship('PitchComment', backref = 'pitch', lazy = "dynamic")

    def save_pitch(self):
        """Add this pitch to the session and commit."""
        db.session.add(self)
        db.session.commit()

    def delete_pitch(self):
        """Delete this pitch and commit.

        Bug fix: previously called ``db.sesion.commit()`` (misspelled),
        which raised AttributeError and left the deletion uncommitted.
        """
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def pitch_by_id(cls, id):
        """Return the pitch with the given id, or None if absent."""
        return cls.query.filter_by(id = id).first()

    @classmethod
    def all_pitches(cls, inputUserName):
        """Return every pitch created by the given username."""
        return cls.query.filter_by(user = inputUserName).all()
class PitchComment(db.Model):
    """A user's comment on a pitch."""
    __tablename__ = 'pitchcomments'
    id = db.Column(db.Integer, primary_key = True)
    pitch_id = db.Column(db.Integer, db.ForeignKey("pitch.id"))
    comment = db.Column(db.String)
    user = db.Column(db.String, db.ForeignKey("users.username"))
    date_posted = db.Column(db.DateTime, default = date.today)

    def save_comment(self):
        """Add this comment to the session and commit."""
        db.session.add(self)
        db.session.commit()

    def delete_comment(self):
        """Delete this comment and commit.

        Bug fix: previously called ``db.sesion.commit()`` (misspelled),
        which raised AttributeError and left the deletion uncommitted.
        """
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def all_comments(cls, inputUser):
        """Return every comment written by the given username."""
        return cls.query.filter_by(user = inputUser).all()
| 30.872549 | 88 | 0.67704 | 399 | 3,149 | 5.205514 | 0.235589 | 0.080886 | 0.101107 | 0.092441 | 0.382764 | 0.312951 | 0.257583 | 0.229658 | 0.21377 | 0.091478 | 0 | 0.009113 | 0.198476 | 3,149 | 101 | 89 | 31.178218 | 0.813788 | 0 | 0 | 0.266667 | 1 | 0 | 0.058487 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.16 | false | 0.12 | 0.066667 | 0.04 | 0.746667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
f925bd2141d9dfdef8c095f82e2c176a69b0931f | 1,446 | py | Python | src/molecule/text.py | gardar/molecule | 756fc33512a294ebbdb1e0de8aac2dabb642609e | [
"MIT"
] | 1,599 | 2015-11-18T01:40:26.000Z | 2018-10-29T16:42:52.000Z | src/molecule/text.py | gardar/molecule | 756fc33512a294ebbdb1e0de8aac2dabb642609e | [
"MIT"
] | 1,232 | 2015-11-18T16:56:02.000Z | 2018-10-27T03:51:50.000Z | src/molecule/text.py | gardar/molecule | 756fc33512a294ebbdb1e0de8aac2dabb642609e | [
"MIT"
] | 290 | 2015-11-19T18:16:41.000Z | 2018-10-29T18:09:13.000Z | """Text utils."""
import re
def camelize(string):
    """Convert an underscore-separated string to CamelCase."""
    # NOTE(retr0h): Taken from jpvanhal/inflection
    # https://github.com/jpvanhal/inflection
    def upper_first(match):
        return match.group(1).upper()

    return re.sub(r"(?:^|_)(.)", upper_first, string)
def chomp(text: str) -> str:
    """Strip trailing whitespace from every line of *text*."""
    stripped_lines = (line.rstrip() for line in text.splitlines())
    return "\n".join(stripped_lines)
def strip_ansi_escape(data):
    """Remove all ANSI escapes from string or bytes.

    If bytes is passed instead of string, it will be converted to string
    using UTF-8.
    """
    text = data.decode("utf-8") if isinstance(data, bytes) else data
    return re.sub(r"\x1b[^m]*m", "", text)
def strip_ansi_color(data):
    """Remove ANSI colors from string or bytes."""
    if isinstance(data, bytes):
        data = data.decode("utf-8")
    # Taken from tabulate
    color_codes = re.compile(r"\x1b\[\d*m")
    return color_codes.sub("", data)
def underscore(string):
    """Format string to underlined (snake_case) notation."""
    # NOTE(retr0h): Taken from jpvanhal/inflection
    # https://github.com/jpvanhal/inflection
    result = re.sub(r"([A-Z]+)([A-Z][a-z])", r"\1_\2", string)
    result = re.sub(r"([a-z\d])([A-Z])", r"\1_\2", result)
    return result.replace("-", "_").lower()
def title(word: str) -> str:
    """Capitalize each underscore-separated segment and join with spaces."""
    # Empty segments (from consecutive underscores) become a literal "_".
    segments = [part.capitalize() or "_" for part in word.split("_")]
    return " ".join(segments)
| 26.777778 | 72 | 0.616183 | 208 | 1,446 | 4.225962 | 0.403846 | 0.028441 | 0.027304 | 0.043231 | 0.350398 | 0.309443 | 0.284414 | 0.245734 | 0.245734 | 0.156997 | 0 | 0.010336 | 0.197095 | 1,446 | 53 | 73 | 27.283019 | 0.74677 | 0.339557 | 0 | 0.190476 | 0 | 0 | 0.103104 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.047619 | 0 | 0.619048 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
00575582185a8401a4598644b2c13926103bac64 | 3,328 | py | Python | catalog/admin.py | yashkgp/Med-Manage | 9ef1890a7d6c801fa0e96646223cead950a2433f | [
"CC0-1.0"
] | 2 | 2018-08-05T20:29:44.000Z | 2020-09-12T19:25:35.000Z | catalog/admin.py | yashkgp/Med-Manage | 9ef1890a7d6c801fa0e96646223cead950a2433f | [
"CC0-1.0"
] | null | null | null | catalog/admin.py | yashkgp/Med-Manage | 9ef1890a7d6c801fa0e96646223cead950a2433f | [
"CC0-1.0"
] | null | null | null | from django.contrib import admin
# Register your models here.
from .models import Author, Genre, Book, BookInstance, Language, Patient, Prescription, Medicine, Med_data ,Doctor
# Register all catalog models with the Django admin site, in the same
# order as before.
for _model in (Book, Author, BookInstance, Genre, Language, Doctor,
               Patient, Prescription, Medicine, Med_data):
    admin.site.register(_model)
# class BooksInline(admin.TabularInline):
# """
# Defines format of inline book insertion (used in AuthorAdmin)
# """
# model = Book
# class AuthorAdmin(admin.ModelAdmin):
# """
# Administration object for Author models.
# Defines:
# - fields to be displayed in list view (list_display)
# - orders fields in detail view (fields), grouping the date fields horizontally
# - adds inline addition of books in author view (inlines)
# """
# list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
# fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
# inlines = [BooksInline]
# class BooksInstanceInline(admin.TabularInline):
# """
# Defines format of inline book instance insertion (used in BookAdmin)
# """
# model = BookInstance
# class BookAdmin(admin.ModelAdmin):
# """
# Administration object for Book models.
# Defines:
# - fields to be displayed in list view (list_display)
# - adds inline addition of book instances in book view (inlines)
# """
# list_display = ('title', 'author', 'display_genre')
# inlines = [BooksInstanceInline]
# class BookInstanceAdmin(admin.ModelAdmin):
# """
# Administration object for BookInstance models.
# Defines:
# - fields to be displayed in list view (list_display)
# - filters that will be displayed in sidebar (list_filter)
# - grouping of fields into sections (fieldsets)
# """
# list_display = ('book', 'status', 'borrower','due_back', 'id')
# list_filter = ('status', 'due_back')
# fieldsets = (
# (None, {
# 'fields': ('book','imprint', 'id')
# }),
# ('Availability', {
# 'fields': ('status', 'due_back','borrower')
# }),
# )
# class DoctorsInline(admin.TabularInline):
# """
# Defines format of inline book insertion (used in AuthorAdmin)
# """
# model = Doctor
# class DoctorAdmin(admin.ModelAdmin):
# """
# Administration object for Author models.
# Defines:
# - fields to be displayed in list view (list_display)
# - orders fields in detail view (fields), grouping the date fields horizontally
# - adds inline addition of books in author view (inlines)
# """
# list_display = ('last_name', 'first_name','address', 'specialization', 'hospital')
# fields = ['first_name', 'last_name', ('address', 'specialization', 'hospital')]
# inlines = [DoctorsInline]
# class PatientInLine(admin.TabularInline):
# model = Patient
# class PatientAdmin (admin.ModelAdmin):
# list_display = ('last_name', 'first_name','address', 'specialization', 'hospital')
# fields = ['first_name', 'last_name', ('address', 'specialization', 'hospital')]
# inlines = [PatientInLine] | 32.950495 | 114 | 0.655048 | 357 | 3,328 | 6.002801 | 0.246499 | 0.041997 | 0.079328 | 0.065329 | 0.517032 | 0.472702 | 0.472702 | 0.430238 | 0.430238 | 0.430238 | 0 | 0 | 0.210637 | 3,328 | 101 | 115 | 32.950495 | 0.815759 | 0.817007 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.166667 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
005aa0eb6d8ff62cdaf7ee9eb3ba11b0cbb4f1a9 | 142 | py | Python | modules/bot_commands/template_module.py | boss-space/Nuclearbot-v-0.5.0 | 9565794c08e7af12557982bb87dfcc2f0c7194a3 | [
"MIT"
] | 2 | 2021-01-10T21:00:43.000Z | 2021-01-21T12:33:11.000Z | modules/bot_commands/template_module.py | boss-space/Nuclearbot-v-0.5.0 | 9565794c08e7af12557982bb87dfcc2f0c7194a3 | [
"MIT"
] | null | null | null | modules/bot_commands/template_module.py | boss-space/Nuclearbot-v-0.5.0 | 9565794c08e7af12557982bb87dfcc2f0c7194a3 | [
"MIT"
] | 2 | 2021-02-16T14:06:52.000Z | 2021-05-01T22:06:43.000Z | from modules.bot_commands.for_commands import *
# Version tag for this command module (template placeholder value).
__ver__ = 'X.X'
# Announce the module load on stdout.
print (f': {__name__}.py {__ver__}')
# Presumably registers this module (name + version) with the command
# framework via for_commands.add_module — confirm against that module.
add_module (__name__, __ver__)
| 17.75 | 48 | 0.690141 | 19 | 142 | 3.947368 | 0.736842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169014 | 142 | 7 | 49 | 20.285714 | 0.635593 | 0 | 0 | 0 | 0 | 0 | 0.208955 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00601f44d28aebc83e9406f72f79ded7b3d504c5 | 135 | py | Python | client/example/example.py | shinolab/autd_old | 68b9b65b34eb3020e286eae1d5d2222e7de79292 | [
"MIT"
] | 4 | 2016-12-01T07:21:37.000Z | 2020-02-02T15:13:51.000Z | client/example/example.py | shinolab/autd_old | 68b9b65b34eb3020e286eae1d5d2222e7de79292 | [
"MIT"
] | null | null | null | client/example/example.py | shinolab/autd_old | 68b9b65b34eb3020e286eae1d5d2222e7de79292 | [
"MIT"
] | null | null | null | from pyautd3 import Pyautd
# Minimal pyautd3 usage example: create a controller, add one device at
# the origin with no rotation, focus at the origin, then stop output.
ctl = Pyautd()
#ctl.open("127.0.0.1")
ctl.add_device([0,0,0], [0,0,0])  # position, rotation — both zero vectors
ctl.focal_point([0,0,0])
ctl.stop()
0061ec3e463495cee420cdfceb1e8d222b9adb23 | 15,682 | py | Python | bot/core/command_gates/raid_gate.py | Gliger13/bdo_daily_bot | d569405fcae1978c2bb1ac34d1f75936040a3552 | [
"MIT"
] | null | null | null | bot/core/command_gates/raid_gate.py | Gliger13/bdo_daily_bot | d569405fcae1978c2bb1ac34d1f75936040a3552 | [
"MIT"
] | null | null | null | bot/core/command_gates/raid_gate.py | Gliger13/bdo_daily_bot | d569405fcae1978c2bb1ac34d1f75936040a3552 | [
"MIT"
] | null | null | null | """
Module contain classes for checking raids commands successful conditions
"""
from datetime import datetime
from typing import Optional
from discord.ext.commands import Context
from core.command_gates.common import log_gate_check_failed, log_raid_gate_check_failed
from core.command_gates.gate import CommandsGate
from core.command_gates.raid_picker import RaidPicker
from core.commands.registration_controller import RegistrationController
from core.guild_managers.raids_keeper import RaidsKeeper
from core.models.context import ContextInterface
from core.raid.raid import Raid
from core.raid.raid_item import RaidItem
from core.raid.raid_member import RaidMember
from core.users_interactor.senders import UsersSender
from core.users_interactor.users_choices import UsersChoicer
from messages import messages
class RaidGate:
    """
    Class for checking raid commands correctness.

    Every public coroutine returns True/False (or the picked Raid) and, on
    failure, both logs the reason and notifies the user via UsersSender.
    """

    @classmethod
    async def can_user_join_raid(cls, ctx: ContextInterface, user: RaidMember, raid: Optional[Raid]) -> bool:
        """
        Check user can join raid

        :param ctx: discord listener context to check
        :param user: user to check
        :param raid: raid to check
        :return: True if user can join given raid else False
        """
        # Order matters: each failed check sends its own message, so the
        # first failing condition determines what the user sees.
        return (await CommandsGate.check_user_registered(user) and
                await cls.check_raid_to_join_or_leave_exist(ctx, user, raid) and
                await cls.check_raid_is_full(ctx, user, raid) and
                await cls.check_user_not_in_raid(ctx, user, raid) and
                await cls.check_user_not_in_same_raid(ctx, user, raid))

    @classmethod
    async def can_user_leave_raid(cls, ctx: ContextInterface, user: RaidMember, raid: Raid) -> bool:
        """
        Check user can leave raid

        :param ctx: discord listener context to check
        :param user: user wrapper to check
        :param raid: raid to check
        :return: True if user can leave given raid else False
        """
        return (await CommandsGate.check_user_registered(user) and
                await cls.check_raid_to_join_or_leave_exist(ctx, user, raid) and
                await cls.check_user_in_raid(ctx, user, raid))

    @classmethod
    async def can_user_create_raid(cls, ctx: Context, user: RaidMember, raid_item: RaidItem) -> bool:
        """
        Check user can create raid with given raid item

        :param ctx: discord command context to check
        :param user: user wrapper to check
        :param raid_item: raid item to check
        :return: True if user can create raid else False
        """
        if not await CommandsGate.check_user_registered(user):
            # Auto-register an unknown user as member and captain using
            # the captain name from the raid being created.
            await RegistrationController.register(user.user, raid_item.captain_name)
            await RegistrationController.register_captain(user.user, raid_item.captain_name)
        return (await cls.check_user_same_raid_not_exist(ctx, user, raid_item) and
                (await cls.check_user_raids_not_exist(raid_item) or
                 await cls.check_user_want_create_another_raid(ctx, user, raid_item)))

    @classmethod
    async def pick_and_check_raid(cls, ctx: Context, user_initiator: RaidMember,
                                  captain: RaidMember, time_leaving: Optional[datetime]) -> Optional[Raid]:
        """
        Check user can remove raid. If the captain has several raids, ask the
        user which raid they mean.

        :param ctx: discord command context to check
        :param user_initiator: user wrapper which entered the command
        :param captain: captain to check
        :param time_leaving: time leaving of the raid to narrow the choice
        :return: raid that user want to remove, or None if checks failed
        """
        if await cls.check_captain_exist(ctx, user_initiator, captain):
            if raid := await RaidPicker.pick_raid(ctx, user_initiator, captain, time_leaving):
                return raid
        return None

    @classmethod
    async def can_user_close_reservation(cls, ctx: Context, user_initiator: RaidMember, captain: RaidMember,
                                         time_leaving: Optional[datetime], places: int) -> Optional[Raid]:
        """
        Check user can close the given raid reservation places and return raid to close reservation places

        :param ctx: discord command context to check
        :param user_initiator: user who entered the command
        :param captain: captain of the raid to close places
        :param time_leaving: time leaving of the raid to close places
        :param places: places to close in raid to check
        :return: raid if checks passed
        """
        if await cls.check_captain_exist(ctx, user_initiator, captain):
            if raid := await RaidPicker.pick_raid(ctx, user_initiator, captain, time_leaving):
                if await cls.check_user_can_close_raid_places(ctx, user_initiator, raid, places):
                    return raid
        return None

    @classmethod
    async def can_user_open_reservation(cls, ctx: Context, user_initiator: RaidMember, captain: RaidMember,
                                        time_leaving: Optional[datetime], places: int) -> Optional[Raid]:
        """
        Check user can open the given raid reservation places and return raid to open reservation places

        :param ctx: discord command context to check
        :param user_initiator: user who entered the command
        :param captain: captain of the raid to open places
        :param time_leaving: time leaving of the raid to open places
        :param places: places to open in raid to check
        :return: raid if checks passed
        """
        if await cls.check_captain_exist(ctx, user_initiator, captain):
            if raid := await RaidPicker.pick_raid(ctx, user_initiator, captain, time_leaving):
                if await cls.check_user_can_open_raid_places(ctx, user_initiator, raid, places):
                    return raid
        return None

    @classmethod
    async def check_user_same_raid_not_exist(cls, ctx: Context, user: RaidMember, raid_item: RaidItem) -> bool:
        """
        Check user don't have raid with given raid attributes

        :param ctx: discord command context to check
        :param user: user wrapper to check
        :param raid_item: raid attributes to create raid
        :return: True if user don't have raid with given raid attributes else False
        """
        if RaidsKeeper.has_raid_with_raid_item(raid_item):
            log_gate_check_failed(ctx, "User didn't create raid. Raid with the given attributes already exist")
            # NOTE(review): this sender message is about a raid *removal*;
            # looks like a mismatched message for the create flow — confirm.
            await UsersSender.send_raid_to_remove_not_exist(user.user)
            return False
        return True

    @classmethod
    async def check_user_raids_not_exist(cls, raid_item: RaidItem) -> bool:
        """
        Check user don't have any raid

        :param raid_item: user raid item to create raid
        :return: True if user don't have any raid else False
        """
        return not RaidsKeeper.get_raids_by_captain_name(raid_item.captain_name)

    @classmethod
    async def check_user_want_create_another_raid(cls, ctx: Context, user: RaidMember, raid_item: RaidItem) -> bool:
        """
        Check user want to create another raid

        :param ctx: discord command context to check
        :param user: user wrapper to check
        :param raid_item: raid attributes to create raid
        :return: True if user want to create another raid else False
        """
        ask_message = messages.raid_exist_warning
        ask_message += RaidsKeeper.get_captain_raids_message(raid_item.captain_name)
        if not await UsersChoicer.ask_yes_or_no(user.user, ask_message):
            log_gate_check_failed(ctx, "User didn't create raid. User doesn't want to create another")
            return False
        return True

    @classmethod
    async def check_raid_to_join_or_leave_exist(cls, ctx: ContextInterface, user: RaidMember,
                                                raid: Optional[Raid]) -> bool:
        """
        Check raid exist

        :param ctx: discord listener context to check
        :param user: user that trying action
        :param raid: raid to check
        :return: True if raid exist else False
        """
        if not raid:
            log_gate_check_failed(ctx, "User didn't join or leave raid. Raid not exist")
            await UsersSender.send_user_try_action_with_not_exist_raid(user.user)
            return False
        return True

    @classmethod
    async def check_user_not_in_raid(cls, ctx: ContextInterface, user: RaidMember, raid: Raid) -> bool:
        """
        Check user not in the given raid

        :param ctx: discord listener context to check
        :param user: user wrapper to check
        :param raid: raid to check
        :return: True if user not in given raid else False
        """
        if raid.has_member(user):
            log_raid_gate_check_failed(ctx, raid, "User didn't join raid. Already in")
            await UsersSender.send_user_already_in_raid(user.user, raid)
            return False
        return True

    @classmethod
    async def check_user_in_raid(cls, ctx: ContextInterface, user: RaidMember, raid: Raid) -> bool:
        """
        Check user in the given raid

        :param ctx: discord listener context to check
        :param user: user wrapper to check
        :param raid: raid to check
        :return: True if user in given raid else False
        """
        if not raid.has_member(user):
            log_raid_gate_check_failed(ctx, raid, "User not in.")
            # Deliberately silent for the user (kept from original):
            # await UsersSender.send_user_not_in_raid(user.user, raid)
            return False
        return True

    @classmethod
    async def check_user_not_in_same_raid(cls, ctx: ContextInterface, user: RaidMember, raid: Raid) -> bool:
        """
        Check user not in the raids with given time leaving

        :param ctx: discord listener context to check
        :param user: user wrapper to check
        :param raid: raid to check
        :return: True if user not in the raids with given time leaving else False
        """
        if RaidsKeeper.has_member_on_same_time(user, raid.time.time_leaving):
            logging_message = f"User didn't join raid. Already in same raid with time {raid.time.time_leaving}"
            log_raid_gate_check_failed(ctx, raid, logging_message)
            await UsersSender.send_user_already_in_same_raid(user.user)
            return False
        return True

    @classmethod
    async def check_raid_is_full(cls, ctx: ContextInterface, user: RaidMember, raid: Raid) -> bool:
        """
        Check raid is not full in which user try to join

        :param ctx: discord listener context to check
        :param user: user wrapper to check
        :param raid: raid to check
        :return: True if raid is not full else False
        """
        if raid.is_full:
            log_raid_gate_check_failed(ctx, raid, "User didn't join raid. Raid is full")
            await UsersSender.send_raid_is_full(user.user, raid)
            return False
        return True

    @classmethod
    async def check_captain_exist(cls, ctx: Context, user_initiator: RaidMember, captain: RaidMember) -> bool:
        """
        Check the captain of the raid exists

        :param ctx: discord command context to check
        :param user_initiator: user who entered the command
        :param captain: captain of the raid
        :return: True if captain exist else False
        """
        if not captain:
            # BUG FIX: the original accessed `captain.nickname` inside this
            # branch, where captain is falsy (None), raising AttributeError.
            nickname = getattr(captain, "nickname", None)
            log_gate_check_failed(ctx, f"User try to interact with not exist captain {nickname}")
            await UsersSender.send_to_user_captain_not_exist(user_initiator.user, nickname)
            return False
        return True

    @classmethod
    async def check_raid_places_in_range(cls, ctx: Context, user_initiator: RaidMember,
                                         raid: Raid, places: int) -> bool:
        """
        Check the given raid places in available range

        :param ctx: discord command context to check
        :param user_initiator: user who entered the command
        :param raid: raid to check
        :param places: raid places to check
        :return: True if raid places in available range else False
        """
        # NOTE(review): the range is exclusive on both ends, so
        # places == MAX_RAID_MEMBERS_AMOUNT is rejected — confirm intended.
        if not raid.MAX_RAID_MEMBERS_AMOUNT > places > 0:
            log_raid_gate_check_failed(ctx, raid, "User use the raid places not from the available range")
            await UsersSender.send_user_raid_places_not_in_range(user_initiator.user)
            return False
        return True

    @classmethod
    async def check_raid_places_is_not_zero(cls, ctx: Context, user_initiator: RaidMember,
                                            raid: Raid, places: int) -> bool:
        """
        Check the given raid places is not zero

        :param ctx: discord command context to check
        :param user_initiator: user who entered the command
        :param raid: raid to check
        :param places: raid places to check
        :return: True is the give raid places is not zero else False
        """
        if places == 0:
            log_raid_gate_check_failed(ctx, raid, "User entered raid places with 0 value")
            await UsersSender.send_user_raid_places_is_zero(user_initiator.user)
            return False
        return True

    @classmethod
    async def check_user_can_close_raid_places(cls, ctx: Context, user_initiator: RaidMember,
                                               raid: Raid, places: int) -> bool:
        """
        Check user can close reservation places in the given raid with the given places

        :param ctx: discord command context to check
        :param user_initiator: user who entered the command
        :param raid: raid to check
        :param places: raid places to check
        :return: True if user can close reservation places in the given raid else False
        """
        if places < 0:
            # Warn about the negative value, then retry with its magnitude.
            await UsersSender.send_user_use_negative_raid_places(user_initiator.user)
            return await cls.check_user_can_close_raid_places(ctx, user_initiator, raid, abs(places))
        if (await cls.check_raid_places_is_not_zero(ctx, user_initiator, raid, places) and
                await cls.check_raid_places_in_range(ctx, user_initiator, raid, places)):
            if places > raid.places_left:
                await UsersSender.send_user_wrong_raid_places(user_initiator.user)
                return False
            return True

    @classmethod
    async def check_user_can_open_raid_places(cls, ctx: Context, user_initiator: RaidMember,
                                              raid: Raid, places: int) -> bool:
        """
        Check user can open reservation places in the given raid with the given places

        :param ctx: discord command context to check
        :param user_initiator: user who entered the command
        :param raid: raid to check
        :param places: raid places to check
        :return: True if user can open reservation places in the given raid else False
        """
        if places < 0:
            # Warn about the negative value, then retry with its magnitude.
            # BUG FIX: the original recursed into check_user_can_close_raid_places
            # here (copy-paste), silently running the *close* validation for
            # an *open* request.
            await UsersSender.send_user_use_negative_raid_places(user_initiator.user)
            return await cls.check_user_can_open_raid_places(ctx, user_initiator, raid, abs(places))
        if (await cls.check_raid_places_is_not_zero(ctx, user_initiator, raid, places) and
                await cls.check_raid_places_in_range(ctx, user_initiator, raid, places)):
            if places >= raid.reservation_count:
                await UsersSender.send_user_wrong_raid_places(user_initiator.user)
                return False
            return True
| 44.174648 | 116 | 0.661969 | 2,067 | 15,682 | 4.835027 | 0.074988 | 0.03292 | 0.038423 | 0.034221 | 0.786872 | 0.744247 | 0.689013 | 0.677807 | 0.669101 | 0.624375 | 0 | 0.000441 | 0.277133 | 15,682 | 354 | 117 | 44.299435 | 0.881175 | 0.011223 | 0 | 0.48125 | 0 | 0 | 0.050057 | 0.002477 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.09375 | 0 | 0.325 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
006b60ee44439162a278319a3cd963d428a6dc00 | 243 | py | Python | output/models/nist_data/list_pkg/id/schema_instance/nistschema_sv_iv_list_id_length_1_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/nist_data/list_pkg/id/schema_instance/nistschema_sv_iv_list_id_length_1_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/nist_data/list_pkg/id/schema_instance/nistschema_sv_iv_list_id_length_1_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from output.models.nist_data.list_pkg.id.schema_instance.nistschema_sv_iv_list_id_length_1_xsd.nistschema_sv_iv_list_id_length_1 import (
NistschemaSvIvListIdLength1,
Out,
)
__all__ = [
"NistschemaSvIvListIdLength1",
"Out",
]
| 24.3 | 137 | 0.794239 | 31 | 243 | 5.580645 | 0.645161 | 0.138728 | 0.16185 | 0.208092 | 0.312139 | 0.312139 | 0.312139 | 0 | 0 | 0 | 0 | 0.018779 | 0.123457 | 243 | 9 | 138 | 27 | 0.793427 | 0 | 0 | 0 | 0 | 0 | 0.123457 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00731de39ee9fbb29e88b94ad10dcc358c2a6f8f | 409 | py | Python | leicaexperiment/__init__.py | arve0/leicaexperiment | c0393c4d51984a506f813319efb66e54c4f2a426 | [
"MIT"
] | 1 | 2020-01-21T08:34:52.000Z | 2020-01-21T08:34:52.000Z | leicaexperiment/__init__.py | arve0/leicaexperiment | c0393c4d51984a506f813319efb66e54c4f2a426 | [
"MIT"
] | null | null | null | leicaexperiment/__init__.py | arve0/leicaexperiment | c0393c4d51984a506f813319efb66e54c4f2a426 | [
"MIT"
] | 2 | 2018-03-05T17:36:46.000Z | 2020-01-20T14:47:44.000Z | __author__ = 'Arve Seljebu'
__email__ = 'arve.seljebu@gmail.com'
from os.path import join, dirname
__version__ = open(join(dirname(__file__), 'VERSION')).read().strip()
__all__ = ['Experiment', 'compress', 'decompress',
'attribute', 'attribute_as_str', 'attributes']
from .experiment import (Experiment, compress, decompress,
attribute, attribute_as_str, attributes)
| 37.181818 | 69 | 0.682152 | 42 | 409 | 6.071429 | 0.595238 | 0.086275 | 0.219608 | 0.290196 | 0.478431 | 0.478431 | 0.478431 | 0.478431 | 0 | 0 | 0 | 0 | 0.183374 | 409 | 10 | 70 | 40.9 | 0.763473 | 0 | 0 | 0 | 0 | 0 | 0.254279 | 0.05379 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
007dfdc13eabf58bcfea8d58851cebe4b92c0c1d | 493 | py | Python | Dataset/Leetcode/valid/7/612.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/valid/7/612.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/valid/7/612.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | class Solution:
def XXX(self, x: int) -> int:
n1 = str(x)
n2 = ''
n3 = ''
lis1 = []
print(type(n1))
for i in n1 :
if i == '-' or i == '+':
n3 = i
continue
else:
lis1.append(i)
for k in XXXd(lis1):
n2 = n2 + k
number = n3 + n2
if -2**31< int(number) <2**31-1:
return(int(number))
else:
return 0
| 22.409091 | 40 | 0.340771 | 57 | 493 | 2.947368 | 0.526316 | 0.035714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090517 | 0.529412 | 493 | 21 | 41 | 23.47619 | 0.633621 | 0 | 0 | 0.1 | 0 | 0 | 0.004065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.05 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0098bbb091bd804d438138d3cbd69c10d9a7698d | 3,196 | py | Python | j.py | inyour77/GBLKcerew | 39829a8f827849c17ffa2faa0523f09f7cbdbd8a | [
"MIT"
] | null | null | null | j.py | inyour77/GBLKcerew | 39829a8f827849c17ffa2faa0523f09f7cbdbd8a | [
"MIT"
] | null | null | null | j.py | inyour77/GBLKcerew | 39829a8f827849c17ffa2faa0523f09f7cbdbd8a | [
"MIT"
] | null | null | null | #Compiled InYoyurXerXez7
#2e4hTeam
#Kiya
import os
import sys
import time
from time import sleep
# ANSI SGR escape codes used for colored terminal output below.
g = "\033[32;1m"   # bright green
gt = "\033[0;32m"  # green
bt = "\033[34;1m"  # bright blue
b = "\033[36;1m"   # bright cyan
m = "\033[31;1m"   # bright red
c = "\033[0m"      # reset
p = "\033[37;1m"   # bright white
u = "\033[35;1m"   # bright magenta
M = "\033[3;1m"    # italic + bold
k = "\033[33;1m"   # bright yellow
kt = "\033[0;33m"  # yellow
a = "\033[30;1m"   # bright black (grey)
W = "\x1b[0m"      # reset
R = "\x1b[31m"     # red
G = "\x1b[1;32m"   # bright green
O = "\x1b[33m"     # yellow
B = "\x1b[34m"     # blue
P = "\x1b[35m"     # magenta
C = "\x1b[36m"     # cyan
GR = "\x1b[37m"    # white
def slowprints(s):
    """Print *s* plus a trailing newline one character at a time,
    flushing after each character for a typewriter effect."""
    for ch in s + '\n':
        sys.stdout.write(ch)
        sys.stdout.flush()
        time.sleep(2.0 / 90)
os.system("clear")
gilang=(a+"""
==============<·~~~~~~~~~~~~>·==============
| Author : InYourXerXez7 |
| Team : Buft~2e4h GOBLOG CREW |
| ThankTo: My Friends && Allah |
| Codex :https://github.com/gillanggans7|
| Contack: @XerXezOficial |
==============<·~~~~~~~~~~~~>·==============
""")
gil=(a+"""
<|>\\\\\\\\\\\\\\\\|//////////////////<|>
\ \x1b[35m AKU BUKAN BADUT PENGHIBUR \033[30;1m/
/ \x1b[35m DISAAT PANGERAN MENGHILANG \033[30;1m\
\ \x1b[35m ~TAKUR \033[30;1m
<|>\\\\\\\\\\\\\\\\|//////////////////<|>
""")
tol=(c+"""
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
| \x1b[35m Lakukanlah apa saja yang membuat bahagia \033[0;1m|
| \x1b[35m tapi inget semua ada pertanggung jawabnnya\033[0;1m|
| \x1b[35m dan semoga pembalasan cepet datangnya! :')\033[0;1m|
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
""")
lol=(c+"""
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
| \x1b[35m Jangan Bersedih, Kamu bisa \033[0;1m|
| \x1b[35m Meluk dirimu Sendiri :) Takur!\033[0;1m|
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
""")
sat=(c+"""
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
| \x1b[35m Jika hanya sebatas janji bisa membuat tenang \033[0;1m|
| \x1b[35mlantas mau sampai kapan berbohong membuat. \033[0;1m|
| \x1b[35m bahagia?!! \033[0;1m|
| \x1b[35m ~TaKuR :') \033[0;1m|
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
""")
bang=(c+"""
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
| \x1b[35m Dari pada menjadi teman yang palsu \033[0;1m|
| \x1b[35m lebih baik menjadi musuh yang nyata!\033[0;1m|
| \x1b[35m ~TaKuR :')\033[0;1m|
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
""")
jing=(c+"""
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
| \x1b[35m Bisakah kamu merindukanku kembali?\033[0;1m|
| \x1b[35m Aku Tidak kuat merindu sendirian. \033[0;1m|
| \x1b[35m ~TaKuR :')\033[0;1m|
{|}~~\~~~|~~~\~~~\~~~\~~~\~~~\~~~\~~~\~~~|~~~{|}
""")
os.system("pkg update && upgrade")
os.system("pkg install ruby")
os.system("gem install lolcat")
sleep(4)
os.system("figlet XerXez7")
sleep(5)
os.system("clear")
print(gilang)
input('\n \033[1;31mEnTerUnTukLanjut~$ ')
print(gil)
input('\n \033[1;31mEnTerUnTukLanjut~$ ')
slowprints(tol)
input('\n \033[1;31mEnTerUnTukLanjut~$ ')
slowprints(lol)
input('\n \033[1;31mEnTerUnTukLanjut~$ ')
slowprints(sat)
input('\n \033[1;31mEnTerUnTukLanjut~$ ')
slowprints(bang)
input('\n \033[1;31mEnTerUnTukLanjut~$ ')
slowprints(jing)
input('\n \033[1;31mEnTerUnTukLanjut~$ ')
sleep(20)
os.system("exit")
| 25.568 | 66 | 0.473404 | 385 | 3,196 | 3.94026 | 0.415584 | 0.071193 | 0.059328 | 0.059328 | 0.291365 | 0.191167 | 0.045485 | 0.045485 | 0.045485 | 0 | 0 | 0.111485 | 0.163642 | 3,196 | 124 | 67 | 25.774194 | 0.454545 | 0.010951 | 0 | 0.317308 | 0 | 0 | 0.760925 | 0.267891 | 0 | 0 | 0 | 0 | 0 | 1 | 0.009615 | false | 0 | 0.038462 | 0 | 0.048077 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00a034b9897e06fedbe5f159c7609e73e7926b5c | 624 | py | Python | io_scene_xray/ui/edit_helper.py | clayne/blender-xray | 84d5d52049ec9e22c85ba8544995bd39c3a83e55 | [
"BSD-2-Clause"
] | 93 | 2016-12-02T14:42:18.000Z | 2022-03-23T08:15:41.000Z | io_scene_xray/ui/edit_helper.py | clayne/blender-xray | 84d5d52049ec9e22c85ba8544995bd39c3a83e55 | [
"BSD-2-Clause"
] | 276 | 2018-07-04T20:13:22.000Z | 2022-03-31T09:13:37.000Z | io_scene_xray/ui/edit_helper.py | clayne/blender-xray | 84d5d52049ec9e22c85ba8544995bd39c3a83e55 | [
"BSD-2-Clause"
] | 31 | 2018-07-04T20:03:17.000Z | 2022-01-27T18:37:36.000Z | # blender modules
import bpy
# addon modules
from . import base
from .. import edit_helpers
class XRAY_PT_edit_helper_object(base.XRayPanel):
    """Object-context panel shown while an XRay edit helper is active."""
    bl_context = 'object'
    bl_label = base.build_label('Edit Helper')

    @classmethod
    def poll(cls, context):
        # The panel is visible only when an edit helper exists for this context.
        active_helper = edit_helpers.base.get_object_helper(context)
        return active_helper is not None

    def draw(self, context):
        # Delegate the panel body to the active helper.
        edit_helpers.base.get_object_helper(context).draw(self.layout, context)
def register():
    # Register the edit-helper panel class with Blender on addon load.
    bpy.utils.register_class(XRAY_PT_edit_helper_object)
def unregister():
    # Remove the panel class when the addon is disabled.
    bpy.utils.unregister_class(XRAY_PT_edit_helper_object)
| 22.285714 | 71 | 0.737179 | 86 | 624 | 5.069767 | 0.395349 | 0.091743 | 0.075688 | 0.103211 | 0.355505 | 0.355505 | 0.169725 | 0 | 0 | 0 | 0 | 0 | 0.174679 | 624 | 27 | 72 | 23.111111 | 0.846602 | 0.046474 | 0 | 0 | 0 | 0 | 0.028716 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.1875 | 0.0625 | 0.6875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
00b3eedb581a21261a8e644bc9d9b5ec9ce8f30e | 8,412 | py | Python | Profiles/ldap.py | natedooley/profiles | c2d0642f69166797abf3f22475f90717ddec094a | [
"Apache-2.0"
] | null | null | null | Profiles/ldap.py | natedooley/profiles | c2d0642f69166797abf3f22475f90717ddec094a | [
"Apache-2.0"
] | 1 | 2018-04-10T01:54:42.000Z | 2018-04-10T01:54:42.000Z | Profiles/ldap.py | natedooley/profiles | c2d0642f69166797abf3f22475f90717ddec094a | [
"Apache-2.0"
] | null | null | null | import hashlib, requests, ldap
from functools import lru_cache
from Profiles import _ldap
from csh_ldap import CSHMember
def _ldap_get_group_members(group):
    """Return every member of the named LDAP group."""
    return _ldap.get_group(group).get_members()


def _ldap_is_member_of_group(member, group):
    """Return True when *member*'s memberOf DNs include *group*."""
    # Each DN looks like "cn=<group>,ou=Groups,..."; compare the cn value.
    return any(dn.split(",")[0][3:] == group for dn in member.get("memberOf"))


def _ldap_add_member_to_group(account, group):
    """Add *account* to *group*, doing nothing if already a member."""
    if not _ldap_is_member_of_group(account, group):
        _ldap.get_group(group).add_member(account, dn=False)


def _ldap_remove_member_from_group(account, group):
    """Remove *account* from *group*, doing nothing if not a member."""
    if _ldap_is_member_of_group(account, group):
        _ldap.get_group(group).del_member(account, dn=False)


@lru_cache(maxsize=1024)
def _ldap_is_member_of_directorship(account, directorship):
    """Return True when *account* is one of the directorship's heads (cached)."""
    heads = _ldap.get_directorship_heads(directorship)
    return any(head.uid == account.uid for head in heads)
# Getters

# E-board LDAP groups aggregated by ldap_get_eboard().
_EBOARD_GROUPS = (
    "eboard-chairman",
    "eboard-evaluations",
    "eboard-financial",
    "eboard-history",
    "eboard-imps",
    "eboard-opcomm",
    "eboard-research",
    "eboard-social",
    "eboard-secretary",
)


@lru_cache(maxsize=1024)
def ldap_get_member(username):
    """Return the member object for the given uid (cached).

    BUG FIX: this function was not lru_cached, yet the setters in this
    module call ``ldap_get_member.cache_clear()`` — which raises
    AttributeError on a plain function.  Caching it makes those calls
    valid and consistent with the other getters.
    """
    return _ldap.get_member(username, uid=True)


@lru_cache(maxsize=1024)
def ldap_get_active_members():
    """Return all members of the 'active' group (cached)."""
    return _ldap_get_group_members("active")


@lru_cache(maxsize=1024)
def ldap_get_intro_members():
    """Return all members of the 'intromembers' group (cached)."""
    return _ldap_get_group_members("intromembers")


@lru_cache(maxsize=1024)
def ldap_get_onfloor_members():
    """Return all members of the 'onfloor' group (cached)."""
    return _ldap_get_group_members("onfloor")


@lru_cache(maxsize=1024)
def ldap_get_current_students():
    """Return all members of the 'current_student' group (cached)."""
    return _ldap_get_group_members("current_student")


@lru_cache(maxsize=1024)
def ldap_get_all_members():
    """Return all members of the 'member' group (cached)."""
    return _ldap_get_group_members("member")


@lru_cache(maxsize=1024)
def ldap_get_groups(account):
    """Return the plain group names the account belongs to (cached)."""
    group_list = account.get("memberOf")
    # Each DN looks like "cn=<group>,ou=Groups,..."; keep the cn value.
    return [group_dn.split(",")[0][3:] for group_dn in group_list]


@lru_cache(maxsize=1024)
def ldap_get_eboard():
    """Return the members of every e-board directorship group (cached)."""
    members = []
    for group in _EBOARD_GROUPS:
        members.extend(_ldap_get_group_members(group))
    return members
# Status checkers — thin wrappers over group membership tests.
def ldap_is_active(account):
    # Active members belong to the 'active' group.
    return _ldap_is_member_of_group(account, 'active')
def ldap_is_alumni(account):
    # If the user is not active, they are an alumni.
    return not _ldap_is_member_of_group(account, 'active')
def ldap_is_eboard(account):
    return _ldap_is_member_of_group(account, 'eboard')
def ldap_is_rtp(account):
    # RTP = root-type person (admin) — presumed expansion, confirm.
    return _ldap_is_member_of_group(account, 'rtp')
def ldap_is_intromember(account):
    return _ldap_is_member_of_group(account, 'intromembers')
def ldap_is_onfloor(account):
    return _ldap_is_member_of_group(account, 'onfloor')
def ldap_is_current_student(account):
    return _ldap_is_member_of_group(account, 'current_student')
# Directorships — thin wrappers over directorship-head membership tests.
def ldap_is_financial_director(account):
    return _ldap_is_member_of_directorship(account, 'financial')
def ldap_is_eval_director(account):
    return _ldap_is_member_of_directorship(account, 'evaluations')
def ldap_is_chairman(account):
    return _ldap_is_member_of_directorship(account, 'chairman')
def ldap_is_history(account):
    return _ldap_is_member_of_directorship(account, 'history')
def ldap_is_imps(account):
    return _ldap_is_member_of_directorship(account, 'imps')
def ldap_is_social(account):
    # NOTE(review): 'Social' is capitalized while every other directorship
    # name here is lowercase — confirm against the LDAP data; this may
    # silently never match.
    return _ldap_is_member_of_directorship(account, 'Social')
def ldap_is_rd(account):
    return _ldap_is_member_of_directorship(account, 'research')
# Setters — each mutation clears the caches whose results it invalidates.
# NOTE(review): as written in this file, ldap_get_member is NOT lru_cached,
# so ldap_get_member.cache_clear() below would raise AttributeError — the
# getter is presumably meant to be cached; confirm.
def ldap_set_housingpoints(account, housing_points):
    account.housingPoints = housing_points
    ldap_get_current_students.cache_clear()
    ldap_get_member.cache_clear()
def ldap_set_roomnumber(account, room_number):
    # An empty string from the form means "no room" — store None instead.
    if room_number == "":
        room_number = None
    account.roomNumber = room_number
    ldap_get_current_students.cache_clear()
    ldap_get_member.cache_clear()
def ldap_set_active(account):
    _ldap_add_member_to_group(account, 'active')
    ldap_get_active_members.cache_clear()
    ldap_get_member.cache_clear()
def ldap_set_inactive(account):
    _ldap_remove_member_from_group(account, 'active')
    ldap_get_active_members.cache_clear()
    ldap_get_member.cache_clear()
def ldap_set_current_student(account):
    _ldap_add_member_to_group(account, 'current_student')
    ldap_get_current_students.cache_clear()
    ldap_get_member.cache_clear()
def ldap_set_non_current_student(account):
    _ldap_remove_member_from_group(account, 'current_student')
    ldap_get_current_students.cache_clear()
    ldap_get_member.cache_clear()
def ldap_update_profile(dict, uid):
    """Update a member's LDAP profile from a web-form dictionary.

    :param dict: mapping of form field names to submitted values (the
        parameter name shadows the builtin; kept unchanged for callers)
    :param uid: uid of the member whose profile is updated

    BUG FIXES vs original:
    - The per-field ``elif value == "None"`` normalization branches were
      unreachable (the first branch always matched), so the literal
      string "None" was stored; normalization is now applied.
    - The trailing LDAP delete read the undefined name ``blogURL``
      (NameError) and passed a 2-tuple where python-ldap's ``modify_s``
      expects ``(op, attr_type, values)`` 3-tuples.
    """
    profile = dict  # avoid using the shadowed builtin name below
    account = _ldap.get_member(uid, uid=True)
    # Form field name -> attribute on the account object.
    field_to_attribute = {
        "name": "cn",
        "birthday": "birthday",
        "phone": "mobile",
        "plex": "plex",
        "major": "major",
        "ritYear": "ritYear",
        "website": "homepageURL",
        "github": "github",
        "twitter": "twitterName",
        "blog": "blogURL",
        "google": "googleScreenName",
    }
    for field, attribute in field_to_attribute.items():
        value = profile.get(field)
        # The form submits the literal string "None" for cleared fields;
        # normalize that (and a real None) to None before storing.
        setattr(account, attribute, None if value in (None, "None") else value)
    if profile.get("google") in (None, "None"):
        # Preserved from the original: when "google" is cleared, blogURL is
        # also cleared and deleted directly via LDAP.
        # NOTE(review): deleting blogURL on a cleared *google* field looks
        # like leftover copy/paste — confirm the intended behavior.
        account.blogURL = None
        con = _ldap.get_con()
        mod_attrs = [(ldap.MOD_DELETE, 'blogURL', None)]
        con.modify_s(account.get_dn(), mod_attrs)
def ldap_get_roomnumber(account):
    """Return the account's room number, or "" when it has none."""
    return getattr(account, "roomNumber", "")
@lru_cache(maxsize=1024)
def ldap_search_members(query):
    # Search every profile field for *query* and return the matching members.
    # SECURITY NOTE(review): *query* is interpolated into the LDAP filter
    # unescaped — special filter characters should be escaped
    # (e.g. ldap.filter.escape_filter_chars) before formatting.
    con = _ldap.get_con()
    filt = str("(|(description=*{0}*)(displayName=*{0}*)(mail=*{0}*)(nickName=*{0}*)(plex=*{0}*)(sn=*{0}*)(uid=*{0}*)(mobile=*{0}*)(twitterName=*{0}*)(github=*{0}*))").format(query)
    res= con.search_s(
        "dc=csh,dc=rit,dc=edu",
        ldap.SCOPE_SUBTREE,
        filt,
        ['uid'])
    ret = []
    for uid in res:
        try:
            # Each result is (dn, attrs); the uid value is pulled out of the
            # stringified attrs dict by splitting on quotes — fragile hack,
            # relies on the exact repr of the attribute mapping.
            mem = (str(uid[1]).split('\'')[3])
            ret.append(ldap_get_member(mem))
        except IndexError:
            # Entries without a parsable uid (e.g. referrals) are skipped.
            continue
    return ret
# @lru_cache(maxsize=1024)
# (caching left disabled above — presumably because photos can change; confirm)
def get_image(uid):
    # Raw jpegPhoto bytes from the member's LDAP entry.
    return ldap_get_member(uid).jpegPhoto
@lru_cache(maxsize=1024)
def get_gravatar(uid):
    """Return the Gravatar avatar URL for *uid*'s CSH e-mail address (cached)."""
    email = uid + "@csh.rit.edu"
    digest = hashlib.md5(email.encode('utf8')).hexdigest()
    return "https://gravatar.com/avatar/" + digest + ".jpg?d=mm&s=250"
| 27.490196 | 412 | 0.71184 | 1,168 | 8,412 | 4.816781 | 0.13613 | 0.057234 | 0.039104 | 0.044792 | 0.558478 | 0.510132 | 0.439033 | 0.384465 | 0.306612 | 0.115357 | 0 | 0.009076 | 0.148597 | 8,412 | 305 | 413 | 27.580328 | 0.776459 | 0.014028 | 0 | 0.179894 | 0 | 0.005291 | 0.12586 | 0.01798 | 0 | 0 | 0 | 0 | 0 | 1 | 0.201058 | false | 0 | 0.021164 | 0.116402 | 0.391534 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
00b570d275fe7022693ba42290982ab289921740 | 167 | py | Python | openbabel/pybel-example/pybel-example.py | AaltoScienceIT/hpc-software-tests | 3a93d6fd2937e0a62bf7837ab00a8944bdce6f15 | [
"MIT"
] | null | null | null | openbabel/pybel-example/pybel-example.py | AaltoScienceIT/hpc-software-tests | 3a93d6fd2937e0a62bf7837ab00a8944bdce6f15 | [
"MIT"
] | null | null | null | openbabel/pybel-example/pybel-example.py | AaltoScienceIT/hpc-software-tests | 3a93d6fd2937e0a62bf7837ab00a8944bdce6f15 | [
"MIT"
] | null | null | null | # encoding: utf-8
# Minimal Pybel (Open Babel) smoke test: parse a SMILES string, generate a
# 3D structure, and write both an SDF dump and a PNG depiction.
from openbabel import pybel

# Acetyl bromide from its SMILES representation.
mol = pybel.readstring("smi", "CC(=O)Br")
mol.make3D()  # generate 3D coordinates in place
print(mol.write("sdf"))  # emit the molecule in SDF format to stdout
# Render a depiction to test.png without opening a viewer window.
mol.draw(show=False,filename='test.png')
| 20.875 | 41 | 0.706587 | 27 | 167 | 4.37037 | 0.851852 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013158 | 0.08982 | 167 | 7 | 42 | 23.857143 | 0.763158 | 0.08982 | 0 | 0 | 0 | 0 | 0.146667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.2 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00bd1ce72673da58b4d5c31e30f101a905d1b527 | 732 | py | Python | tests/test_win_native_backend.py | dspmandavid/urh | 30643c1a68634b1c97eb9989485a4e96a3b038ae | [
"Apache-2.0"
] | 1 | 2017-06-21T02:37:16.000Z | 2017-06-21T02:37:16.000Z | tests/test_win_native_backend.py | dspmandavid/urh | 30643c1a68634b1c97eb9989485a4e96a3b038ae | [
"Apache-2.0"
] | null | null | null | tests/test_win_native_backend.py | dspmandavid/urh | 30643c1a68634b1c97eb9989485a4e96a3b038ae | [
"Apache-2.0"
] | null | null | null | import unittest
import sys
class TestWhitening(unittest.TestCase):
    """Smoke test that the native device backends can be imported on Windows."""

    def test_native_backends_installed(self):
        # Only Windows needs the bundled DLL directory prepended to PATH so
        # the extension modules can resolve their native dependencies.
        if sys.platform == "win32":
            import os
            # Resolve the real tests directory even when the file is reached
            # through a symlink.
            cur_dir = os.path.dirname(__file__) if not os.path.islink(__file__) else os.path.dirname(
                os.readlink(__file__))
            dll_dir = os.path.realpath(os.path.join(cur_dir, "..", "src", "urh", "dev", "native", "lib", "win"))
            os.environ['PATH'] = dll_dir + ';' + os.environ['PATH']
            # The imports themselves are the test: they raise if the native
            # backends are missing or their DLLs cannot be loaded.
            # noinspection PyUnresolvedReferences
            from urh.dev.native.lib import hackrf
            # noinspection PyUnresolvedReferences
            from urh.dev.native.RTLSDR import RTLSDR
        self.assertTrue(True)
| 33.272727 | 112 | 0.61612 | 84 | 732 | 5.142857 | 0.488095 | 0.069444 | 0.083333 | 0.069444 | 0.231481 | 0.231481 | 0 | 0 | 0 | 0 | 0 | 0.003711 | 0.263661 | 732 | 21 | 113 | 34.857143 | 0.797774 | 0.096995 | 0 | 0 | 0 | 0 | 0.056231 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 1 | 0.076923 | false | 0 | 0.384615 | 0 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
00c1839de8c000295dee27f90f30dddee8253df8 | 59,095 | py | Python | src/csvwrpr/csvwrpr.py | hendrikdutoit/CsvWrpr | 0c44f82482f0bb8bd2a7093cc20a30db6bdd2509 | [
"MIT"
] | null | null | null | src/csvwrpr/csvwrpr.py | hendrikdutoit/CsvWrpr | 0c44f82482f0bb8bd2a7093cc20a30db6bdd2509 | [
"MIT"
] | 1 | 2022-01-18T20:38:50.000Z | 2022-01-18T20:45:48.000Z | src/csvwrpr/csvwrpr.py | hendrikdutoit/CsvWrpr | 0c44f82482f0bb8bd2a7093cc20a30db6bdd2509 | [
"MIT"
] | null | null | null | '''Wrapper for csv files
Insert description here
'''
import os
import sys
import logging
from pathlib import Path
import displayfx
import beetools
_VERSION = '1.14.0'
_path = Path(sys.argv[0])
_name = _path.stem
class CsvWrpr:
    '''Wrapper for CSV files.

    This class copies a csv file into a different structure.  It returns
    the same csv file in a different structure i.e. list, dict or tuple.
    It also manipulates the header of the original csv file by either
    deleting it, adding a header or replacing it.
    '''
    def __init__(
        self,
        p_parent_logger_name,
        p_csv_file_name,
        p_key1='',
        p_key2='',
        p_data_delimiter='',
        p_header_delimiter='',
        p_header='',
        p_del_head=False,
        p_struc_type={},
        p_csv_corr_str_file_name='',
        p_replace_header=False,
        p_subset_range=[],
        p_verbose=False,
        p_convert_none=True,
        p_bar_len=beetools.BAR_LEN,
        p_msg_width=beetools.MSG_LEN,
        p_match_nr_of_fields=False,
    ):
        '''Initialize the class

        Parameters
        - p_parent_logger_name
            Name of the parent logger
        - p_csv_file_name
            Name of the file to be manipulated
        - p_key1 = ''
            It is only mandatory for Dict structures. It will be used as the key.
        - p_key2 = ''
            It is only mandatory for 2 dimensional Dict structures. It is the
            key in the second dimension.
        - delimiter = ''
            Specifies the delimiter of the csv file. If it is not specified, it
            will try to determine the delimiter from the first row in the csv
            file from the delimiter_list.
        - p_header = ''
            Actual header that will be used in a List format.
            If "header" is empty, it assumes the first row in the csv file
            contains the header fields.
            - Lists and Tuples
                None
            - Dict
                Will be used for the keys of the dictionary.
        - p_del_head = ''
            - List and Tuple
                Assumes the first row contains header fields and the first row
                will be deleted. Practically, if "p_del_head" contains any value,
                the first row will be removed.
            - Dict
                It assumes a header was present in the original csv file and
                is obsolete because the dictionary keys contain the header
                fields. It will look for a key in the Dict with the value of
                "p_del_head" and delete this record from the Dict.
        - p_struc_type = ''
            - "List" will return the csv file in a list with all values in
              string format i.e [['',''],['','']].
            - "db" return the csv file in a Tuple with all values in string
              format i.e [('',''),('','')]. This is intended for integration
              with database modules where the csv file must be imported into
              a database table.
            - "Dict" or any other value will result in a Dict with the header
              as the keys to the Dict
        - p_csv_corr_str_file_name = ''
            In certain cases the csv file contains undesirable characters for
            instance a name and surname combination separated by a comma. It
            is actually one field and must be read as one field but because
            the delimiter of the csv file is also a comma, it reads it as two
            separate fields. This parameter gives the opportunity to change
            such strings to something that will execute to desire.
        - p_replace_header = ''
            This causes the header from the csv file to be replaced by the
            "header" parameter i.e. the header of the original csv file is
            replaced.
        - p_subset_range[ left, right ]
            There are two parameters in this list. The left parameter is the
            start and right the end. Only records where the value of p_key1
            is equal or between these two values will be included. The
            implication is that the p_key1 parameter must exist and left and
            right must be numbers.
        - p_verbose = False
            Suppress printing progress to screen
        - p_convert_none = True
            When "True" it will convert any field containing the word "NULL"
            or "None" to type None, alternatively it keeps "NULL" and "None" as
            the string content of the field
        - p_bar_len = 50
            Value passed to DisplayFx for the progress bar len
        - p_msg_width
            Value passed to beetools.msg_display to set the maximum width for a message
        '''
        # NOTE(review): p_struc_type={} and p_subset_range=[] are mutable
        # defaults; they are only read/compared here, never mutated, but a
        # None sentinel would be safer.
        self.logger_name = '{}.{}'.format(p_parent_logger_name, _name)
        self.logger = logging.getLogger(self.logger_name)
        self.logger.info('Start')
        self.version = _VERSION
        self.success = False
        self.bar_len = p_bar_len
        self.msg_width = p_msg_width
        # combined_field / t_tow carry partially-assembled quoted fields
        # between physical lines while reading (see read_one_key_csv).
        self.combined_field = ''
        self.convert_none = p_convert_none
        self.corr_str_list = []
        self.csv_corr_str_file_name = p_csv_corr_str_file_name
        self.csv_file_fame = p_csv_file_name
        self.data_delimiter = p_data_delimiter
        self.del_head = p_del_head
        # Candidate delimiters tried when none is supplied.
        self.delimiter_list = [',', ';', '~', '|']
        self.head = ''
        self.header = p_header
        self.header_delimiter = p_header_delimiter
        self.key1 = p_key1
        self.key2 = p_key2
        self.match_nr_of_fields = p_match_nr_of_fields
        self.nr_of_rows = 0
        self.replace_header = p_replace_header
        self.silent = p_verbose
        self.struc_type = p_struc_type
        self.subset_range = p_subset_range
        self.tail = ''
        self.t_tow = ''
        self.read_csv_corr_str_file()
        (self.head, self.tail) = os.path.split(self.csv_file_fame)
        if self.key2 == '':
            # Single-key read: target structure follows p_struc_type.
            if self.struc_type == [] or self.struc_type == ():
                self.csv_db = []
            else:
                self.csv_db = {}
            self.read_one_key_csv()
        else:
            # Two-key read always builds a dict-of-dicts regardless of
            # p_struc_type.
            self.delhead = False
            self.struc_type = []
            self.csv_db = []
            self.read_two_key_csv()
    # end __init__
def append(
self,
p_append_path,
p_key1='',
p_key2='',
p_data_delimiter='',
p_header_delimiter='',
p_header='',
p_del_head=False,
p_csv_corr_str_file_name='',
p_replace_header=False,
p_subset_range=[],
):
'''Apend another csv file to the current data.
Parameters
- p_append_path
Name of the file to be added
- p_key1 = ''
It is only mandatory for Dict structures. It will be used as the key.
- p_key2 = ''
It is only mandatory for 2 dimensional Dict structures. It is the
key in the second dimension.
- p_data_delimiter = ''
Specifies the delimiter of the csv file. If it is specified, it
will try to determine the delimiter from the first row in the csv
file from the delimiter_list.
- p_header_delimiter = ''
Specifies the delimiter of the header if different from the
rest of the file or due to reading it from a different source
- p_header = ''
Actual header that will be used in a List format
If "header" is empty, it assumes the first row in the csv file
contains the header fields.
- p_del_head = ''
- List and Tuple
Assumes the first row contains header fields and the first row
will be deleted. Practically, if "p_del_head" contains any value,
the first row will be removed.
- Dict
It assumes a header was present in the original csv file and
is obsolete becbeetools.e the dictionary keys contains the header
fields. It will look for a key in the Dict with the value of
"p_del_head" and delete this record from the Dict.
- struc_type = ''
- "List" will return the csv file in a list with all values in
string format i.e [['',''],['','']].
- "db" return the csv file in a Tuple with all values in string
format i.e [('',''),('','')]. This is intended for integration
with database modules where the csv file must be imported into
a database table.
- "Dict" or any other value will result in a Dict with the header
as the keys to the Dict
- p_csv_corr_str_file_name = ''
In certain cases the csv file contain undesirable characters for
instance a name and surname combination separated by a comma. It
is actually one field and must be read by as one filed but becbeetools.e
the delimiter of the scv file is also a comma, it reads it as two
separate fields. This parameter gives the opportunity to change
such strings to something that will execute to desire.
- p_replace_header = ''
This cbeetools.es the header from the csv file to be replaced by the
"header" parameter i.e. the header of the original csv file is
replaced.
- p_subset_range[ left, right ]
There are two parameters in this list. The left parameter is the
start and right the end. Only records where the value of p_key1
is equal or between these two values will be included. The
implication is that the p_key1 parameter must exist and left and
right must be numbers.
'''
csv_to_append = CsvWrpr(
_name,
p_append_path,
p_key1=p_key1,
p_key2=p_key2,
p_data_delimiter=p_data_delimiter,
p_header_delimiter=p_header_delimiter,
p_header=p_header,
p_del_head=p_del_head,
p_struc_type=self.struc_type,
p_csv_corr_str_file_name=p_csv_corr_str_file_name,
p_replace_header=p_replace_header,
p_subset_range=p_subset_range,
)
if isinstance(self.csv_db, dict):
self.csv_db.update(csv_to_append.csv_db)
self.nr_of_rows = len(self.csv_db)
# end append
def export(self, p_export_file_name, p_export_delimiter='|'):
'''Export all of the current records.'''
if isinstance(self.csv_db, (list, tuple)):
export_file = open(
p_export_file_name, 'w+', encoding='utf-8', errors='ignore'
)
for i in range(len(self.csv_db)):
export_str = p_export_delimiter.join(map(str, self.csv_db[i])) + '\n'
export_file.write(export_str)
export_file.close()
elif isinstance(self.csv_db, dict):
del self.csv_db[self.key1]
export_list = self.csv_db.keys()
self.export_sub_set(p_export_file_name, export_list, p_export_delimiter)
pass
# end export
    def export_sub_set(
        self, p_export_file_name, p_sub_set_list, p_export_delimiter='|'
    ):
        '''Write only the records whose key appears in p_sub_set_list.

        The header (``self.header``) is always written first.  Returns the
        number of data records written (header excluded).  Progress is
        reported through DisplayFx unless the instance was created silent.
        '''
        export_file = open(p_export_file_name, 'w+', encoding='utf-8', errors='ignore')
        export_str = p_export_delimiter.join(map(str, self.header)) + '\n'
        export_file.write(export_str)
        list_len = len(p_sub_set_list)
        export_qty = 0
        msg = beetools.msg_display(
            'Writing subset {} ({})'.format(
                os.path.split(p_export_file_name)[1], list_len
            ),
            p_len=self.msg_width,
        )
        dfx = displayfx.DisplayFx(
            self.logger_name,
            list_len,
            p_msg=msg,
            p_verbose=self.silent,
            p_bar_len=self.bar_len,
        )
        # This loop was not tested for a tuple and has to be corrected if found to be not working
        if isinstance(self.csv_db, (list, tuple)):
            field_pos = self.header.index(self.key1)
            for dfx_cntr, sub_set_id in enumerate(p_sub_set_list):
                # Traverse the entire list because there might be more rows that
                # fulfill the criteria in a list-of-tuples scenario.
                for csv_db_row in self.csv_db:
                    if sub_set_id == csv_db_row[field_pos]:
                        export_str = (
                            p_export_delimiter.join(map(str, csv_db_row)) + '\n'
                        )
                        export_file.write(export_str)
                        export_qty += 1
                dfx.update(dfx_cntr)
        elif isinstance(self.csv_db, dict):
            for dfx_cntr, sub_set_id in enumerate(p_sub_set_list):
                export_tow = []
                if sub_set_id in self.csv_db:
                    # Emit fields in header order so columns line up.
                    for field in self.header:
                        export_tow.append(self.csv_db[sub_set_id][field])
                    export_file.write(
                        p_export_delimiter.join(map(str, export_tow)) + '\n'
                    )
                    export_qty += 1
                dfx.update(dfx_cntr)
        export_file.close()
        return export_qty
    # end export_sub_set
def read_csv_corr_str_file(self):
'''Parameters'''
if self.csv_corr_str_file_name:
corr_file = open(
self.csv_corr_str_file_name, 'r', encoding='utf-8', errors='ignore'
)
raw_corr_data = corr_file.readlines()
corr_file.close()
for row in raw_corr_data:
self.corr_str_list.append(row[:-1].split('~'))
# end read_csv_corr_str_file
    def read_one_key_csv(self):
        '''Read the csv file into a single-key structure and return it.

        The target structure is chosen by ``self.struc_type`` (list, tuple
        or dict).  Each line is cleaned by nested helpers: correction
        strings are applied, fields that were split because a quoted value
        contained the delimiter are re-assembled (possibly across physical
        lines via ``self.t_tow``), and "NULL"/"None" fields become None
        when ``convert_none`` is set.  Returns ``self.csv_db``.
        '''

        def fix_row(p_row):
            '''Split a raw line into fields and repair quoted/split fields.'''

            def adjust_delimiters(p_row):
                '''Split the row in fields'''
                row = p_row
                row = row.split(self.data_delimiter)
                return row

            # end adjust_delimiters
            def replace_contents(p_row):
                '''Replace the line target string with correct details'''
                row = p_row
                for corr in self.corr_str_list:
                    row = p_row.replace(corr[0], corr[1])
                return row

            # end replace_contents
            row = replace_contents(p_row)
            row = adjust_delimiters(row)
            # self.t_tow carries a partially-assembled row over from the
            # previous physical line when a quoted field was left open.
            if not self.t_tow:
                self.combined_field = ''
                new_row = []
            else:
                new_row = self.t_tow
                self.combined_field = self.combined_field[:-1]
            for i, field in enumerate(row):
                if self.combined_field:
                    # Re-join a field that was split because it contained
                    # the data delimiter inside quotes.
                    field = self.combined_field + self.data_delimiter + field
                    self.combined_field = ''
                if field[:1] == '"':
                    if field[-1:] == '"' and len(field) > 1:
                        # Complete quoted field: strip the surrounding quotes.
                        new_row.append(field[1:-1])
                    else:
                        # Opening quote without a closing one: keep collecting.
                        self.combined_field = field
                else:
                    new_row.append(field)
            if self.convert_none:
                for i, field in enumerate(new_row):
                    if field in ['NULL', 'None']:
                        new_row[i] = None
            # An unfinished quoted field means the row continues on the next
            # physical line; stash the partial row in t_tow.
            if self.combined_field:
                self.t_tow = new_row
            else:
                self.t_tow = ''
            return new_row

        # end fix_row
        def get_delimiter():
            '''Guess the data delimiter from the first row when not supplied.'''
            if self.data_delimiter == '':
                delimiter_cntr = 0
                delimiter_pos = 0
                # Pick the candidate from delimiter_list occurring most often
                # in the first line.
                for i, delimiter in enumerate(self.delimiter_list):
                    if (
                        str(raw_csv_file_data[0]).count(self.delimiter_list[i])
                        > delimiter_cntr
                    ):
                        delimiter_cntr = str(raw_csv_file_data[0]).count(
                            self.delimiter_list[i]
                        )
                        delimiter_pos = i
                self.data_delimiter = self.delimiter_list[delimiter_pos]
            if not self.header_delimiter:
                self.header_delimiter = self.data_delimiter
            pass

        # end getDelimeter
        def append_row_to_list(p_row):
            '''Append the corrected row to a list structure'''
            if key1_index is not None:
                # The subset filter only applies to numeric key values.
                if p_row[key1_index].isnumeric():
                    if not self.subset_range or (
                        int(p_row[key1_index]) >= self.subset_range[0]
                        and int(p_row[key1_index]) <= self.subset_range[1]
                    ):
                        self.csv_db.append(p_row)
                else:
                    self.csv_db.append(p_row)
            else:
                self.csv_db.append(p_row)
            pass

        # end append_row_to_list
        def append_row_to_db(p_row):
            '''Append the corrected row to a db (tuple) structure'''
            csv_row = tuple(p_row)
            if key1_index is not None:
                if csv_row[key1_index].isnumeric():
                    if not self.subset_range or (
                        int(csv_row[key1_index]) >= self.subset_range[0]
                        and int(csv_row[key1_index]) <= self.subset_range[1]
                    ):
                        self.csv_db.append(csv_row)
                else:
                    self.csv_db.append(csv_row)
            else:
                self.csv_db.append(csv_row)
            pass

        # end append_row_to_db
        def append_row_to_dict(p_row):
            '''Append the corrected row to a dictionary structure'''
            csv_row = {}
            # Zip header names onto the positional fields.
            for j, field in enumerate(self.header):
                csv_row[field] = p_row[j]
            if csv_row[self.key1].isnumeric():
                if not self.subset_range or (
                    int(csv_row[self.key1]) >= self.subset_range[0]
                    and int(csv_row[self.key1]) <= self.subset_range[1]
                ):
                    self.csv_db[csv_row[self.key1]] = csv_row
            else:
                self.csv_db[csv_row[self.key1]] = csv_row
            pass

        # end append_row_to_dict
        def del_header():
            '''Delete the header according to parameter switch'''
            if self.del_head:
                if self.struc_type == [] or self.struc_type == ():
                    del self.csv_db[0]
                elif self.struc_type == {}:
                    del self.csv_db[self.key1]
            pass

        # end del_header
        def replace_header():
            '''Replace the header according to parameter switch'''
            if not self.header:
                # No explicit header supplied: take it from the first line.
                self.header = (
                    raw_csv_file_data[0]
                    .rstrip('\n')
                    .replace('"', '')
                    .split(self.data_delimiter)
                )
            if self.replace_header:
                if self.struc_type == []:
                    self.csv_db.append(self.header)
                elif self.struc_type == ():
                    self.csv_db.append(tuple(self.header))
                elif self.struc_type == {}:
                    self.csv_db[self.key1] = dict(
                        zip(
                            self.header,
                            [
                                x.strip('\n"')
                                for x in raw_csv_file_data[0].split(
                                    self.header_delimiter
                                )
                            ],
                        )
                    )
                del raw_csv_file_data[0]
                self.nr_of_rows -= 1
            pass

        # end replace_header
        if os.path.isfile(self.csv_file_fame):
            csv_file = open(self.csv_file_fame, 'r', encoding='utf-8', errors='ignore')
            raw_csv_file_data = csv_file.readlines()
            csv_file.close()
            self.nr_of_rows = len(raw_csv_file_data)
            if self.nr_of_rows:
                get_delimiter()
                # str_end = 0
                replace_header()
                header_len = len(self.header)
                if self.key1:
                    key1_index = self.header.index(self.key1)
                else:
                    key1_index = None
                msg = beetools.msg_display(
                    'Reading {} ({})'.format(
                        os.path.split(self.csv_file_fame)[1], self.nr_of_rows
                    ),
                    p_len=self.msg_width,
                )
                dfx = displayfx.DisplayFx(
                    self.logger_name,
                    self.nr_of_rows,
                    p_msg=msg,
                    p_verbose=self.silent,
                    p_bar_len=self.bar_len,
                )
                for row_cntr, row in enumerate(raw_csv_file_data):
                    fixed_row = fix_row(row.rstrip('\n'))
                    # Only store the row once it is complete (no dangling
                    # quoted field carried over in t_tow).
                    if not self.t_tow:
                        if (
                            self.match_nr_of_fields and header_len == len(fixed_row)
                        ) or not self.match_nr_of_fields:
                            if self.struc_type == []:
                                append_row_to_list(fixed_row)
                            elif self.struc_type == ():
                                append_row_to_db(fixed_row)
                            else:
                                append_row_to_dict(fixed_row)
                        elif self.match_nr_of_fields:
                            # Field count mismatch: drop the record and log it.
                            log_str = '{};{};Unequal fields. Removed rec #{} from {};"{}";"{}";"{}"'.format(
                                header_len,
                                len(fixed_row),
                                row_cntr,
                                self.csv_file_fame,
                                ','.join(self.header),
                                row,
                                ','.join(fixed_row),
                            )
                            self.logger.warning(log_str)
                    dfx.update(row_cntr)
                self.success = True
            else:
                if not self.silent:
                    log_str = 'File does not have data - {}'.format(self.csv_file_fame)
                    self.logger.warning(log_str)
            del_header()
        else:
            if not self.silent:
                log_str = 'File does not exist: {}\n'.format(self.csv_file_fame)
                self.logger.warning(log_str)
        return self.csv_db
    # end read_one_key_csv
    def read_two_key_csv(self):
        '''Read the csv file into a two-level dict keyed by key1 then key2.

        First reads the file as a flat structure via read_one_key_csv, then
        rebuilds ``self.csv_db`` as {key1_value: {key2_value: row_dict}}.
        Duplicate (key1, key2) pairs keep the first occurrence; later ones
        are reported on stdout.
        '''
        log_str = self.read_one_key_csv()
        self.csv_db = {}
        msg = beetools.msg_display('Build two key structure', p_len=self.msg_width)
        dfx = displayfx.DisplayFx(
            self.logger_name,
            len(log_str),
            p_msg=msg,
            p_verbose=self.silent,
            p_bar_len=self.bar_len,
        )
        for row_cntr, row in enumerate(log_str):
            # Re-shape the positional row into a field-name keyed dict.
            row_dict = {}
            for field in self.header:
                row_dict[field] = row[self.header.index(field)]
            if row_dict[self.key1] not in self.csv_db:
                self.csv_db[row_dict[self.key1]] = {row_dict[self.key2]: row_dict}
            else:
                # Cater for duplicate keys.  Assume the last entry is the incorrect one.
                if not row_dict[self.key2] in self.csv_db[row_dict[self.key1]]:
                    self.csv_db[row_dict[self.key1]][row_dict[self.key2]] = row_dict
                else:
                    # Log duplicates.  Insert writing of errors to file here if necessary.
                    error_str = 'read_two_key_csv Duplicate entry: %s %s' % (
                        row_dict[self.key1],
                        row_dict[self.key2],
                    )
                    print(error_str)
            dfx.update(row_cntr)
    # end read_two_key_csv
# end CsvWrpr
def do_tests(p_app_path='', p_cls=True):
'''This definition drives the testing and is also called from the PackageIt
module during the PIP process to establish correct functioning before
packaging it.
'''
def basic_test():
'''Basic and mandatory scenario tests for certification of the class'''
success = True
test_folder = Path(__file__).absolute().parents[3] / _name / 'Data'
test_dict01 = {
'PID': {
'PID': 'PID',
'FIDE_PlayerCode': 'FIDE_PlayerCode',
'PlayerName': 'PlayerName',
'FIDE_Federation': 'FIDE_Federation',
'Gender': 'Gender',
'BirthYear': 'BirthYear',
'FIDE_Title': 'FIDE_Title',
},
'61854': {
'PID': '61854',
'FIDE_PlayerCode': '4406176',
'PlayerName': 'Rodriguez, Sofia',
'FIDE_Federation': 'COL',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WCM',
},
'62277': {
'PID': '62277',
'FIDE_PlayerCode': '119296',
'PlayerName': 'Strgacich, Aylen',
'FIDE_Federation': 'ARG',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WFM',
},
'116355': {
'PID': '116355',
'FIDE_PlayerCode': '3003191',
'PlayerName': 'Perêz, Cecilia Manuela',
'FIDE_Federation': 'URU',
'Gender': 'F',
'BirthYear': '1998',
'FIDE_Title': None,
},
'130161': {
'PID': '130161',
'FIDE_PlayerCode': '3611906',
'PlayerName': 'Arias Cango, Nathaly',
'FIDE_Federation': 'ECU',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': None,
},
'847094': {
'PID': '847094',
'FIDE_PlayerCode': '5239109',
'PlayerName': ', John Andrei',
'FIDE_Federation': 'PHI',
'Gender': 'M',
'BirthYear': '2002',
'FIDE_Title': '',
},
}
test_dict02 = {
'PID': {
'PID': 'PID',
'FIDE_PlayerCode': 'FIDE_PlayerCode',
'PlayerName': 'PlayerName',
'FIDE_Federation': 'FIDE_Federation',
'Gender': 'Gender',
'BirthYear': 'BirthYear',
'FIDE_Title': 'FIDE_Title',
},
'62277': {
'PID': '62277',
'FIDE_PlayerCode': '119296',
'PlayerName': 'Strgacich, Aylen',
'FIDE_Federation': 'ARG',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WFM',
},
'116355': {
'PID': '116355',
'FIDE_PlayerCode': '3003191',
'PlayerName': 'Perêz, Cecilia Manuela',
'FIDE_Federation': 'URU',
'Gender': 'F',
'BirthYear': '1998',
'FIDE_Title': None,
},
'130161': {
'PID': '130161',
'FIDE_PlayerCode': '3611906',
'PlayerName': 'Arias Cango, Nathaly',
'FIDE_Federation': 'ECU',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': None,
},
}
test_dict03 = {
'PID': {
'PID': 'PID',
'FIDE_PlayerCode': 'FIDE_PlayerCode',
'PlayerName': 'PlayerName',
'Country': 'FIDE_Federation',
'Gender': 'Gender',
'BirthYear': 'BirthYear',
'FIDE_Title': 'FIDE_Title',
},
'61854': {
'PID': '61854',
'FIDE_PlayerCode': '4406176',
'PlayerName': 'Rodriguez, Sofia',
'Country': 'COL',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WCM',
},
'62277': {
'PID': '62277',
'FIDE_PlayerCode': '119296',
'PlayerName': 'Strgacich, Aylen',
'Country': 'ARG',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WFM',
},
'116355': {
'PID': '116355',
'FIDE_PlayerCode': '3003191',
'PlayerName': 'Perêz, Cecilia Manuela',
'Country': 'URU',
'Gender': 'F',
'BirthYear': '1998',
'FIDE_Title': None,
},
'130161': {
'PID': '130161',
'FIDE_PlayerCode': '3611906',
'PlayerName': 'Arias Cango, Nathaly',
'Country': 'ECU',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': None,
},
'847094': {
'PID': '847094',
'FIDE_PlayerCode': '5239109',
'PlayerName': ', John Andrei',
'Country': 'PHI',
'Gender': 'M',
'BirthYear': '2002',
'FIDE_Title': '',
},
}
test_dict04 = {
'EventID': {
'GameID': {
'EventID': 'EventID',
'GameID': 'GameID',
'WhitePlayerID': 'WhitePlayerID',
'BlackPlayerID': 'BlackPlayerID',
'WhiteScore': 'WhiteScore',
'DayID': 'DayID',
'M60': 'M60',
}
},
'10103447': {
'63905487': {
'EventID': '10103447',
'GameID': '63905487',
'WhitePlayerID': '213929',
'BlackPlayerID': '275294',
'WhiteScore': '1',
'DayID': '3259',
'M60': '120',
},
'63905515': {
'EventID': '10103447',
'GameID': '63905515',
'WhitePlayerID': '165389',
'BlackPlayerID': '213929',
'WhiteScore': '0.5',
'DayID': '3261',
'M60': '120',
},
'63905554': {
'EventID': '10103447',
'GameID': '63905554',
'WhitePlayerID': '213929',
'BlackPlayerID': '142290',
'WhiteScore': '0.5',
'DayID': '3263',
'M60': '120',
},
},
'10113973': {
'57140921': {
'EventID': '10113973',
'GameID': '57140921',
'WhitePlayerID': '426349',
'BlackPlayerID': '47414',
'WhiteScore': '0.5',
'DayID': '3404',
'M60': '120',
},
'57140922': {
'EventID': '10113973',
'GameID': '57140922',
'WhitePlayerID': '394201',
'BlackPlayerID': '426349',
'WhiteScore': '0',
'DayID': '3407',
'M60': '120',
},
'57140904': {
'EventID': '10113973',
'GameID': '57140904',
'WhitePlayerID': '237614',
'BlackPlayerID': '81308',
'WhiteScore': '0.5',
'DayID': '3406',
'M60': '120',
},
},
'10113980': {
'57141255': {
'EventID': '10113980',
'GameID': '57141255',
'WhitePlayerID': '81309',
'BlackPlayerID': '90313',
'WhiteScore': '0.5',
'DayID': '3408',
'M60': '120',
},
'57141253': {
'EventID': '10113980',
'GameID': '57141253',
'WhitePlayerID': '23442',
'BlackPlayerID': '119195',
'WhiteScore': '0',
'DayID': '3405',
'M60': '120',
},
'57141261': {
'EventID': '10113980',
'GameID': '57141261',
'WhitePlayerID': '23542',
'BlackPlayerID': '81319',
'WhiteScore': '1',
'DayID': '3406',
'M60': '120',
},
},
}
test_dict05 = {
'EventID': {
'GameID': {
'EventID': 'EventID',
'GameID': 'GameID',
'WhitePlayerID': 'WhitePlayerID',
'BlackPlayerID': 'BlackPlayerID',
'WhiteScore': 'WhiteScore',
'DayID': 'DayID',
'M60': 'M60',
}
},
'10103447': {
'63905487': {
'EventID': '10103447',
'GameID': '63905487',
'WhitePlayerID': '213929',
'BlackPlayerID': '275294',
'WhiteScore': '1',
'DayID': '3259',
'M60': '120',
},
'63905515': {
'EventID': '10103447',
'GameID': '63905515',
'WhitePlayerID': '165389',
'BlackPlayerID': '213929',
'WhiteScore': '0.5',
'DayID': '3261',
'M60': '120',
},
'63905554': {
'EventID': '10103447',
'GameID': '63905554',
'WhitePlayerID': '213929',
'BlackPlayerID': '142290',
'WhiteScore': '0.5',
'DayID': '3263',
'M60': '120',
},
},
'10113973': {
'57140921': {
'EventID': '10113973',
'GameID': '57140921',
'WhitePlayerID': '426349',
'BlackPlayerID': '47414',
'WhiteScore': '0.5',
'DayID': '3404',
'M60': '120',
},
'57140922': {
'EventID': '10113973',
'GameID': '57140922',
'WhitePlayerID': '394201',
'BlackPlayerID': '426349',
'WhiteScore': '0',
'DayID': '3407',
'M60': '120',
},
'57140904': {
'EventID': '10113973',
'GameID': '57140904',
'WhitePlayerID': '237614',
'BlackPlayerID': '81308',
'WhiteScore': '0.5',
'DayID': '3406',
'M60': '120',
},
},
}
test_dict06 = {
'PID': {
'PID': 'PID',
'FIDE_PlayerCode': 'FIDE_PlayerCode',
'PlayerName': 'PlayerName',
'FIDE_Federation': 'FIDE_Federation',
'Gender': 'Gender',
'BirthYear': 'BirthYear',
'FIDE_Title': 'FIDE_Title',
},
'61854': {
'PID': '61854',
'FIDE_PlayerCode': '4406176',
'PlayerName': 'Rodriguez, Sofia',
'FIDE_Federation': 'COL',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WCM',
},
'62277': {
'PID': '62277',
'FIDE_PlayerCode': '119296',
'PlayerName': 'Strgacich, Aylen',
'FIDE_Federation': 'ARG',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WFM',
},
'130161': {
'PID': '130161',
'FIDE_PlayerCode': '3611906',
'PlayerName': 'Arias Cango, Nathaly',
'FIDE_Federation': 'ECU',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': None,
},
}
test_dict07 = {
'PID': {
'PID': 'PID',
'FIDE_PlayerCode': 'FIDE_PlayerCode',
'PlayerName': 'PlayerName',
'FIDE_Federation': 'FIDE_Federation',
'Gender': 'Gender',
'BirthYear': 'BirthYear',
'FIDE_Title': 'FIDE_Title',
},
'61854': {
'PID': '61854',
'FIDE_PlayerCode': '4406176',
'PlayerName': 'Rodriguez, Sofia',
'FIDE_Federation': 'COL',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WCM',
},
'62277': {
'PID': '62277',
'FIDE_PlayerCode': '119296',
'PlayerName': 'Strgacich, Aylen',
'FIDE_Federation': 'ARG',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WFM',
},
'116355': {
'PID': '116355',
'FIDE_PlayerCode': '3003191',
'PlayerName': 'Perêz, Cecilia Manuela',
'FIDE_Federation': 'URU',
'Gender': 'F',
'BirthYear': '1998',
'FIDE_Title': None,
},
'130161': {
'PID': '130161',
'FIDE_PlayerCode': '3611906',
'PlayerName': 'Arias Cango, Nathaly',
'FIDE_Federation': 'ECU',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': None,
},
'149055': {
'PID': '149055',
'FIDE_PlayerCode': '14306980',
'PlayerName': 'Du Toit, Hendrik',
'FIDE_Federation': 'RSA',
'Gender': 'M',
'BirthYear': '1968',
'FIDE_Title': 'IA',
},
'847094': {
'PID': '847094',
'FIDE_PlayerCode': '5239109',
'PlayerName': ', John Andrei',
'FIDE_Federation': 'PHI',
'Gender': 'M',
'BirthYear': '2002',
'FIDE_Title': '',
},
}
test_dict08 = {
'PID': {
'PID': 'PID',
'FIDE_PlayerCode': 'FIDE_PlayerCode',
'PlayerName': 'PlayerName',
'FIDE_Federation': 'FIDE_Federation',
'Gender': 'Gender',
'BirthYear': 'BirthYear',
'FIDE_Title': 'FIDE_Title',
},
'61854': {
'PID': '61854',
'FIDE_PlayerCode': '4406176',
'PlayerName': 'Rodriguez, Sofia',
'FIDE_Federation': 'COL',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WCM',
},
'62277': {
'PID': '62277',
'FIDE_PlayerCode': '119296',
'PlayerName': 'Strgacich, Aylen',
'FIDE_Federation': 'ARG',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': 'WFM',
},
'116355': {
'PID': '116355',
'FIDE_PlayerCode': '3003191',
'PlayerName': 'Perêz, Cecilia Manuela',
'FIDE_Federation': 'URU',
'Gender': 'F',
'BirthYear': '1998',
'FIDE_Title': None,
},
'130161': {
'PID': '130161',
'FIDE_PlayerCode': '3611906',
'PlayerName': 'Arias Cango, Nathaly',
'FIDE_Federation': 'ECU',
'Gender': 'F',
'BirthYear': '1999',
'FIDE_Title': None,
},
'847094': {
'PID': '847094',
'FIDE_PlayerCode': '5239109',
'PlayerName': ', John Andrei',
'FIDE_Federation': 'PHI',
'Gender': 'M',
'BirthYear': '2002',
'FIDE_Title': '',
},
}
test_dict09 = {
'PID': {
'PID': 'PID',
'FIDE_PlayerCode': 'FIDE_PlayerCode',
'PlayerName': 'PlayerName',
'FIDE_Federation': 'FIDE_Federation',
'Gender': 'Gender',
'BirthYear': 'BirthYear',
'FIDE_Title': 'FIDE_Title',
'': '',
},
'116355': {
'PID': '116355',
'FIDE_PlayerCode': '3003191',
'PlayerName': 'Perêz, Cecilia Manuela',
'FIDE_Federation': 'URU',
'Gender': 'F',
'BirthYear': '1998',
'FIDE_Title': None,
'': None,
},
}
test_list01 = [
[
'PID',
'FIDE_PlayerCode',
'PlayerName',
'FIDE_Federation',
'Gender',
'BirthYear',
'FIDE_Title',
],
['61854', '4406176', 'Rodriguez, Sofia', 'COL', 'F', '1999', 'WCM'],
['62277', '119296', 'Strgacich, Aylen', 'ARG', 'F', '1999', 'WFM'],
['116355', '3003191', 'Perêz, Cecilia Manuela', 'URU', 'F', '1998', None],
['130161', '3611906', 'Arias Cango, Nathaly', 'ECU', 'F', '1999', None],
['847094', '5239109', ', John Andrei', 'PHI', 'M', '2002', ''],
]
test_list02 = [
[
'PID',
'FIDE_PlayerCode',
'PlayerName',
'Country',
'Gender',
'BirthYear',
'FIDE_Title',
],
['61854', '4406176', 'Rodriguez, Sofia', 'COL', 'F', '1999', 'WCM'],
['62277', '119296', 'Strgacich, Aylen', 'ARG', 'F', '1999', 'WFM'],
['116355', '3003191', 'Perêz, Cecilia Manuela', 'URU', 'F', '1998', None],
['130161', '3611906', 'Arias Cango, Nathaly', 'ECU', 'F', '1999', None],
['847094', '5239109', ', John Andrei', 'PHI', 'M', '2002', ''],
]
test_list03 = [
[
'PID',
'FIDE_PlayerCode',
'PlayerName',
'FIDE_Federation',
'Gender',
'BirthYear',
'FIDE_Title',
],
['61854', '4406176', 'Rodriguez, Sofia', 'COL', 'F', '1999', 'WCM'],
['62277', '119296', 'Strgacich_a, Aylen', 'ARG', 'F', '1999', 'WFM'],
['62277', '119296', 'Strgacich_b, Aylen', 'ARG', 'F', '1999', 'WFM'],
['130161', '3611906', 'Arias Cango, Nathaly', 'ECU', 'F', '1999', None],
]
test_list04 = [
[
'PID',
'FIDE_PlayerCode',
'PlayerName',
'FIDE_Federation',
'Gender',
'BirthYear',
'FIDE_Title',
],
['61854', '4406176', 'Rodriguez, Sofia', 'COL', 'F', '1999', 'WCM'],
['62277', '119296', 'Strgacich, Aylen', 'ARG', 'F', '1999', 'WFM'],
['116355', '3003191', 'Perêz, Cecilia Manuela', 'URU', 'F', '1998', 'NULL'],
['130161', '3611906', 'Arias Cango, Nathaly', 'ECU', 'F', '1999', 'None'],
['847094', '5239109', ', John Andrei', 'PHI', 'M', '2002', ''],
]
test_list05 = [
[
'PID',
'FIDE_PlayerCode',
'PlayerName',
'FIDE_Federation',
'Gender',
'BirthYear',
'FIDE_Title',
],
['116355', '3003191', 'Perêz, Cecilia Manuela', 'URU', 'F', '1998', 'None'],
['130161', '3611906', 'Arias Cango, Nathaly', 'ECU', 'F', '1999', 'None'],
['847094', '5239109', ', John Andrei', 'PHI', 'M', '2002', ''],
]
test_db01 = [
(
'PID',
'FIDE_PlayerCode',
'PlayerName',
'FIDE_Federation',
'Gender',
'BirthYear',
'FIDE_Title',
),
('61854', '4406176', 'Rodriguez, Sofia', 'COL', 'F', '1999', 'WCM'),
('62277', '119296', 'Strgacich, Aylen', 'ARG', 'F', '1999', 'WFM'),
('116355', '3003191', 'Perêz, Cecilia Manuela', 'URU', 'F', '1998', None),
('130161', '3611906', 'Arias Cango, Nathaly', 'ECU', 'F', '1999', None),
('847094', '5239109', ', John Andrei', 'PHI', 'M', '2002', ''),
]
test_db02 = [
(
'PID',
'FIDE_PlayerCode',
'PlayerName',
'FIDE_Federation',
'Gender',
'BirthYear',
'FIDE_Title',
),
('62277', '119296', 'Strgacich, Aylen', 'ARG', 'F', '1999', 'WFM'),
('116355', '3003191', 'Perêz, Cecilia Manuela', 'URU', 'F', '1998', None),
('130161', '3611906', 'Arias Cango, Nathaly', 'ECU', 'F', '1999', None),
]
test_db03 = [
(
'PID',
'FIDE_PlayerCode',
'PlayerName',
'Country',
'Gender',
'BirthYear',
'FIDE_Title',
),
('61854', '4406176', 'Rodriguez, Sofia', 'COL', 'F', '1999', 'WCM'),
('62277', '119296', 'Strgacich, Aylen', 'ARG', 'F', '1999', 'WFM'),
('116355', '3003191', 'Perêz, Cecilia Manuela', 'URU', 'F', '1998', None),
('130161', '3611906', 'Arias Cango, Nathaly', 'ECU', 'F', '1999', None),
('847094', '5239109', ', John Andrei', 'PHI', 'M', '2002', ''),
]
print('Test constructor with Dict')
csv_wrpr = CsvWrpr(_name, os.path.join(test_folder, 'Players.csv'), 'PID')
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_dict01):
success = False
print('Test constructor with Dict and subset')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
'PID',
p_subset_range=[62277, 130161],
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_dict02):
success = False
print('Test constructor with List')
csv_wrpr = CsvWrpr(
_name, os.path.join(test_folder, 'Players.csv'), p_struc_type=[]
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_list01, 'List'):
success = False
print('Test constructor with db')
csv_wrpr = CsvWrpr(
_name, os.path.join(test_folder, 'Players.csv'), p_struc_type=()
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_db01, 'db'):
success = False
print('Test constructor with db and subset')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
'PID',
p_struc_type=(),
p_subset_range=[62277, 130161],
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_db02, 'db'):
success = False
print('Test "p_replace_header" in Dict')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
'PID',
p_header=[
'PID',
'FIDE_PlayerCode',
'PlayerName',
'Country',
'Gender',
'BirthYear',
'FIDE_Title',
],
p_replace_header=True,
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_dict03):
success = False
print('Test "replace_header" in list')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
p_struc_type=[],
p_header=[
'PID',
'FIDE_PlayerCode',
'PlayerName',
'Country',
'Gender',
'BirthYear',
'FIDE_Title',
],
p_replace_header=True,
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_list02, 'List'):
success = False
print('Test "replace_header" in db')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
p_struc_type=(),
p_header=[
'PID',
'FIDE_PlayerCode',
'PlayerName',
'Country',
'Gender',
'BirthYear',
'FIDE_Title',
],
p_replace_header=True,
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_db03, 'db'):
success = False
print('Test for key file')
csv_wrpr = CsvWrpr(
_name, os.path.join(test_folder, 'Game.csv'), 'EventID', 'GameID'
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_dict04):
success = False
print('Test for key file with sub range')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'Game.csv'),
'EventID',
'GameID',
p_subset_range=[10103447, 10113973],
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_dict05):
success = False
print('Test exporting a sub set from a list')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'PlayersDup.csv'),
p_key1='PID',
p_struc_type=[],
)
csv_wrpr.export_sub_set(
os.path.join(test_folder, 'NewPlayers.csv'), ['61854', '62277', '130161']
)
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'NewPlayers.csv'),
p_key1='PID',
p_struc_type=[],
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_list03, 'List'):
success = False
if os.path.isfile(os.path.join(test_folder, 'NewPlayers.csv')):
os.remove(os.path.join(test_folder, 'NewPlayers.csv'))
print('Test exporting a sub set from a dict')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
p_key1='PID',
p_struc_type={},
)
csv_wrpr.export_sub_set(
os.path.join(test_folder, 'NewPlayers.csv'), ['61854', '62277', '130161']
)
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'NewPlayers.csv'),
p_key1='PID',
p_struc_type={},
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_dict06, 'List'):
success = False
if os.path.isfile(os.path.join(test_folder, 'NewPlayers.csv')):
os.remove(os.path.join(test_folder, 'NewPlayers.csv'))
print('Test something 01??')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
p_key1='PID',
p_struc_type={},
)
csv_wrpr.append(os.path.join(test_folder, 'PlayersExtra.csv'), p_key1='PID')
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_dict07, 'List'):
success = False
print('Test converting "None" as field contents to None')
csv_wrpr1 = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
p_key1='PID',
p_struc_type=[],
)
csv_wrpr1.export(os.path.join(test_folder, 'PlayersExport.csv'))
csv_wrpr2 = CsvWrpr(
_name,
os.path.join(test_folder, 'PlayersExport.csv'),
p_key1='PID',
p_struc_type=[],
)
if not beetools.is_struct_the_same(csv_wrpr2.csv_db, test_list01, 'List'):
success = False
if os.path.isfile(os.path.join(test_folder, 'PlayersExport.csv')):
os.remove(os.path.join(test_folder, 'PlayersExport.csv'))
print('Test suppressing converting "None" as field contents to None')
csv_wrpr1 = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
p_key1='PID',
p_struc_type=[],
p_convert_none=False,
)
csv_wrpr1.export(os.path.join(test_folder, 'PlayersExport.csv'))
csv_wrpr2 = CsvWrpr(
_name,
os.path.join(test_folder, 'PlayersExport.csv'),
p_key1='PID',
p_struc_type=[],
p_convert_none=False,
)
if not beetools.is_struct_the_same(csv_wrpr2.csv_db, test_list04, 'List'):
success = False
if os.path.isfile(os.path.join(test_folder, 'PlayersExport.csv')):
os.remove(os.path.join(test_folder, 'PlayersExport.csv'))
print('Test something??')
csv_wrpr1 = CsvWrpr(
_name,
os.path.join(test_folder, 'Players.csv'),
p_key1='PID',
p_struc_type={},
)
csv_wrpr1.export(os.path.join(test_folder, 'PlayersExport.csv'))
csv_wrpr2 = CsvWrpr(
_name,
os.path.join(test_folder, 'PlayersExport.csv'),
p_key1='PID',
p_struc_type={},
)
if not beetools.is_struct_the_same(csv_wrpr2.csv_db, test_dict08, 'List'):
success = False
if os.path.isfile(os.path.join(test_folder, 'PlayersExport.csv')):
os.remove(os.path.join(test_folder, 'PlayersExport.csv'))
print('Test empty file')
csv_wrpr1 = CsvWrpr(
_name,
os.path.join(test_folder, 'EmptyFile.csv'),
p_key1='PID',
p_struc_type=[],
)
if csv_wrpr1.csv_db:
success = False
print('Test empty lines')
csv_wrpr1 = CsvWrpr(
_name,
os.path.join(test_folder, 'EmptyLines.csv'),
p_key1='PID',
p_struc_type=[],
p_match_nr_of_fields=True,
)
csv_wrpr1.export(os.path.join(test_folder, 'EmptyLinesExport.csv'))
csv_wrpr2 = CsvWrpr(
_name,
os.path.join(test_folder, 'EmptyLinesExport.csv'),
p_key1='PID',
p_struc_type=[],
p_convert_none=False,
)
if not beetools.is_struct_the_same(csv_wrpr2.csv_db, test_list05, 'List'):
success = False
if os.path.isfile(os.path.join(test_folder, 'EmptyLinesExport.csv')):
os.remove(os.path.join(test_folder, 'EmptyLinesExport.csv'))
print('Incorrect header')
csv_wrpr = CsvWrpr(
_name,
os.path.join(test_folder, 'LongHeader.csv'),
'PID',
p_match_nr_of_fields=True,
)
if not beetools.is_struct_the_same(csv_wrpr.csv_db, test_dict09):
success = False
return success
# end basic_test
success = True
b_tls = beetools.Archiver(__doc__[0], p_app_path, p_cls=p_cls)
logger = logging.getLogger(_name)
logger.setLevel(beetools.DEF_LOG_LEV)
file_handle = logging.FileHandler(beetools.LOG_FILE_NAME, mode='w')
file_handle.setLevel(beetools.DEF_LOG_LEV_FILE)
console_handle = logging.StreamHandler()
console_handle.setLevel(beetools.DEF_LOG_LEV_CON)
file_format = logging.Formatter(
beetools.LOG_FILE_FORMAT, datefmt=beetools.LOG_DATE_FORMAT
)
console_format = logging.Formatter(beetools.LOG_CONSOLE_FORMAT)
file_handle.setFormatter(file_format)
console_handle.setFormatter(console_format)
logger.addHandler(file_handle)
logger.addHandler(console_handle)
b_tls.print_header(p_cls=p_cls)
success = basic_test()
beetools.result_rep(success, 'Done')
b_tls.print_footer()
# end do_tests
# Script entry point: run the module's self-tests.
# (Re-indented — the call must sit inside the ``if`` body to be guarded.)
if __name__ == '__main__':
    do_tests(p_app_path=_path)
# end __main__
| 38.323606 | 110 | 0.454099 | 5,637 | 59,095 | 4.53681 | 0.087635 | 0.011535 | 0.017205 | 0.024087 | 0.755142 | 0.708962 | 0.681121 | 0.646203 | 0.626378 | 0.607648 | 0 | 0.065619 | 0.432659 | 59,095 | 1,541 | 111 | 38.348475 | 0.697169 | 0.125408 | 0 | 0.622964 | 0 | 0 | 0.197407 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013964 | false | 0.005431 | 0.004655 | 0 | 0.02405 | 0.017067 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00d12aeb9d9de5ac0b3bd03bbb59f61629f4687c | 359 | py | Python | apps/fyle_connect/admin.py | akshay-codemonk/fyle-xero | a040dab12282a9a64ca01aca2dc09f8bb7eaa0f6 | [
"MIT"
] | null | null | null | apps/fyle_connect/admin.py | akshay-codemonk/fyle-xero | a040dab12282a9a64ca01aca2dc09f8bb7eaa0f6 | [
"MIT"
] | null | null | null | apps/fyle_connect/admin.py | akshay-codemonk/fyle-xero | a040dab12282a9a64ca01aca2dc09f8bb7eaa0f6 | [
"MIT"
] | null | null | null | from django.contrib import admin
from apps.fyle_connect.models import FyleAuth
@admin.register(FyleAuth)
class FyleAuthAdmin(admin.ModelAdmin):
    """
    Admin options for FyleAuth Model
    """
    # Columns shown in the changelist and the sidebar filters.
    list_display = ('id', 'created_at', 'updated_at')
    list_filter = ['created_at', 'updated_at']
| 22.4375 | 53 | 0.729805 | 44 | 359 | 5.795455 | 0.590909 | 0.101961 | 0.12549 | 0.141176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164345 | 359 | 15 | 54 | 23.933333 | 0.85 | 0.189415 | 0 | 0 | 0 | 0 | 0.152727 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
00d56a1ae2bd841557627431b078c02d5514f6c3 | 2,170 | py | Python | modules/setelah_usulan.py | dindamajesty13/web-risk | 8ba90ceecf3146049a44df7359ac021de0a3d4ce | [
"MIT"
] | null | null | null | modules/setelah_usulan.py | dindamajesty13/web-risk | 8ba90ceecf3146049a44df7359ac021de0a3d4ce | [
"MIT"
] | null | null | null | modules/setelah_usulan.py | dindamajesty13/web-risk | 8ba90ceecf3146049a44df7359ac021de0a3d4ce | [
"MIT"
] | null | null | null | from lib.database import db_connect
def getSetelahUsulan(email='user@gmail.com'):
    """Return per-application risk rows (after proposal) for one user.

    Each row holds the five criteria plus a weighted total
    (0.30/0.20/0.10/0.25/0.15) aliased ``result_assessment``.

    :param email: account whose rows are fetched; defaults to the address
        that was previously hard-coded, so existing callers are unchanged.
    :return: list of tuples from ``cursor.fetchall()``.
    """
    conn = db_connect()
    with conn:
        cur = conn.cursor()
        # Parameterized query — never interpolate caller data into SQL.
        # NOTE(review): assumes a DB-API driver with %s placeholders
        # (e.g. psycopg2, consistent with the ``public.`` schema); confirm
        # against db_connect's driver.
        cur.execute(
            "SELECT id_app, criteria_1, criteria_2, criteria_3, criteria_4, criteria_5, "
            "((criteria_1*0.30) + (criteria_2*0.20) + (criteria_3*0.10) + "
            "(criteria_4*0.25) + (criteria_5*0.15)) as result_assessment "
            "FROM public.nilai_resiko_setelah_usulan "
            "WHERE email = %s ORDER BY id_app ASC",
            (email,),
        )
        data = cur.fetchall()
    return data
def getCountSetelahUsulan(email):
    """Return the number of 'after proposal' rows stored for *email*.

    :param email: account to count rows for.
    :return: ``cursor.fetchall()`` result, i.e. ``[(count,)]``.
    """
    conn = db_connect()
    with conn:
        cur = conn.cursor()
        # Parameterized to close the SQL-injection hole the old f-string had.
        # NOTE(review): assumes a %s-placeholder DB-API driver (e.g. psycopg2).
        cur.execute(
            "SELECT count(email) FROM public.nilai_resiko_setelah_usulan WHERE email = %s",
            (email,),
        )
        data = cur.fetchall()
    return data
def insertSetelahUsulan(email, id_app, criteria_1, criteria_2, criteria_3, criteria_4, criteria_5):
    """Insert one 'after proposal' risk row for (*email*, *id_app*).

    :return: ``True`` (kept for backward compatibility with callers).
    """
    conn = db_connect()
    with conn:
        cur = conn.cursor()
        # Parameterized to close the SQL-injection hole the old f-string had.
        # NOTE(review): assumes a %s-placeholder DB-API driver (e.g. psycopg2).
        cur.execute(
            "INSERT INTO public.nilai_resiko_setelah_usulan "
            "(email, id_app, criteria_1, criteria_2, criteria_3, criteria_4, criteria_5) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s)",
            (email, id_app, criteria_1, criteria_2, criteria_3, criteria_4, criteria_5),
        )
    return True
def updateSetelahUsulan(email, id_app, criteria_1, criteria_2, criteria_3, criteria_4, criteria_5):
    """Update the five criteria of the row keyed by (*email*, *id_app*).

    :return: ``True`` (kept for backward compatibility with callers).
    """
    conn = db_connect()
    with conn:
        cur = conn.cursor()
        # Parameterized to close the SQL-injection hole the old f-string had.
        # NOTE(review): assumes a %s-placeholder DB-API driver (e.g. psycopg2).
        cur.execute(
            "UPDATE public.nilai_resiko_setelah_usulan "
            "SET criteria_1=%s, criteria_2=%s, criteria_3=%s, criteria_4=%s, criteria_5=%s "
            "WHERE id_app=%s AND email=%s",
            (criteria_1, criteria_2, criteria_3, criteria_4, criteria_5, id_app, email),
        )
    return True
def deleteSetelahUsulan(id_app, email):
    """Delete the 'after proposal' row keyed by (*id_app*, *email*).

    Bug fix: the old WHERE clause qualified its columns with the *wrong*
    table (``nilai_resiko_saat_usulan``) while deleting from
    ``nilai_resiko_setelah_usulan``; the filter now targets the table
    actually being deleted from.

    :return: ``True`` (kept for backward compatibility with callers).
    """
    conn = db_connect()
    with conn:
        cur = conn.cursor()
        # Parameterized to close the SQL-injection hole the old f-string had.
        # NOTE(review): assumes a %s-placeholder DB-API driver (e.g. psycopg2).
        cur.execute(
            "DELETE FROM public.nilai_resiko_setelah_usulan "
            "WHERE id_app=%s AND email=%s",
            (id_app, email),
        )
    return True
| 55.641026 | 485 | 0.729954 | 310 | 2,170 | 4.787097 | 0.2 | 0.103774 | 0.160377 | 0.19407 | 0.803235 | 0.680593 | 0.48248 | 0.456199 | 0.3969 | 0.348383 | 0 | 0.029794 | 0.149309 | 2,170 | 38 | 486 | 57.105263 | 0.774106 | 0 | 0 | 0.666667 | 0 | 0.121212 | 0.563594 | 0.359908 | 0 | 0 | 0 | 0 | 0 | 1 | 0.151515 | false | 0 | 0.030303 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00d90108ed39c231fe1165e497bf8f3a79a8e675 | 1,201 | py | Python | python/solutii/cristina_ungureanu/paranteze/paranteze.py | broascaiulian/labs | 068c7f440c7a29cb6a3e1dbb8e4bb7dfaff5a050 | [
"MIT"
] | null | null | null | python/solutii/cristina_ungureanu/paranteze/paranteze.py | broascaiulian/labs | 068c7f440c7a29cb6a3e1dbb8e4bb7dfaff5a050 | [
"MIT"
] | null | null | null | python/solutii/cristina_ungureanu/paranteze/paranteze.py | broascaiulian/labs | 068c7f440c7a29cb6a3e1dbb8e4bb7dfaff5a050 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# *-* coding: UTF-8 *-*
"""Stabileste daca o expresie de paranteze este corecta."""
def este_corect(expresie):
    """Return True iff *expresie* is a balanced string of (), [] brackets.

    Any character other than the four bracket symbols makes the
    expression invalid.
    """
    perechi = {')': '(', ']': '['}  # closer -> matching opener
    stiva = []
    for simbol in expresie:
        if simbol not in '([)]':
            return False
        if simbol in '([':
            stiva.append(simbol)
        elif not stiva or stiva[-1] != perechi[simbol]:
            return False
        else:
            stiva.pop()
    # Valid only if every opener found its closer.
    return not stiva
# Smoke tests run on direct execution; each message names the failing case.
if __name__ == "__main__":
    assert not este_corect("[9]")
    assert not este_corect("[")
    assert este_corect("[()[]]"), "Probleme la expresia 1"
    assert este_corect("()()[][]"), "Probleme la expresia 2"
    assert este_corect("([([])])"), "Probleme la expresia 3"
    assert not este_corect("[)()()()"), "Probleme la expresia 4"
    assert not este_corect("][[()][]"), "Probleme la expresia 5"
    assert not este_corect("([()]))"), "Probleme la expresia 6"
    assert not este_corect("([)]"), "Probleme la expresia 7"
00d9864a32338409071e3571576b9810d7629a8e | 135 | py | Python | CURSO PYTHON/exerciciosSecao1e2/exe004.py | Sabrinaparussoli/PYTHON | 77436608ffd799e9e2bbe4fa5084443fb7382793 | [
"MIT"
] | null | null | null | CURSO PYTHON/exerciciosSecao1e2/exe004.py | Sabrinaparussoli/PYTHON | 77436608ffd799e9e2bbe4fa5084443fb7382793 | [
"MIT"
] | null | null | null | CURSO PYTHON/exerciciosSecao1e2/exe004.py | Sabrinaparussoli/PYTHON | 77436608ffd799e9e2bbe4fa5084443fb7382793 | [
"MIT"
] | null | null | null | x = 0
soma = 0
i = 0
while x != -1:
x = int(input('digite uma idade: '))
if x != -1:
soma += x
i += 1
print(soma/i) | 15 | 39 | 0.444444 | 25 | 135 | 2.4 | 0.52 | 0.066667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.069767 | 0.362963 | 135 | 9 | 40 | 15 | 0.627907 | 0 | 0 | 0 | 0 | 0 | 0.132353 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.111111 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00dc62e3e02feee770ab4f7c40698eee1c4baffc | 142 | py | Python | other-languages/python/21-hackerrank.py | fahimfarhan/legendary-coding-odyssey | 55289e05aa04f866201c607bed00c505cd9c4df9 | [
"MIT"
] | 3 | 2019-07-20T07:26:31.000Z | 2020-08-06T09:31:09.000Z | other-languages/python/21-hackerrank.py | fahimfarhan/legendary-coding-odyssey | 55289e05aa04f866201c607bed00c505cd9c4df9 | [
"MIT"
] | null | null | null | other-languages/python/21-hackerrank.py | fahimfarhan/legendary-coding-odyssey | 55289e05aa04f866201c607bed00c505cd9c4df9 | [
"MIT"
] | 4 | 2019-06-20T18:43:32.000Z | 2020-10-07T16:45:23.000Z | if __name__ == '__main__':
n = int(input())
output = ""
for i in range(1, n+1):
output = output + str(i)
print(output) | 23.666667 | 32 | 0.521127 | 20 | 142 | 3.3 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020408 | 0.309859 | 142 | 6 | 33 | 23.666667 | 0.653061 | 0 | 0 | 0 | 0 | 0 | 0.055944 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.166667 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00e8fc707daef659cc8b3a9e83f1f89a7e7b704e | 13,804 | py | Python | tests/test_getset.py | LSSTDESC/healsparse | f6b15f570ab6335328e34006f69c3919d9fcf1c8 | [
"BSD-3-Clause"
] | 8 | 2019-05-06T11:42:41.000Z | 2021-10-08T14:57:12.000Z | tests/test_getset.py | LSSTDESC/healsparse | f6b15f570ab6335328e34006f69c3919d9fcf1c8 | [
"BSD-3-Clause"
] | 75 | 2019-03-01T23:25:26.000Z | 2022-01-29T21:40:27.000Z | tests/test_getset.py | LSSTDESC/healsparse | f6b15f570ab6335328e34006f69c3919d9fcf1c8 | [
"BSD-3-Clause"
] | 3 | 2020-01-30T19:10:19.000Z | 2022-03-08T14:57:38.000Z | import unittest
import numpy.testing as testing
import numpy as np
import healpy as hp
from numpy import random
import healsparse
class GetSetTestCase(unittest.TestCase):
    """Tests for HealSparseMap ``__getitem__``/``__setitem__`` semantics.

    Each test builds a dense healpy reference map (``full_map``, with
    hp.UNSEEN marking empty pixels) or a recarray-typed sparse map, then
    checks that indexing the sparse map by scalar, slice, array, or list
    matches the dense reference, and that unsupported index types raise
    IndexError.
    """

    def test_getitem_single(self):
        """
        Test __getitem__ single value
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        # Dense reference map: hp.UNSEEN everywhere except the first 5000 pixels.
        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        # Grab a single item, in range
        testing.assert_almost_equal(sparse_map[100], full_map[100])

        # Grab a single item out of range
        testing.assert_almost_equal(sparse_map[6000], full_map[6000])

    def test_getitem_recarray_single(self):
        """
        Test __getitem__ from a recarray
        """
        random.seed(seed=12345)

        nside_coverage = 32
        nside_map = 128

        dtype = [('col1', 'f8'), ('col2', 'f8')]

        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype, primary='col1')
        pixel = np.arange(5000)
        values = np.zeros_like(pixel, dtype=dtype)
        values['col1'] = random.random(size=pixel.size)
        values['col2'] = random.random(size=pixel.size)
        sparse_map.update_values_pix(pixel, values)

        # Test name access
        test = sparse_map['col1']
        testing.assert_array_almost_equal(test.get_values_pix(test.valid_pixels),
                                          values['col1'])

        # Test index access
        test_item = sparse_map[1000]
        testing.assert_almost_equal(test_item['col1'], values['col1'][1000])
        testing.assert_almost_equal(test_item['col2'], values['col2'][1000])

        # Out-of-coverage pixel returns the sentinel in every column.
        test_item = sparse_map[10000]
        testing.assert_almost_equal(test_item['col1'], hp.UNSEEN)
        testing.assert_almost_equal(test_item['col2'], hp.UNSEEN)

    def test_getitem_slice(self):
        """
        Test __getitem__ using slices
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        # Test in-range, overlap, out-of-range
        testing.assert_array_almost_equal(sparse_map[100: 500], full_map[100: 500])
        testing.assert_array_almost_equal(sparse_map[4500: 5500], full_map[4500: 5500])
        testing.assert_array_almost_equal(sparse_map[5500: 5600], full_map[5500: 5600])

        # Test stepped
        testing.assert_array_almost_equal(sparse_map[100: 500: 2], full_map[100: 500: 2])
        testing.assert_array_almost_equal(sparse_map[4500: 5500: 2], full_map[4500: 5500: 2])
        testing.assert_array_almost_equal(sparse_map[5500: 5600: 2], full_map[5500: 5600: 2])

        # Test all
        testing.assert_array_almost_equal(sparse_map[:], full_map[:])

    def test_getitem_array(self):
        """
        Test __getitem__ using an array
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        indices = np.array([1, 2, 100, 500, 10000])
        testing.assert_array_almost_equal(sparse_map[indices], full_map[indices])
        testing.assert_almost_equal(sparse_map[indices[0]], full_map[indices[0]])

        # Float indices must raise IndexError.
        indices = np.array([1., 2, 100, 500, 10000])
        self.assertRaises(IndexError, sparse_map.__getitem__, indices)
        self.assertRaises(IndexError, sparse_map.__getitem__, indices[0])

    def test_getitem_list(self):
        """
        Test __getitem__ using list/tuple
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        indices = [1, 2, 100, 500, 10000]
        testing.assert_array_almost_equal(sparse_map[indices], full_map[indices])

        # A float element makes the whole list an invalid index.
        indices = [1.0, 2, 100, 500, 10000]
        self.assertRaises(IndexError, sparse_map.__getitem__, indices)

    def test_getitem_other(self):
        """
        Test __getitem__ using something else
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        # Tuples and bare floats are rejected as indices.
        indices = (1, 2, 3, 4)
        self.assertRaises(IndexError, sparse_map.__getitem__, indices)

        indices = 5.0
        self.assertRaises(IndexError, sparse_map.__getitem__, indices)

    def test_setitem_single(self):
        """
        Test __setitem__ single value
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        # Set inside the covered region and verify against the dense map.
        sparse_map[1000] = 1.0
        full_map[1000] = 1.0

        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        # Setting outside the covered region also works.
        sparse_map[10000] = 1.0
        full_map[10000] = 1.0

        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

    def test_setitem_recarray_single(self):
        """
        Test __setitem__ from recarray
        """
        random.seed(seed=12345)

        nside_coverage = 32
        nside_map = 128

        dtype = [('col1', 'f8'), ('col2', 'f8'), ('col3', 'i4')]

        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype, primary='col1')
        pixel = np.arange(5000)
        values = np.zeros_like(pixel, dtype=dtype)
        values['col1'] = random.random(size=pixel.size)
        values['col2'] = random.random(size=pixel.size)
        values['col3'] = np.ones(pixel.size, dtype=np.int32)
        sparse_map.update_values_pix(pixel, values)

        value = np.zeros(1, dtype=dtype)
        value['col1'] = 1.0
        value['col2'] = 1.0
        value['col3'] = 10

        sparse_map[1000] = value

        testing.assert_almost_equal(sparse_map['col1'][1000], 1.0)
        testing.assert_almost_equal(sparse_map['col2'][1000], 1.0)
        self.assertEqual(sparse_map['col3'][1000], 10)

        testing.assert_almost_equal(sparse_map[1000]['col1'], 1.0)
        testing.assert_almost_equal(sparse_map[1000]['col2'], 1.0)
        self.assertEqual(sparse_map[1000]['col3'], 10)

        # A column name is not a valid assignment target on the map itself.
        self.assertRaises(IndexError, sparse_map.__setitem__, 'col1', 1.0)

        # Try setting individual columns... test both ways of calling
        # although only the one works for setting
        sparse_map['col1'][100] = 100.0
        testing.assert_almost_equal(sparse_map['col1'][100], 100.0)
        testing.assert_almost_equal(sparse_map[100]['col1'], 100.0)

        sparse_map['col2'][100] = 100.0
        testing.assert_almost_equal(sparse_map['col2'][100], 100.0)
        testing.assert_almost_equal(sparse_map[100]['col2'], 100.0)

        sparse_map['col3'][100] = 100
        self.assertEqual(sparse_map['col3'][100], 100)
        self.assertEqual(sparse_map[100]['col3'], 100)

        sparse_map['col1'][100: 200] = np.zeros(100)
        testing.assert_array_almost_equal(sparse_map['col1'][100: 200], 0.0)
        testing.assert_array_almost_equal(sparse_map[100: 200]['col1'], 0.0)

        sparse_map['col2'][100: 200] = np.zeros(100)
        testing.assert_array_almost_equal(sparse_map['col2'][100: 200], 0.0)
        testing.assert_array_almost_equal(sparse_map[100: 200]['col2'], 0.0)

        sparse_map['col3'][100: 200] = np.zeros(100, dtype=np.int32)
        testing.assert_array_equal(sparse_map['col3'][100: 200], 0)
        testing.assert_array_equal(sparse_map[100: 200]['col3'], 0)

        # Finally, assert that we cannot set new pixels
        self.assertRaises(RuntimeError, sparse_map['col1'].__setitem__,
                          10000, 10.0)

    def test_setitem_slice(self):
        """
        Test __setitem__ slice
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        # This needs to be accessed with an array of length 1 or same length.
        sparse_map[100: 500] = np.array([1.0])
        full_map[100: 500] = np.array([1.0])
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        sparse_map[1000: 1500] = np.ones(500)
        full_map[1000: 1500] = np.ones(500)
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        # Stepped slice assignment.
        sparse_map[10000: 11000: 2] = np.ones(500)
        full_map[10000: 11000: 2] = np.ones(500)
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        # Test all
        sparse_map[:] = np.array([1.0])
        full_map[:] = np.array([1.0])
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

    def test_setitem_array(self):
        """
        Test __setitem__ array
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        indices = np.array([1, 2, 100, 500, 10000])
        sparse_map[indices] = np.array([1.0])
        full_map[indices] = np.array([1.0])
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        # Simple in-place operation
        sparse_map[indices] += 1.0
        full_map[indices] += 1.0
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        indices = np.array([1, 2, 100, 500, 10000]) + 100
        sparse_map[indices] = np.ones(len(indices))
        full_map[indices] = np.ones(len(indices))
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        # Float indices must raise IndexError.
        indices = np.array([1., 2, 100, 500, 10000])
        self.assertRaises(IndexError, sparse_map.__setitem__, indices, 1.0)

    def test_setitem_list(self):
        """
        Test __setitem__ list
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        indices = [1, 2, 100, 500, 10000]
        sparse_map[indices] = np.array([1.0])
        full_map[indices] = np.array([1.0])
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        indices = [101, 102, 200, 600, 10100]
        sparse_map[indices] = np.ones(len(indices))
        full_map[indices] = np.ones(len(indices))
        testing.assert_array_almost_equal(sparse_map.generate_healpix_map(),
                                          full_map)

        # A float element makes the whole list an invalid index.
        indices = [1., 2, 100, 500, 10000]
        self.assertRaises(IndexError, sparse_map.__setitem__, indices, 1.0)

    def test_setitem_other(self):
        """
        Test __setitem__ using something else
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        full_map = np.zeros(hp.nside2npix(nside_map)) + hp.UNSEEN
        full_map[0: 5000] = random.random(size=5000)

        sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)

        # Tuples and bare floats are rejected as assignment indices.
        indices = (1, 2, 3, 4)
        self.assertRaises(IndexError, sparse_map.__setitem__, indices, 1.0)

        indices = 5.0
        self.assertRaises(IndexError, sparse_map.__setitem__, indices, 1.0)

    def test_setitem_integer(self):
        """
        Test __setitem__ for integer HealSparseMaps
        """
        random.seed(12345)

        nside_coverage = 32
        nside_map = 128

        pxnums = np.arange(0, 2000)
        pxvalues = pxnums

        full_map = np.zeros(hp.nside2npix(nside_map), dtype=pxvalues.dtype)
        full_map[pxnums] = pxvalues

        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage=nside_coverage,
                                                         nside_sparse=nside_map, dtype=pxvalues.dtype)

        # Scalar assignment, then bulk array assignment.
        sparse_map[pxnums[0]] = pxvalues[0]
        testing.assert_equal(sparse_map[pxnums[0]], full_map[pxnums[0]])

        sparse_map[pxnums] = pxvalues
        testing.assert_array_almost_equal(sparse_map[pxnums], full_map[pxnums])
# Run the test suite when the module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 36.326316 | 106 | 0.622139 | 1,736 | 13,804 | 4.647465 | 0.086982 | 0.102628 | 0.067675 | 0.089241 | 0.788299 | 0.760659 | 0.729301 | 0.658156 | 0.641919 | 0.576475 | 0 | 0.08768 | 0.263837 | 13,804 | 379 | 107 | 36.422164 | 0.706259 | 0.058824 | 0 | 0.555085 | 0 | 0 | 0.016877 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.055085 | false | 0 | 0.025424 | 0 | 0.084746 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
00f330a388ce1a7d82c86fd61e7daba177f19294 | 317 | py | Python | src/bepasty/storage/__init__.py | Emojigit/bepasty-server | c68524d16f5387ec1df3d717afe83eb195c42b34 | [
"BSD-2-Clause"
] | 123 | 2015-01-01T21:03:29.000Z | 2022-03-27T05:35:25.000Z | src/bepasty/storage/__init__.py | Emojigit/bepasty-server | c68524d16f5387ec1df3d717afe83eb195c42b34 | [
"BSD-2-Clause"
] | 134 | 2015-01-09T20:21:21.000Z | 2022-03-09T09:13:54.000Z | src/bepasty/storage/__init__.py | Emojigit/bepasty-server | c68524d16f5387ec1df3d717afe83eb195c42b34 | [
"BSD-2-Clause"
] | 40 | 2015-01-21T14:13:53.000Z | 2022-01-13T20:39:57.000Z | import importlib
def create_storage(app):
"""
Load specified storage and return the object.
"""
if 'STORAGE' not in app.config:
raise Exception("Missing STORAGE config key")
storage = importlib.import_module('.' + app.config['STORAGE'], __name__)
return storage.create_storage(app)
| 24.384615 | 76 | 0.681388 | 38 | 317 | 5.5 | 0.578947 | 0.124402 | 0.15311 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.208202 | 317 | 12 | 77 | 26.416667 | 0.832669 | 0.141956 | 0 | 0 | 0 | 0 | 0.160156 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
daa88232ba42e410926e1479e121fde2d63b49fe | 711 | py | Python | tests/test_funcs.py | snowskeleton/alg-mirrorer | 914f9fabae20a7198fe427cb7882f36f0bfd687e | [
"Apache-2.0"
] | null | null | null | tests/test_funcs.py | snowskeleton/alg-mirrorer | 914f9fabae20a7198fe427cb7882f36f0bfd687e | [
"Apache-2.0"
] | null | null | null | tests/test_funcs.py | snowskeleton/alg-mirrorer | 914f9fabae20a7198fe427cb7882f36f0bfd687e | [
"Apache-2.0"
] | null | null | null | from funcs import invert
assert invert("F' R U2 R' U2 R' F2 R U R U' R' F'") == "F L' U2 L U2 L F2 L' U' L' U L F", "with spaces"
assert invert("F'RU2R'U2R'F2RURU'R'F'") == "F L' U2 L U2 L F2 L' U' L' U L F", "without spaces"
assert invert("F 'RU 2R'U 2 R'F 2R UR U'R'F'") == "F L' U2 L U2 L F2 L' U' L' U L F", "mixed spacing"
assert invert("r U R' U' M2 U R U' R' U' M'") == "l' U' L U M2 U' L U L U M", "more letters (like M)"
assert invert("() ") == "", "input sanitization"
assert invert("F'") == "F", "single-length prime alg"
assert invert("F") != "F", "single-length non-prime alg"
from funcs import clean
assert clean("asdf\r1234'\n()\"", "RLFBUDrlfbudm'23") == "df23'", "character removal"
| 33.857143 | 104 | 0.585091 | 149 | 711 | 2.791946 | 0.295302 | 0.048077 | 0.057692 | 0.048077 | 0.259615 | 0.259615 | 0.134615 | 0.134615 | 0.134615 | 0.134615 | 0 | 0.049734 | 0.208158 | 711 | 20 | 105 | 35.55 | 0.689165 | 0 | 0 | 0 | 0 | 0 | 0.566807 | 0.030942 | 0 | 0 | 0 | 0 | 0.8 | 1 | 0 | true | 0 | 0.2 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
daacedee12edbacf73dadd8086789b8598cca42e | 79 | py | Python | ratelimiter/__init__.py | wenbobuaa/pykit | 43e38fe40297a1e7a9329bcf3db3554c7ca48ead | [
"MIT"
] | 13 | 2016-12-16T09:23:09.000Z | 2018-03-10T08:04:00.000Z | ratelimiter/__init__.py | wenbobuaa/pykit | 43e38fe40297a1e7a9329bcf3db3554c7ca48ead | [
"MIT"
] | 74 | 2017-03-23T11:36:22.000Z | 2018-04-02T06:19:09.000Z | ratelimiter/__init__.py | drmingdrmer/pykit | e25a71146e81aaf79625cf8d4f4c439ccd515b82 | [
"MIT"
] | 5 | 2016-12-27T07:30:47.000Z | 2018-03-10T07:06:21.000Z | from .ratelimiter import (
RateLimiter
)
__all__ = [
"RateLimiter"
]
| 8.777778 | 26 | 0.632911 | 6 | 79 | 7.666667 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.265823 | 79 | 8 | 27 | 9.875 | 0.793103 | 0 | 0 | 0 | 0 | 0 | 0.139241 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
dab8083e632565b53c23ab65512b7b92a9c17880 | 864 | py | Python | categories/views.py | yuyuyuhaoshi/Blog-BE | a485d5159076d619d4fd6019fe9b96ac04020d4d | [
"Apache-2.0"
] | null | null | null | categories/views.py | yuyuyuhaoshi/Blog-BE | a485d5159076d619d4fd6019fe9b96ac04020d4d | [
"Apache-2.0"
] | null | null | null | categories/views.py | yuyuyuhaoshi/Blog-BE | a485d5159076d619d4fd6019fe9b96ac04020d4d | [
"Apache-2.0"
] | null | null | null | from django_filters import rest_framework as filters
from rest_framework import viewsets
from rest_framework.response import Response
from utils.pagination import CustomPageNumberPagination
from categories.models import Category
from categories.serializers import CategoryListSerializer, CategoryDetailSerializer
class CategoryPageNumberPagination(CustomPageNumberPagination):
    """Pagination for category endpoints: up to 120 items per page."""
    page_size = 120
    max_page_size = 120
class CategoryViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only API for Category: list uses the list serializer,
    detail (retrieve) uses the detail serializer."""
    queryset = Category.objects.all()
    serializer_class = CategoryListSerializer  # serializer for list responses
    http_method_names = ['get',]  # expose GET only
    pagination_class = CategoryPageNumberPagination
    def retrieve(self, request, *args, **kwargs):
        """Return a single category rendered with the detail serializer."""
        instance = self.get_object()
        # Hand the request to the serializer via context (DRF convention).
        serializer = CategoryDetailSerializer(instance, context={'request': request})
        return Response(serializer.data)
| 34.56 | 85 | 0.797454 | 83 | 864 | 8.156627 | 0.542169 | 0.057607 | 0.050222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008086 | 0.141204 | 864 | 24 | 86 | 36 | 0.904313 | 0 | 0 | 0 | 0 | 0 | 0.011574 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.333333 | 0 | 0.888889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
dac41063ed019245b465f7bb070c5f27c9f76955 | 379 | py | Python | cms_genome_browser/cms_app.py | mfcovington/djangocms_genome_browser | 5485c9ba21cf610286cb706f0a6a70ba3a8c9891 | [
"BSD-3-Clause"
] | 4 | 2016-06-03T07:28:34.000Z | 2021-11-20T05:31:17.000Z | cms_genome_browser/cms_app.py | mfcovington/djangocms_genome_browser | 5485c9ba21cf610286cb706f0a6a70ba3a8c9891 | [
"BSD-3-Clause"
] | null | null | null | cms_genome_browser/cms_app.py | mfcovington/djangocms_genome_browser | 5485c9ba21cf610286cb706f0a6a70ba3a8c9891 | [
"BSD-3-Clause"
] | 2 | 2016-08-03T03:31:53.000Z | 2019-12-21T06:58:58.000Z | from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
from cms_genome_browser.menu import BrowsersMenu
class BrowserApp(CMSApp):
    """django CMS apphook that mounts the genome browser URLs and menu."""
    name = _("Genome Browser App")  # translatable label shown in the CMS admin
    urls = ["cms_genome_browser.urls"]
    app_name = "cms_genome_browser"
    menus = [BrowsersMenu]
# Register the apphook with the global pool so the CMS can attach it to pages.
apphook_pool.register(BrowserApp)
| 27.071429 | 55 | 0.783641 | 51 | 379 | 5.54902 | 0.470588 | 0.183746 | 0.169611 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.14248 | 379 | 13 | 56 | 29.153846 | 0.870769 | 0 | 0 | 0 | 0 | 0 | 0.155673 | 0.060686 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.9 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
dad06160e13a11e215fd2bc1d0480ad80ab41b7d | 2,633 | py | Python | infra/services/sysmon/cipd_metrics.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | infra/services/sysmon/cipd_metrics.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | infra/services/sysmon/cipd_metrics.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import json
import logging
import os
import sys
from infra_libs import ts_mon
# Where to look for cipd packages.
ALL_VERSION_DIRS = {
'win32': ['C:\\infra-python',
'C:\\infra-tools', # authutil cipd version file is here
'C:\\infra-tools\\.versions'],
'default': ['/opt/infra-python',
'/opt/infra-tools', # authutil cipd version file is here
'/opt/infra-tools/.versions',
],
}
package_instance_id = ts_mon.StringMetric(
'cipd/packages/deployed/instance_id',
description='instance ids of deployed packages.')
def list_cipd_versions(cipd_version_dir):
  """Collect paths to CIPD version files in a directory.

  Picks up a CIPD_VERSION.json file (if present) plus every
  *.cipd_version file.

  Args:
    cipd_version_dir (str): directory to scan.

  Returns:
    list of str: paths to the version files found.
  """
  found = []
  json_path = os.path.join(cipd_version_dir, 'CIPD_VERSION.json')
  if os.path.exists(json_path):
    found.append(json_path)
  pattern = os.path.join(cipd_version_dir, '*.cipd_version')
  found.extend(glob.glob(pattern))
  return found
# TODO(pgervais): Make a common function in infra_libs and use it here and in
# service_manager
def read_cipd_version(cipd_version_file):
  """Load and validate one CIPD_VERSION.json file.

  Args:
    cipd_version_file (str): path to a CIPD_VERSION.json file.

  Returns:
    dict with at least 'instance_id' and 'package_name' keys, or None if
    the file is unreadable, not valid JSON, or missing a required key.
  """
  try:
    with open(cipd_version_file) as fh:
      version_info = json.load(fh)
  except (OSError, ValueError):
    # Covers missing/unreadable files and malformed JSON alike.
    logging.exception('Failed to read file: %s', cipd_version_file)
    return None
  if all(key in version_info for key in ('instance_id', 'package_name')):
    return version_info
  logging.error('Missing key in version file: %s', cipd_version_file)
  return None
def get_cipd_summary():
  """Report the instance id of every deployed CIPD package as a metric."""
  dirs = ALL_VERSION_DIRS.get(sys.platform, ALL_VERSION_DIRS['default'])
  for directory in dirs:
    for version_file in list_cipd_versions(directory):
      version_info = read_cipd_version(version_file)
      if not version_info:
        # Unreadable or invalid version file; already logged upstream.
        continue
      package_instance_id.set(
          version_info['instance_id'],
          {'package_name': version_info['package_name']})
| 29.255556 | 79 | 0.696544 | 371 | 2,633 | 4.719677 | 0.326146 | 0.20731 | 0.077099 | 0.025129 | 0.175899 | 0.153055 | 0.118789 | 0.084523 | 0 | 0 | 0 | 0.002875 | 0.207368 | 2,633 | 89 | 80 | 29.58427 | 0.836128 | 0.317888 | 0 | 0.042553 | 0 | 0 | 0.197827 | 0.049171 | 0 | 0 | 0 | 0.011236 | 0 | 1 | 0.06383 | false | 0 | 0.12766 | 0 | 0.276596 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
daddea9b3c19d31af4a59228cacccd8c9bd91347 | 321 | py | Python | src/squash/api_v1/version.py | lsst-sqre/squash-rest-api | 4c42115418e8770da3fc42e8046e63e6d7e536d7 | [
"MIT"
] | null | null | null | src/squash/api_v1/version.py | lsst-sqre/squash-rest-api | 4c42115418e8770da3fc42e8046e63e6d7e536d7 | [
"MIT"
] | 22 | 2021-01-04T15:20:41.000Z | 2022-03-21T21:02:17.000Z | src/squash/api_v1/version.py | lsst-sqre/squash-rest-api | 4c42115418e8770da3fc42e8046e63e6d7e536d7 | [
"MIT"
] | null | null | null | from flask_restful import Resource
class Version(Resource):
    # Flask-RESTful resource exposing the API's default version string.
    def get(self):
        """
        Retrieve the default version of the API.
        ---
        tags:
          - Misc
        responses:
          200:
            description: Version successfully retrieved
        """
        # NOTE: the docstring above appears to double as the Swagger/OpenAPI
        # spec for this endpoint — keep its YAML layout intact when editing.
        return {"version": "1.0"}
| 20.0625 | 55 | 0.523364 | 30 | 321 | 5.566667 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025381 | 0.386293 | 321 | 15 | 56 | 21.4 | 0.822335 | 0.389408 | 0 | 0 | 0 | 0 | 0.080645 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
daf811fb4627374f08ea73bd18f5a54f1547e28c | 300 | py | Python | algorithms/strings/make_it_anagram.py | PlamenHristov/HackerRank | 2c875995f0d51d7026c5cf92348d9fb94fa509d6 | [
"MIT"
] | null | null | null | algorithms/strings/make_it_anagram.py | PlamenHristov/HackerRank | 2c875995f0d51d7026c5cf92348d9fb94fa509d6 | [
"MIT"
] | null | null | null | algorithms/strings/make_it_anagram.py | PlamenHristov/HackerRank | 2c875995f0d51d7026c5cf92348d9fb94fa509d6 | [
"MIT"
] | null | null | null | import collections, sys
if __name__ == '__main__':
A = sys.stdin.readline().strip()
B = sys.stdin.readline().strip()
a = collections.Counter(A)
b = collections.Counter(B)
length = sum(min(a[c], b[c]) for c in (set(A) & set(B)))
print((len(A) - length) + (len(B) - length)) | 25 | 60 | 0.59 | 45 | 300 | 3.755556 | 0.466667 | 0.094675 | 0.189349 | 0.248521 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.21 | 300 | 12 | 61 | 25 | 0.71308 | 0 | 0 | 0 | 0 | 0 | 0.026578 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0.125 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
970ff315680c30273213edd0b2f7cb82e2f91257 | 12,348 | py | Python | impedance/tests/test_validation.py | FTHuld/impedance.py | 78519fc88f37f8557d31cd2616e0d50de0671c35 | [
"MIT"
] | 110 | 2018-06-05T04:03:13.000Z | 2022-03-29T04:51:18.000Z | impedance/tests/test_validation.py | FTHuld/impedance.py | 78519fc88f37f8557d31cd2616e0d50de0671c35 | [
"MIT"
] | 167 | 2018-06-06T21:32:34.000Z | 2022-03-28T02:10:55.000Z | impedance/tests/test_validation.py | FTHuld/impedance.py | 78519fc88f37f8557d31cd2616e0d50de0671c35 | [
"MIT"
] | 70 | 2018-10-02T15:01:08.000Z | 2022-03-30T13:19:16.000Z | from impedance.validation import calc_mu, eval_linKK, residuals_linKK
from impedance.validation import get_tc_distribution, linKK, fit_linKK
from impedance import preprocessing
import numpy as np
import pytest
def test_eval_linKK():
    """eval_linKK/residuals_linKK should match a hand-built Voigt model."""
    Rs = np.array([1, 1, 2, 0.5, 0.1])
    ts = np.array([.1, .2])
    f = np.array([.01, 1000])
    w = 2 * np.pi * f
    # Ohmic term + two RC elements + inductive (1j*w*Rs[-1]) and
    # capacitive (Rs[-2]/(1j*w)) terms, evaluated directly.
    Z = Rs[0] + (Rs[1]/(1 + ts[0]*1j*w)) + (Rs[2]/(1 + ts[1]*1j*w)) + \
        1j*w*Rs[-1] + Rs[-2] / (1j*w)
    assert (eval_linKK(Rs, ts, f) == Z).all()
    # Residuals against perturbed data: real (default), imaginary, and
    # interleaved real/imag ('both').
    Z_data = Z + np.array([1 + 1j, 1 + 1j])
    assert np.isclose(residuals_linKK(Rs, ts, Z_data, f),
                      (Z_data - Z).real/np.abs(Z_data)).all()
    assert np.isclose(residuals_linKK(Rs, ts, Z_data, f, residuals='imag'),
                      (Z_data - Z).imag/np.abs(Z_data)).all()
    diff_real = (Z_data - Z).real/np.abs(Z_data)
    diff_imag = (Z_data - Z).imag/np.abs(Z_data)
    assert np.isclose(residuals_linKK(Rs, ts, Z_data, f, residuals='both'),
                      [diff_real[0], diff_imag[0], diff_real[1],
                       diff_imag[1]]).all()
def test_calc_mu():
    """Check calc_mu on balanced and half-negative resistance sets."""
    # Negative resistances exactly cancel the positives.
    assert calc_mu([1, 2, 3, -3, -2, -1]) == 0
    # Negatives sum to half the magnitude of the positives.
    assert calc_mu([-1, 2, 4, -3, 4, -1]) == 0.5
def test_get_tc_distribution():
    """Time constants should be log-spaced between 1/w_max and 1/w_min."""
    num_taus = 3
    freqs = np.array([1000, 100, 10, 1, .1]) / (2 * np.pi)
    expected_taus = np.array([0.001, 0.1, 10])
    result = get_tc_distribution(freqs, num_taus)
    assert (expected_taus == result).all()
def test_linKK():
    """Regression test for linKK against Schonleber's Lin_KK software."""
    # "True" values listed below are from Schonleber's Lin_KK software using
    # equivalent analysis options.
    fit_true = \
        np.array([1504.77272562, 1495.0921936, 1490.00925513, 1491.10559299,
                  1498.58811243, 1510.99839587, 1525.38888928, 1538.20266473,
                  1546.8146252, 1551.30214876, 1556.06735902, 1571.02993844,
                  1611.71804328, 1697.41876182, 1847.27253729, 2074.90120624,
                  2382.53062569, 2757.22269183, 3173.69682122, 3604.841718,
                  4032.7702192, 4450.8168238, 4855.22368647, 5234.98555099,
                  5569.5736037, 5836.53900962, 6021.90189107, 6125.58944854,
                  6160.67958136, 6149.14874076, 6115.96687044, 6082.88575023,
                  6064.08068366, 6064.93277516, 6083.22455438, 6111.60127879,
                  6140.78792197, 6162.82998495, 6173.20167035, 6171.33427209,
                  6160.03228389, 6144.2248715, 6129.26892121, 6119.3121128,
                  6116.34899865, 6120.09064219, 6128.37712222, 6137.9720589,
                  6145.61179356, 6148.94276639, 6146.99955138, 6140.22155443,
                  6132.74662155]) + \
        np.array([2.21601785e+01, -8.08838610e+00, -4.34187137e+01,
                  -8.26481327e+01, -1.24065234e+02, -1.66329887e+02,
                  -2.09588207e+02, -2.56542204e+02, -3.12958965e+02,
                  -3.87301923e+02, -4.89494687e+02, -6.28743075e+02,
                  -8.10378235e+02, -1.03243330e+03, -1.28325249e+03,
                  -1.54124807e+03, -1.77798369e+03, -1.96571203e+03,
                  -2.08731780e+03, -2.14154401e+03, -2.13776738e+03,
                  -2.08431712e+03, -1.98106122e+03, -1.82274574e+03,
                  -1.60947048e+03, -1.35439985e+03, -1.08242516e+03,
                  -8.22442371e+02, -5.99359860e+02, -4.28680765e+02,
                  -3.14010144e+02, -2.47955686e+02, -2.16112198e+02,
                  -2.02047736e+02, -1.91387718e+02, -1.74535420e+02,
                  -1.47914754e+02, -1.13376514e+02, -7.61122378e+01,
                  -4.22212394e+01, -1.67327105e+01, -2.24573095e+00,
                  1.62351816e+00, -2.23057042e+00, -9.59473214e+00,
                  -1.62154318e+01, -1.88331124e+01, -1.58905695e+01,
                  -7.73327665e+00, 3.80557278e+00, 1.61797537e+01,
                  2.69105999e+01, 3.28441808e+01]) * 1j
    # Expected fit when a series capacitance is added (add_cap=True).
    fit_true_cap = \
        np.array([1504.77272562, 1495.0921936, 1490.00925513, 1491.10559299,
                  1498.58811243, 1510.99839587, 1525.38888928, 1538.20266473,
                  1546.8146252, 1551.30214876, 1556.06735902, 1571.02993844,
                  1611.71804328, 1697.41876182, 1847.27253729, 2074.90120624,
                  2382.53062569, 2757.22269183, 3173.69682122, 3604.841718,
                  4032.7702192, 4450.8168238, 4855.22368647, 5234.98555099,
                  5569.5736037, 5836.53900962, 6021.90189107, 6125.58944854,
                  6160.67958136, 6149.14874076, 6115.96687044, 6082.88575023,
                  6064.08068366, 6064.93277516, 6083.22455438, 6111.60127879,
                  6140.78792197, 6162.82998495, 6173.20167035, 6171.33427209,
                  6160.03228389, 6144.2248715, 6129.26892121, 6119.3121128,
                  6116.34899865, 6120.09064219, 6128.37712222, 6137.9720589,
                  6145.61179356, 6148.94276639, 6146.99955138, 6140.22155443,
                  6132.74662155]) + \
        np.array([2.21609593e+01, -8.08783856e+00, -4.34183703e+01,
                  -8.26479751e+01, -1.24065253e+02, -1.66330085e+02,
                  -2.09588594e+02, -2.56542801e+02, -3.12959803e+02,
                  -3.87303047e+02, -4.89496157e+02, -6.28744970e+02,
                  -8.10380655e+02, -1.03243637e+03, -1.28325639e+03,
                  -1.54125299e+03, -1.77798990e+03, -1.96571986e+03,
                  -2.08732766e+03, -2.14155642e+03, -2.13778302e+03,
                  -2.08433681e+03, -1.98108601e+03, -1.82277695e+03,
                  -1.60950978e+03, -1.35444932e+03, -1.08248743e+03,
                  -8.22520773e+02, -5.99458563e+02, -4.28805025e+02,
                  -3.14166579e+02, -2.48152626e+02, -2.16360131e+02,
                  -2.02359865e+02, -1.91780666e+02, -1.75030113e+02,
                  -1.48537535e+02, -1.14160550e+02, -7.70992802e+01,
                  -4.34638524e+01, -1.82970675e+01, -4.21514039e+00,
                  -8.55820774e-01, -5.35187256e+00, -1.35242179e+01,
                  -2.11623597e+01, -2.50609297e+01, -2.37309367e+01,
                  -1.76037019e+01, -8.62054511e+00, 5.36182534e-01,
                  7.21647174e+00, 9.37882405e+00]) * 1j
    f, Z = preprocessing.readZPlot('../impedance.py/data/Circuit3_EIS_1.z')
    resids_true_re = (Z - fit_true).real / np.abs(Z)
    resids_true_im = (Z - fit_true).imag / np.abs(Z)
    M, mu, Z_fit, resids_re, resids_im = linKK(f, Z, c=.69)
    assert np.isclose(fit_true, Z_fit).all()
    assert np.isclose(resids_true_re, resids_re).all()
    assert np.isclose(resids_true_im, resids_im).all()
    assert M == 10
    assert np.isclose(mu, 0.6843821117885378)
    # Test linKK with added capacitance
    resids_true_re = (Z - fit_true_cap).real / np.abs(Z)
    resids_true_im = (Z - fit_true_cap).imag / np.abs(Z)
    M, mu, Z_fit, resids_re, resids_im = linKK(f, Z, c=.69, add_cap=True)
    assert np.isclose(fit_true_cap, Z_fit).all()
    assert np.isclose(resids_true_re, resids_re).all()
    assert np.isclose(resids_true_im, resids_im).all()
    assert M == 10
    assert np.isclose(mu, 0.6843821117885381)
    # Specifying M explicitly should give the same results as above.
    M, mu, Z_fit, resids_re, resids_im = \
        linKK(f, Z, c=None, max_M=10, add_cap=True)
    assert np.isclose(fit_true_cap, Z_fit).all()
    assert np.isclose(resids_true_re, resids_re).all()
    assert np.isclose(resids_true_im, resids_im).all()
    assert M == 10
    assert np.isclose(mu, 0.6843821117885381)
def test_fit_linKK():
    """Regression test for fit_linKK's 'imag' and 'complex' fit modes."""
    # "True" values listed below are from Schonleber's Lin_KK software using
    # equivalent analysis options.
    fit_true_im = \
        np.array([1892.16670857, 1735.17866073, 1595.34869919, 1483.62092725,
                  1404.08871138, 1356.09108761, 1337.54493602, 1347.5270314,
                  1387.51185068, 1461.26044599, 1573.22397804, 1725.2980715,
                  1912.70463125, 2121.67049406, 2332.09328647, 2524.93169072,
                  2689.34607069, 2825.0091077, 2939.93788724, 3047.13537702,
                  3162.32135172, 3302.86711448, 3486.69461487, 3729.45394636,
                  4038.84161501, 4407.36385786, 4808.72682252, 5203.80832846,
                  5555.08167692, 5839.63432187, 6052.37916535, 6200.66510126,
                  6296.6956546, 6352.15110256, 6375.78018599, 6373.11446064,
                  6347.45708565, 6301.65201971, 6240.05239619, 6169.58830428,
                  6098.85243941, 6035.55826473, 5984.31870647, 5946.26167969,
                  5920.17874695, 5903.99645739, 5895.7929968, 5894.24236205,
                  5898.63972971, 5908.64317923, 5923.81098604, 5943.08676257,
                  5959.35300613]) + \
        np.array([-70.4776521, 23.83024186, 50.84494114, 25.88677919,
                  -35.74962294, -122.45215854, -227.18178042, -346.26102872,
                  -477.20636325, -616.29028254, -756.32307243, -885.6167092,
                  -989.70684567, -1056.65273354, -1083.59255493,
                  -1079.56960769, -1062.07484708, -1050.35335609,
                  -1060.22332082, -1102.06593845, -1180.63979118,
                  -1294.82054257, -1436.27259811, -1587.4498159,
                  -1721.03366501, -1804.20226876, -1809.32501067,
                  -1726.38772704, -1567.6426133, -1360.28285481,
                  -1133.62730069, -909.94086692, -701.95825097, -514.79535919,
                  -349.19837452, -204.47489479, -80.59143375, 20.81530157,
                  96.87089028, 145.37359197, 167.07538881, 166.46139247,
                  150.31388332, 125.3565128, 96.65765081, 67.28992165,
                  38.78343211, 11.81232837, -13.17831776, -35.51127909,
                  -54.0704255, -67.48447146, -73.57222491]) * 1j
    fit_true_comp = \
        np.array([1636.99559739, 1554.18810328, 1481.23911417, 1424.3900838,
                  1386.42808216, 1367.90765932, 1369.06145295, 1391.315147,
                  1438.01800827, 1514.22980492, 1625.24547239, 1773.58370231,
                  1955.13366155, 2157.00804753, 2360.1665209, 2546.56349679,
                  2706.05172614, 2838.65071263, 2952.5228364, 3060.81514474,
                  3179.54588511, 3326.61091254, 3520.64327344, 3777.99027267,
                  4106.63312453, 4498.45313082, 4925.36485549, 5345.65471926,
                  5719.32098597, 6021.92053781, 6247.97683411, 6405.23218601,
                  6506.56600035, 6564.25933403, 6587.42707198, 6581.72881809,
                  6550.47463369, 6496.61170472, 6424.9457516, 6343.33583694,
                  6261.61536605, 6188.64620036, 6129.75023804, 6086.26557292,
                  6056.87606879, 6039.31722373, 6031.55856349, 6032.31830118,
                  6041.07049939, 6057.67650742, 6081.71810383, 6111.74726757,
                  6136.91206761]) + \
        np.array([17.18695412, 35.89845324, 19.93303201, -24.04618822,
                  -89.31690932, -171.05398322, -266.73687047, -375.31778525,
                  -495.7085041, -624.93230106, -756.32867662, -878.75805143,
                  -978.36531405, -1043.72669304, -1072.17384645,
                  -1072.52206761, -1061.75528827, -1058.59331398,
                  -1078.4942003, -1131.66997952, -1222.80054094,
                  -1350.59588738, -1506.2444795, -1671.1947291,
                  -1816.50610085, -1907.3683077, -1914.47475074,
                  -1827.24555106, -1658.77755869, -1438.04893513,
                  -1196.28141241, -957.17525874, -734.28460084, -533.0719019,
                  -354.39918838, -197.69465823, -63.23427094, 46.76632237,
                  128.68462199, 179.70585913, 200.45381383, 195.88528088,
                  173.62890483, 141.26465518, 104.48271295, 66.69820353,
                  29.59329382, -6.05425717, -39.59351444, -69.9122427,
                  -95.18725792, -113.19938882, -120.94168923]) * 1j
    f, Z = preprocessing.readZPlot('../impedance.py/data/Circuit3_EIS_1.z')
    taus = get_tc_distribution(f, 5)
    # An unsupported fit_type raises a ValueError.
    with pytest.raises(ValueError):
        fit_linKK(f, taus, 5, Z, fit_type='abcd')
    p_values, _ = fit_linKK(f, taus, 5, Z, fit_type='imag')
    assert np.isclose(fit_true_im, eval_linKK(p_values, taus, f)).all()
    p_values, _ = fit_linKK(f, taus, 5, Z, fit_type='complex')
    assert np.isclose(fit_true_comp, eval_linKK(p_values, taus, f)).all()
| 52.322034 | 78 | 0.590298 | 1,580 | 12,348 | 4.531646 | 0.462025 | 0.007542 | 0.035615 | 0.017598 | 0.374581 | 0.364525 | 0.354469 | 0.347765 | 0.339106 | 0.332402 | 0 | 0.553213 | 0.270246 | 12,348 | 235 | 79 | 52.544681 | 0.241372 | 0.026563 | 0 | 0.222798 | 0 | 0 | 0.008075 | 0.006161 | 0 | 0 | 0 | 0 | 0.124352 | 1 | 0.025907 | false | 0 | 0.025907 | 0 | 0.051813 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
971d37fa04103935ccffef4cfa04a607744fd19e | 2,124 | py | Python | gillespie.py | tm-kn/gillespie-ste | b51b4e2c1cd9a12d30e0ac07cd63f1ee9e2bbd52 | [
"MIT"
] | 1 | 2021-09-27T19:47:39.000Z | 2021-09-27T19:47:39.000Z | gillespie.py | tm-kn/gillespie-ste | b51b4e2c1cd9a12d30e0ac07cd63f1ee9e2bbd52 | [
"MIT"
] | 1 | 2021-03-10T13:08:52.000Z | 2021-03-10T13:08:52.000Z | gillespie.py | tm-kn/gillespie-ste | b51b4e2c1cd9a12d30e0ac07cd63f1ee9e2bbd52 | [
"MIT"
] | null | null | null | import math
import random
class GillespiePoint:
    """One sampled state of the SIR Gillespie simulation."""

    _FIELDS = (
        "time",
        "susceptible_population",
        "infected_population",
        "recovered_population",
    )

    def __init__(
        self,
        *,
        time: float,
        susceptible_population: int,
        infected_population: int,
        recovered_population: int
    ):
        self.time = time
        self.susceptible_population = susceptible_population
        self.infected_population = infected_population
        self.recovered_population = recovered_population

    def as_dict(self):
        """Return the point's fields as a plain dictionary."""
        return {name: getattr(self, name) for name in self._FIELDS}
def gillespie(
    *,
    population: int,
    maximum_elapsed_time: float,
    start_time: float,
    spatial_parameter: float,
    rate_of_infection_after_contact: float,
    rate_of_cure: float,
    infected_population: int
):
    """Run a Gillespie (SSA) simulation of an SIR epidemic.

    Yields a GillespiePoint for the initial state and after each reaction
    event, until the infection dies out or `maximum_elapsed_time` passes.
    """
    susceptible_population = population - infected_population
    recovered = 0
    yield GillespiePoint(
        time=start_time,
        susceptible_population=susceptible_population,
        infected_population=infected_population,
        recovered_population=recovered,
    )
    time = start_time
    while time < maximum_elapsed_time:
        if infected_population == 0:
            break
        # Reaction propensities: w1 = infection, w2 = recovery.
        w1 = (
            rate_of_infection_after_contact
            * susceptible_population
            * infected_population
            / spatial_parameter
        )
        w2 = rate_of_cure * infected_population
        w = w1 + w2
        # Waiting time ~ Exp(w). Sample with 1.0 - random.random(), which
        # lies in (0.0, 1.0]; random.uniform(0.0, 1.0) can return exactly
        # 0.0 and would make math.log raise a domain error.
        dt = -math.log(1.0 - random.random()) / w
        time += dt
        if random.uniform(0.0, 1.0) < w1 / w:
            # Infection event: S -> I.
            susceptible_population = susceptible_population - 1
            infected_population = infected_population + 1
        else:
            # Recovery event: I -> R.
            infected_population = infected_population - 1
            recovered += 1
        yield GillespiePoint(
            time=time,
            susceptible_population=susceptible_population,
            infected_population=infected_population,
            recovered_population=recovered,
        )
| 29.5 | 66 | 0.632298 | 199 | 2,124 | 6.427136 | 0.21608 | 0.253323 | 0.218921 | 0.140735 | 0.298671 | 0.198593 | 0.172009 | 0.172009 | 0.172009 | 0.172009 | 0 | 0.012847 | 0.303672 | 2,124 | 71 | 67 | 29.915493 | 0.851927 | 0 | 0 | 0.181818 | 0 | 0 | 0.030603 | 0.010358 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0 | 0.030303 | 0.015152 | 0.106061 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
972ba77170770a5491170530fa389d3992029025 | 616 | py | Python | core/initializers.py | JohnBoxAnn/TSGL-EEGNet | 8920947338c7167ef1c344237e9351d21de85a8e | [
"MIT"
] | 3 | 2020-09-03T03:16:11.000Z | 2021-11-16T00:02:36.000Z | core/initializers.py | JohnBoxAnn/TSGL-EEGNet | 8920947338c7167ef1c344237e9351d21de85a8e | [
"MIT"
] | 1 | 2021-10-14T01:52:34.000Z | 2021-12-03T14:07:36.000Z | core/initializers.py | JohnBoxAnn/TSGL-EEGNet | 8920947338c7167ef1c344237e9351d21de85a8e | [
"MIT"
] | 4 | 2020-12-09T02:09:28.000Z | 2021-12-13T01:44:47.000Z | # coding:utf-8
import tensorflow as tf
from tensorflow.python.keras.api._v2.keras.initializers import Initializer
class EmbeddingInit(Initializer):
    """Keras initializer that yields a fixed, pre-computed embedding matrix.

    Useful for seeding an embedding layer with pretrained weights.
    """
    def __init__(self, embeddings):
        # Pre-computed weight matrix; its shape must match the variable
        # being initialized.
        self.embeddings = embeddings

    def __call__(self, shape, dtype):
        """Return the stored embeddings as a tensor of `dtype`.

        Raises:
            ValueError: if `shape` differs from the embeddings' shape.
        """
        if self.embeddings.shape != shape:
            raise ValueError(self.embeddings.shape, shape)
        return tf.convert_to_tensor(self.embeddings, dtype=dtype)

    def get_config(self):
        config = {'embeddings': self.embeddings}
        base_config = super().get_config()
        base_config.update(config)
        # NOTE(review): the raw matrix may not be JSON-serializable when
        # saving a model config — confirm if serialization is required.
        return base_config
return base_config | 29.333333 | 74 | 0.694805 | 73 | 616 | 5.643836 | 0.493151 | 0.203884 | 0.116505 | 0.116505 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004115 | 0.211039 | 616 | 21 | 75 | 29.333333 | 0.843621 | 0.019481 | 0 | 0 | 0 | 0 | 0.016584 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0.142857 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
972d902370625eccde1344400ab33f46d9d3ba3b | 1,884 | py | Python | train.py | Panda0406/RE-CNN-empirical | 8c03fa8049d716dd4b95e6752b2d9bd87c122e6b | [
"MIT"
] | 8 | 2019-02-09T11:18:33.000Z | 2019-12-30T10:57:43.000Z | train.py | Panda0406/RE-CNN-empirical | 8c03fa8049d716dd4b95e6752b2d9bd87c122e6b | [
"MIT"
] | null | null | null | train.py | Panda0406/RE-CNN-empirical | 8c03fa8049d716dd4b95e6752b2d9bd87c122e6b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import division
import os,sys
from copy import deepcopy
import random
import numpy as np
from args import load_hyperparameters
from gen_data import load_data
from classifier import Extractor
import torch
import torch.nn as nn
import torch.optim as optim
import torch.utils.data as Data
import torch.nn.functional as F
from torch.autograd import Variable
import torchvision.transforms as T
import torch.optim as optim
# if gpu is to be used
use_cuda = torch.cuda.is_available()  # True when a CUDA device is present
# Tensor constructors that transparently pick the GPU or CPU variant.
FloatTensor = torch.cuda.FloatTensor if use_cuda else torch.FloatTensor
LongTensor = torch.cuda.LongTensor if use_cuda else torch.LongTensor
ByteTensor = torch.cuda.ByteTensor if use_cuda else torch.ByteTensor
if __name__ == "__main__":
    # NOTE: Python 2 script (print statements below).
    args = load_hyperparameters()
    # if gpu is to be used
    if use_cuda:
        torch.cuda.set_device(args.device)
        print "GPU is available!"
    else:
        print "GPU is not available!"
    # "./models" saves the intermediate model files
    if not os.path.exists(args.model_save_path):
        os.mkdir(args.model_save_path)
    if not os.path.exists(args.result_save_path):
        os.mkdir(args.result_save_path)
    # Seed every RNG source (and force deterministic cuDNN) so runs repeat.
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed_all(args.seed)
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.backends.cudnn.deterministic=True
    print "loading data..."
    #fw2v = '../GoogleNews-vectors-negative300.bin'
    fw2v = './data/word_vecs.pkl'
    ftrain = './SemEval2010_task8_all_data/SemEval2010_task8_training/TRAIN_FILE.TXT'
    ftest = './SemEval2010_task8_all_data/SemEval2010_task8_testing_keys/TEST_FILE_FULL.TXT'
    frela = './data/rela2id'
    # Build the embedding matrix and id mappings, then train the extractor.
    W_matrix, s2id_m, l2id_m, rela2id, id2rela = load_data(ftrain, ftest, frela, fw2v, args)
    Classifier = Extractor(W_matrix, s2id_m, l2id_m, id2rela, args)
    Classifier.train()
| 31.4 | 92 | 0.740977 | 276 | 1,884 | 4.855072 | 0.358696 | 0.049254 | 0.026866 | 0.029104 | 0.240299 | 0.137313 | 0 | 0 | 0 | 0 | 0 | 0.022336 | 0.168259 | 1,884 | 59 | 93 | 31.932203 | 0.832802 | 0.082272 | 0 | 0.045455 | 0 | 0 | 0.141033 | 0.085897 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.363636 | null | null | 0.068182 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
9744104ffb864b7ecc69228e7cc5b36a099c1042 | 155 | py | Python | output/models/ms_data/attribute/att_q014_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/ms_data/attribute/att_q014_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/ms_data/attribute/att_q014_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from output.models.ms_data.attribute.att_q014_xsd.att_q014 import (
Doc,
InternationalPrice,
)
__all__ = [
"Doc",
"InternationalPrice",
]
| 15.5 | 67 | 0.690323 | 17 | 155 | 5.823529 | 0.764706 | 0.141414 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.048 | 0.193548 | 155 | 9 | 68 | 17.222222 | 0.744 | 0 | 0 | 0 | 0 | 0 | 0.135484 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
975d0af0149628e636f7bccb0fc47a5c73ee325f | 2,832 | py | Python | preprocessing/2.text_cleaning.py | Geneseo/Covid19 | cb69e58e1a7aa3196045111f96a861dc7c8560dd | [
"MIT"
] | 1 | 2020-12-20T05:04:02.000Z | 2020-12-20T05:04:02.000Z | preprocessing/2.text_cleaning.py | Geneseo/Covid19 | cb69e58e1a7aa3196045111f96a861dc7c8560dd | [
"MIT"
] | null | null | null | preprocessing/2.text_cleaning.py | Geneseo/Covid19 | cb69e58e1a7aa3196045111f96a861dc7c8560dd | [
"MIT"
] | null | null | null |
# Cleansing
# Remove URLs, usernames, "Not Available" rows, special characters, and numbers.
class TwitterCleanuper:
    """Regex-based cleanup steps for a tweets dataframe.

    Each step takes a pandas DataFrame with a "text" column and returns
    the cleaned frame.
    """

    def iterate(self):
        """Yield each cleanup method so callers can apply them in order."""
        for cleanup_method in [self.remove_urls,
                               self.remove_usernames,
                               self.remove_na,
                               self.remove_special_chars,
                               self.remove_numbers]:
            yield cleanup_method

    # Fix: this helper took no `self` but was defined as a plain method, so
    # instance access (self.remove_by_regex) would pass the instance as
    # `tweets`.  @staticmethod keeps the existing class-level calls working
    # and makes instance access safe too.
    @staticmethod
    def remove_by_regex(tweets, regexp):
        """Delete every match of `regexp` from the text column, in place."""
        tweets.loc[:, "text"].replace(regexp, "", inplace=True)
        return tweets

    def remove_urls(self, tweets):
        """Strip http(s):// URLs."""
        return TwitterCleanuper.remove_by_regex(tweets, regex.compile(r"http.?://[^\s]+[\s]?"))

    def remove_na(self, tweets):
        """Drop rows whose text is the literal "Not Available"."""
        return tweets[tweets["text"] != "Not Available"]

    def remove_special_chars(self, tweets):  # it unrolls the hashtags to normal words
        """Remove punctuation/special characters (including '#')."""
        for remove in map(lambda r: regex.compile(regex.escape(r)), [",", ":", "\"", "=", "&", ";", "%", "$",
                                                                     "@", "%", "^", "*", "(", ")", "{", "}",
                                                                     "[", "]", "|", "/", "\\", ">", "<", "-",
                                                                     "!", "?", ".", "'", "_", "\n", "RT",
                                                                     "--", "---", "#"]):
            tweets.loc[:, "text"].replace(remove, "", inplace=True)
        return tweets

    def remove_usernames(self, tweets):
        """Strip @username mentions."""
        return TwitterCleanuper.remove_by_regex(tweets, regex.compile(r"@[^\s]+[\s]?"))

    def remove_numbers(self, tweets):
        """Strip integers and decimal numbers."""
        return TwitterCleanuper.remove_by_regex(tweets, regex.compile(r"\s?[0-9]+\.?[0-9]*"))
class TwitterData_Cleansing(TwitterData_Initialize):
    """Pipeline stage that runs a TwitterCleanuper over the processed data."""

    def __init__(self, previous):
        # Carry the dataframe forward from the previous pipeline stage.
        self.processed_data = previous.processed_data

    def cleanup(self, cleanuper):
        """Apply every cleanup method; skip the NA filter in testing mode."""
        frame = self.processed_data
        for method in cleanuper.iterate():
            if self.is_testing and method.__name__ == "remove_na":
                continue
            frame = method(frame)
        self.processed_data = frame
        # Strip non-ASCII (e.g. CJK) characters from the tweet text.
        self.processed_data['text'] = self.processed_data['text'].str.replace(r'[^\x00-\x7F]+', '')
# Run the full cleansing pipeline over the loaded tweet data.
data = TwitterData_Cleansing(data)
data.cleanup(TwitterCleanuper()) # apply every cleanup step in order
# Remove unnecessary whitespace around each tweet's text.
def ls_strip(text):
    """Return *text* with leading and trailing whitespace removed."""
    return text.strip()
data.processed_data['text'] = data.processed_data.apply(lambda row: ls_strip(row['text']), axis=1)
# lower case
def lower_case(x):
return x.lower()
data.processed_data['text'] = data.processed_data.apply(lambda row: lower_case(row['text']), axis=1)
| 38.27027 | 109 | 0.521893 | 282 | 2,832 | 5.049645 | 0.287234 | 0.091292 | 0.059691 | 0.053371 | 0.254213 | 0.254213 | 0.20927 | 0.20927 | 0.20927 | 0.20927 | 0 | 0.004717 | 0.326271 | 2,832 | 73 | 110 | 38.794521 | 0.741614 | 0.073799 | 0 | 0.083333 | 0 | 0.020833 | 0.063553 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.229167 | false | 0 | 0 | 0.125 | 0.4375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
975ff2dcba6b45752527510ea946f520f8b3ecc0 | 2,753 | py | Python | GetOldTweets3/manager/TweetCriteria.py | giulionf/GetOldTweets3 | 038b8fed7da27300e6c611d3c0fd617588075a58 | [
"MIT"
] | null | null | null | GetOldTweets3/manager/TweetCriteria.py | giulionf/GetOldTweets3 | 038b8fed7da27300e6c611d3c0fd617588075a58 | [
"MIT"
] | null | null | null | GetOldTweets3/manager/TweetCriteria.py | giulionf/GetOldTweets3 | 038b8fed7da27300e6c611d3c0fd617588075a58 | [
"MIT"
class TweetCriteria:
    """Holds the search parameters for a tweet query.

    Every ``set_*`` method stores its value and returns ``self`` so that
    calls can be chained, e.g.::

        TweetCriteria().set_username('barackobama').set_max_tweets(10)
    """

    def __init__(self):
        # Defaults: no tweet limit, recent (not top) tweets, and a
        # 15-mile radius whenever a location is supplied.
        self.max_tweets = 0
        self.top_tweets = False
        self.within = "15mi"
        self.username = None
        self.since = None
        self.until = None
        self.near = None
        self.query_search = None
        self.lang = None

    def set_username(self, username):
        """Restrict the search to one or more tweet authors.

        ``username`` is either a str (a single handle, or several handles
        separated by spaces or commas) or an iterable of handles; a leading
        ``@`` is accepted. Returns ``self``.
        """
        self.username = username
        return self

    def set_since(self, since):
        """Lower bound date in UTC, formatted ``"yyyy-mm-dd"``. Returns ``self``."""
        self.since = since
        return self

    def set_until(self, until):
        """Upper bound date in UTC, ``"yyyy-mm-dd"`` (excluded from results). Returns ``self``."""
        self.until = until
        return self

    def set_near(self, near):
        """Location to search nearby, e.g. ``"Berlin, Germany"``. Returns ``self``."""
        self.near = near
        return self

    def set_within(self, within):
        """Radius for the location search, e.g. ``"15mi"``. Returns ``self``."""
        self.within = within
        return self

    def set_query_search(self, query_search):
        """Free text to be searched for. Returns ``self``."""
        self.query_search = query_search
        return self

    def set_max_tweets(self, max_tweets):
        """Maximum number of tweets to search (int). Returns ``self``."""
        self.max_tweets = max_tweets
        return self

    def set_lang(self, lang):
        """Language filter (str). Returns ``self``."""
        self.lang = lang
        return self

    def set_top_tweets(self, top_tweets):
        """Flag to search only for top tweets (bool). Returns ``self``."""
        self.top_tweets = top_tweets
        return self
| 24.801802 | 75 | 0.515438 | 286 | 2,753 | 4.853147 | 0.283217 | 0.038905 | 0.074928 | 0.092219 | 0.099424 | 0.099424 | 0 | 0 | 0 | 0 | 0 | 0.002924 | 0.378859 | 2,753 | 110 | 76 | 25.027273 | 0.808772 | 0.409372 | 0 | 0.236842 | 0 | 0 | 0.003387 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.263158 | false | 0 | 0 | 0 | 0.526316 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
97640101c4f9fd2e2fe2a3a4a5fcec4b9f613628 | 2,351 | py | Python | python/src/main/python/pyalink/alink/stream/special_operators.py | wenwei8268/Alink | c00702538c95a32403985ebd344eb6aeb81749a7 | [
"Apache-2.0"
] | null | null | null | python/src/main/python/pyalink/alink/stream/special_operators.py | wenwei8268/Alink | c00702538c95a32403985ebd344eb6aeb81749a7 | [
"Apache-2.0"
] | null | null | null | python/src/main/python/pyalink/alink/stream/special_operators.py | wenwei8268/Alink | c00702538c95a32403985ebd344eb6aeb81749a7 | [
"Apache-2.0"
] | null | null | null | from .common import FtrlPredictStreamOp as _FtrlPredictStreamOp
from .common import FtrlTrainStreamOp as _FtrlTrainStreamOp
from .common import PyScalarFnStreamOp as _PyScalarFnStreamOp
from .common import PyTableFnStreamOp as _PyTableFnStreamOp
from ..py4j_util import get_java_class
from ..stream import StreamOperator
from ..udf.utils import do_set_op_udf, do_set_op_udtf
__all__ = ['UDFStreamOp', 'UDTFStreamOp', 'FtrlTrainStreamOp', 'FtrlPredictStreamOp', 'TableSourceStreamOp']
class UDFStreamOp(_PyScalarFnStreamOp):
    """Stream operator that applies a user-defined scalar function (UDF)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def setFunc(self, val):
        """Set the UDF: an object with an ``eval`` attribute, a lambda
        function, or a PyFlink udf object."""
        return do_set_op_udf(self, val)
class UDTFStreamOp(_PyTableFnStreamOp):
    """Stream operator that applies a user-defined table function (UDTF)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def setFunc(self, val):
        """Set the UDTF: an object with an ``eval`` attribute or a
        lambda function."""
        return do_set_op_udtf(self, val)
class FtrlTrainStreamOp(_FtrlTrainStreamOp):
    """FTRL online-training operator that remembers its initial model
    operator so linkFrom can prepend it to the inputs."""

    def __init__(self, model, *args, **kwargs):
        # Keep a handle on the initial-model operator for linkFrom.
        self.model = model
        super().__init__(model=model, *args, **kwargs)

    def linkFrom(self, *args):
        # The model operator is always the first input, then the data inputs.
        self.inputs = [self.model, *args]
        return super().linkFrom(*args)
class FtrlPredictStreamOp(_FtrlPredictStreamOp):
    """FTRL online-prediction operator that remembers its model operator
    so linkFrom can prepend it to the inputs."""

    def __init__(self, model, *args, **kwargs):
        # Keep a handle on the model operator for linkFrom.
        self.model = model
        super().__init__(model=model, *args, **kwargs)

    def linkFrom(self, *args):
        # The model operator is always the first input, then the data inputs.
        self.inputs = [self.model, *args]
        return super().linkFrom(*args)
class TableSourceStreamOp(StreamOperator):
    """Stream source operator backed by an existing PyFlink Table.

    Raises ValueError if ``table`` is not a PyFlink Table.
    """

    def __init__(self, table, *args, **kwargs):
        # Function-scope import, as in the rest of this module's style.
        from pyflink.table import Table
        if not isinstance(table, Table):
            raise ValueError("Invalid table: only accept PyFlink Table")
        j_cls = get_java_class("com.alibaba.alink.operator.stream.source.TableSourceStreamOp")
        # noinspection PyProtectedMember
        super().__init__(j_op=j_cls(table._j_table), *args, **kwargs)
| 37.31746 | 115 | 0.706508 | 266 | 2,351 | 5.93609 | 0.24812 | 0.063331 | 0.034832 | 0.012666 | 0.277391 | 0.249525 | 0.216593 | 0.216593 | 0.216593 | 0.168461 | 0 | 0.000525 | 0.190132 | 2,351 | 62 | 116 | 37.919355 | 0.828782 | 0.069332 | 0 | 0.3 | 0 | 0 | 0.083139 | 0.028024 | 0 | 0 | 0 | 0 | 0 | 1 | 0.225 | false | 0 | 0.2 | 0 | 0.65 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.