hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
71cbd7f3b7f112a1376f90c79cee886cc107a23b | 553 | py | Python | worlds/migrations/0012_auto_20210804_1907.py | cognitive-space/warpzone | 06acee2add83cf9ddf981b4e4187dd742e627561 | [
"MIT"
] | 1 | 2022-02-25T12:04:13.000Z | 2022-02-25T12:04:13.000Z | worlds/migrations/0012_auto_20210804_1907.py | cognitive-space/warpzone | 06acee2add83cf9ddf981b4e4187dd742e627561 | [
"MIT"
] | null | null | null | worlds/migrations/0012_auto_20210804_1907.py | cognitive-space/warpzone | 06acee2add83cf9ddf981b4e4187dd742e627561 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.6 on 2021-08-04 19:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('worlds', '0011_alter_job_status'),
]
operations = [
migrations.AddField(
model_name='job',
name='failed',
field=models.PositiveSmallIntegerField(default=0),
),
migrations.AddField(
model_name='job',
name='succeeded',
field=models.PositiveSmallIntegerField(default=0),
),
]
| 23.041667 | 62 | 0.584087 | 53 | 553 | 6 | 0.660377 | 0.113208 | 0.144654 | 0.169811 | 0.490566 | 0.213836 | 0 | 0 | 0 | 0 | 0 | 0.054545 | 0.303797 | 553 | 23 | 63 | 24.043478 | 0.771429 | 0.081374 | 0 | 0.470588 | 1 | 0 | 0.094862 | 0.041502 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.058824 | 0 | 0.235294 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
71d1f08a1eacab1f41a2f0a1d89f6295f9fd4610 | 208 | py | Python | src/rmclib/RMState.py | realead/rmc | 6dafa2a4b5ab7199e86e86a4c10388bc8e472bb6 | [
"MIT"
] | null | null | null | src/rmclib/RMState.py | realead/rmc | 6dafa2a4b5ab7199e86e86a4c10388bc8e472bb6 | [
"MIT"
] | null | null | null | src/rmclib/RMState.py | realead/rmc | 6dafa2a4b5ab7199e86e86a4c10388bc8e472bb6 | [
"MIT"
] | null | null | null | #stores the current state of the register machine for the interpreter
class RMState:
def __init__(self, REGS):
self.b = 1
self.acc = 0
self.REGS = REGS
self.ended = False
| 23.111111 | 69 | 0.620192 | 29 | 208 | 4.310345 | 0.724138 | 0.128 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013986 | 0.3125 | 208 | 8 | 70 | 26 | 0.86014 | 0.326923 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
71d9dd63a036674b5b5bc99e702ed1f69fb03a42 | 1,226 | py | Python | oatomobile/tf/types.py | jannikwagner/oatomobile | 971fa3941ad5c6476a3963bbd11181bb607311b9 | [
"Apache-2.0"
] | 152 | 2020-07-11T06:30:19.000Z | 2022-03-26T15:44:45.000Z | oatomobile/tf/types.py | jannikwagner/oatomobile | 971fa3941ad5c6476a3963bbd11181bb607311b9 | [
"Apache-2.0"
] | 13 | 2020-07-31T00:10:55.000Z | 2022-02-22T13:58:22.000Z | oatomobile/tf/types.py | jannikwagner/oatomobile | 971fa3941ad5c6476a3963bbd11181bb607311b9 | [
"Apache-2.0"
] | 36 | 2020-07-12T10:51:58.000Z | 2022-02-18T03:51:45.000Z | # Copyright 2020 The OATomobile Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Type definitions used in baselines."""
from typing import Any
from typing import Iterable
from typing import Mapping
from typing import Union
import numpy as np
import tensorflow as tf
from oatomobile import types
Shape = types.Shape
Tensor = tf.Tensor
Array = Union[types.Scalar, np.ndarray, Tensor]
NestedArray = Union[Array, Iterable["NestedArray"], Mapping[Any, "NestedArray"]]
NestedTensor = Union[Tensor, Iterable["NestedTensor"], Mapping[Any,
"NestedTensor"]]
| 37.151515 | 80 | 0.680261 | 156 | 1,226 | 5.346154 | 0.564103 | 0.071942 | 0.076739 | 0.038369 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008016 | 0.185971 | 1,226 | 32 | 81 | 38.3125 | 0.827655 | 0.568516 | 0 | 0 | 0 | 0 | 0.090196 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.538462 | 0 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
71da370045d504ed3ecc793251780b8b7561355f | 1,869 | py | Python | tutorials/classes_intro.py | shiblon/pytour | 71a181ec16fd38b0af62f55e28a50e91790733b9 | [
"Apache-2.0"
] | 2 | 2016-04-30T00:12:50.000Z | 2018-11-14T20:47:55.000Z | tutorials/classes_intro.py | shiblon/pytour | 71a181ec16fd38b0af62f55e28a50e91790733b9 | [
"Apache-2.0"
] | 2 | 2020-02-17T22:31:09.000Z | 2020-02-18T04:31:55.000Z | tutorials/classes_intro.py | shiblon/pytour | 71a181ec16fd38b0af62f55e28a50e91790733b9 | [
"Apache-2.0"
] | 3 | 2018-03-26T17:41:40.000Z | 2019-06-28T12:53:47.000Z | # vim:tw=50
"""Classes are Types
Let's move on to **classes**. We've been using
them already without directly talking about it, so
let's get down to what they really are.
In general, you can think of a class as a
**type**. This is, of course, merely a useful
fiction because it hides subtlety, but it is still
a great way to think about it, because classes
allow you to create a bunch of things that are the
same _kind_ or _type_ of thing. We'll learn how to
make our own types in the coming slides.
Calling a class makes a new **instance** of it.
If you think of a class as a blueprint for, say, a
house, an instance is the actual house you build
by following the plan.
Some basic properties of classes are demonstrated
in the example code by looking at |ValueError|,
which is a class we've seen and used before.
You've seen a lot of other classes already, such
as |list|, |tuple|, |dict|, |int|, |float|, and
others. We've been referring to them as
"callables", because they are, but that's because
_all_ classes are callable: calling one creates an
instance.
"""
# What is this type of thing anyway?
print "What's a ValueError class?"
print " ", repr(ValueError)
# Make a new instance of ValueError by calling it.
ex = ValueError("My super informative error message")
# What is this?
# Note how "repr" in this case shows you how to
# make one, which can be really useful.
print "What's a ValueError instance?"
print " ", repr(ex)
print "What (non-special) stuff is inside of it?"
print " " + "\n ".join(x for x in dir(ex) if x[:2] != '__')
# Now, there are various ways of getting at the
# message:
print "args: \t", ex.args
print "message:\t", ex.message
print "str: \t", str(ex)
# But "str" just calls the __str__ method:
print "__str__:\t", ex.__str__()
# And since it has a __str__ method, print can use
# it directly:
print "Bare: \t", ex
| 30.145161 | 61 | 0.716961 | 335 | 1,869 | 3.928358 | 0.447761 | 0.018237 | 0.012158 | 0.019757 | 0.056231 | 0.024316 | 0 | 0 | 0 | 0 | 0 | 0.00198 | 0.189406 | 1,869 | 61 | 62 | 30.639344 | 0.866667 | 0.186731 | 0 | 0 | 0 | 0 | 0.422222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.916667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
71e2ceb3e636e78a982fce9d86562e81d9673fef | 1,052 | py | Python | mrt_file_server/utils/nbt_utils.py | Frumple/mrt-file-server | 659500b6d991a29d8836dd4388a5e9694240a263 | [
"MIT"
] | 2 | 2018-07-10T23:44:38.000Z | 2019-10-06T18:17:50.000Z | mrt_file_server/utils/nbt_utils.py | Frumple/mrt-file-server | 659500b6d991a29d8836dd4388a5e9694240a263 | [
"MIT"
] | 1 | 2019-04-16T07:34:49.000Z | 2019-04-17T22:13:40.000Z | mrt_file_server/utils/nbt_utils.py | Frumple/mrt-file-server | 659500b6d991a29d8836dd4388a5e9694240a263 | [
"MIT"
] | 1 | 2019-04-15T15:32:34.000Z | 2019-04-15T15:32:34.000Z | from nbt.nbt import *
import io
import gzip
def load_compressed_nbt_file(filename):
return NBTFile(filename)
def load_compressed_nbt_buffer(compressed_buffer):
uncompresssed_buffer = gzip.decompress(compressed_buffer)
bytes_io = io.BytesIO(uncompresssed_buffer)
return load_uncompressed_nbt_buffer(bytes_io)
def load_uncompressed_nbt_buffer(uncompresssed_buffer):
return NBTFile(buffer=uncompresssed_buffer)
def save_compressed_nbt_file(nbt):
nbt.write_file()
def get_nbt_map_value(nbt, tag_name):
data = get_nbt_tag(nbt, "data")
if data is None:
return None
tag = get_nbt_tag(data, tag_name)
return tag.value if tag is not None else None
def get_nbt_tag(parent, name):
try:
return parent.__getitem__(name)
except KeyError:
return None
def set_nbt_map_byte_value(nbt, tag_name, value):
data = get_nbt_tag(nbt, "data")
set_nbt_byte_value(data, tag_name, value)
def set_nbt_byte_value(parent, name, value):
tag = TAG_Byte(name)
tag.name = name
tag.value = int(value)
parent.__setitem__(name, tag)
| 25.047619 | 59 | 0.773764 | 163 | 1,052 | 4.638037 | 0.245399 | 0.047619 | 0.047619 | 0.05291 | 0.05291 | 0.05291 | 0 | 0 | 0 | 0 | 0 | 0 | 0.140684 | 1,052 | 41 | 60 | 25.658537 | 0.836283 | 0 | 0 | 0.125 | 0 | 0 | 0.007605 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.09375 | 0.0625 | 0.5625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
71e577fb93acad3c8d1023d69c4e2167af691506 | 803 | py | Python | test_my_code.py | wma-spu/welcome | c437edc3467b4415f8a36dcdc3a8f60ba1182739 | [
"MIT"
] | null | null | null | test_my_code.py | wma-spu/welcome | c437edc3467b4415f8a36dcdc3a8f60ba1182739 | [
"MIT"
] | null | null | null | test_my_code.py | wma-spu/welcome | c437edc3467b4415f8a36dcdc3a8f60ba1182739 | [
"MIT"
] | 1 | 2021-06-06T03:51:31.000Z | 2021-06-06T03:51:31.000Z | from types import ModuleType
import unittest
import importlib
import inspect
class TestMyCode(unittest.TestCase):
"""
Test module `my_code.py` on below criteria:
- Module is importable.
- Module has at least two functions.
"""
def test_import_module(self):
try:
user_mudule = importlib.import_module('my_code')
except:
self.fail('Loading module `my_code.py` failed.')
self.assertEqual(type(user_mudule), ModuleType, msg='Not a module.')
def test_number_of_functions(self):
user_mudule = importlib.import_module('my_code')
callables = inspect.getmembers(user_mudule, inspect.isfunction)
self.assertGreater(len(callables), 1,
msg='Not enough functions! Make some more!')
| 27.689655 | 76 | 0.65878 | 95 | 803 | 5.410526 | 0.536842 | 0.062257 | 0.093385 | 0.054475 | 0.143969 | 0.143969 | 0.143969 | 0 | 0 | 0 | 0 | 0.001653 | 0.246575 | 803 | 28 | 77 | 28.678571 | 0.847934 | 0.13076 | 0 | 0.125 | 0 | 0 | 0.146884 | 0 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.125 | false | 0 | 0.4375 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
71e63ceed29402a2de94841071ca4640644ffd75 | 483 | gyp | Python | ccolors.gyp | divanvisagie/colors.cc | b1621ca2edf50517dbe8599c3803c66dc805bfa7 | [
"FSFAP"
] | 1 | 2017-04-30T11:49:47.000Z | 2017-04-30T11:49:47.000Z | ccolors.gyp | divanvisagie/colors.cc | b1621ca2edf50517dbe8599c3803c66dc805bfa7 | [
"FSFAP"
] | 1 | 2018-12-15T20:35:50.000Z | 2018-12-15T20:35:50.000Z | ccolors.gyp | divanvisagie/colors.cc | b1621ca2edf50517dbe8599c3803c66dc805bfa7 | [
"FSFAP"
] | null | null | null | {
'targets' : [
{
'target_name' : 'test',
'type' : 'executable',
'sources' : [
'<!@(find *.cc)',
'<!@(find *.h)'
],
'include_dirs' : [
],
'libraries' : [
],
'conditions' : [
['OS=="mac"', {
'xcode_settings': {
'ARCHS': '$(ARCHS_STANDARD_64_BIT)'
},
'link_settings': {
'libraries': [
],
},
}]
]
}
]
} | 16.1 | 47 | 0.3147 | 27 | 483 | 5.37037 | 0.851852 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007937 | 0.478261 | 483 | 30 | 48 | 16.1 | 0.56746 | 0 | 0 | 0.222222 | 0 | 0 | 0.36157 | 0.049587 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e082b3859530823c9c53406f285e26c6414d4679 | 679 | py | Python | pyshortener/exceptions.py | ycngmn/Shrinkit-Bot | 59628b1dfa2c5c309015347ce4df2590ff5065bb | [
"Apache-2.0"
] | null | null | null | pyshortener/exceptions.py | ycngmn/Shrinkit-Bot | 59628b1dfa2c5c309015347ce4df2590ff5065bb | [
"Apache-2.0"
] | null | null | null | pyshortener/exceptions.py | ycngmn/Shrinkit-Bot | 59628b1dfa2c5c309015347ce4df2590ff5065bb | [
"Apache-2.0"
] | null | null | null | class ShorteningErrorException(Exception):
def __init__(self, message=None):
super().__init__(f'There was an error on trying to short the url: '
f'{message}')
class ExpandingErrorException(Exception):
def __init__(self, message=None):
super().__init__(f'There was an error on trying to expand the url: '
f'{message}')
class BadAPIResponseException(Exception):
def __init__(self, message):
super().__init__(f'Error on API Response: {message}')
class BadURLException(Exception):
    """Raised when a supplied URL fails validation.

    Args:
        message: The offending URL or a validation detail. Defaults to
            ``None`` (new, backward-compatible default) for consistency
            with the other exceptions in this module, so this exception
            can also be raised without arguments.
    """

    def __init__(self, message=None):
        super().__init__(f'URL is not valid: {message}')
| 32.333333 | 77 | 0.628866 | 76 | 679 | 5.197368 | 0.381579 | 0.121519 | 0.162025 | 0.202532 | 0.617722 | 0.521519 | 0.521519 | 0.521519 | 0.334177 | 0.334177 | 0 | 0 | 0.26215 | 679 | 20 | 78 | 33.95 | 0.788423 | 0 | 0 | 0.428571 | 0 | 0 | 0.261002 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0853c0731ae4356fc59fb628e74e12aea906dac | 10,397 | py | Python | pysnmp/CISCO-VISM-CAS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CISCO-VISM-CAS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CISCO-VISM-CAS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-VISM-CAS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-VISM-CAS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:02:02 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint")
voice, = mibBuilder.importSymbols("BASIS-MIB", "voice")
ciscoWan, = mibBuilder.importSymbols("CISCOWAN-SMI", "ciscoWan")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Integer32, Unsigned32, iso, IpAddress, Bits, Gauge32, MibIdentifier, NotificationType, ModuleIdentity, TimeTicks, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Integer32", "Unsigned32", "iso", "IpAddress", "Bits", "Gauge32", "MibIdentifier", "NotificationType", "ModuleIdentity", "TimeTicks", "Counter64")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# Module identity for CISCO-VISM-CAS-MIB (enterprise OID 1.3.6.1.4.1.351.150.88),
# last revised 2003-07-16.  This file is pysmi-generated (see header comment);
# statement order and OIDs must not be hand-edited.
ciscoVismCasMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 351, 150, 88))
ciscoVismCasMIB.setRevisions(('2003-07-16 00:00',))
if mibBuilder.loadTexts: ciscoVismCasMIB.setLastUpdated('200307160000Z')
if mibBuilder.loadTexts: ciscoVismCasMIB.setOrganization('Cisco Systems, Inc.')
# vismCasVariantTable: per-variant CAS configuration, indexed by
# vismCasVariantName.  Columns with ValueRangeConstraint(...).clone(N)
# declare a default value of N.
vismCasGrp = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8))
vismCasVariantTable = MibTable((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1), )
if mibBuilder.loadTexts: vismCasVariantTable.setStatus('current')
vismCasVariantEntry = MibTableRow((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1), ).setIndexNames((0, "CISCO-VISM-CAS-MIB", "vismCasVariantName"))
if mibBuilder.loadTexts: vismCasVariantEntry.setStatus('current')
vismCasVariantName = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vismCasVariantName.setStatus('current')
vismCasFileName = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(2, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasFileName.setStatus('current')
# vismCasTRinging and vismCasCountryCode are marked deprecated below.
vismCasTRinging = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 600)).clone(180)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasTRinging.setStatus('deprecated')
vismCasDigitMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("mf", 1), ("dtmf", 2))).clone('dtmf')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasDigitMethod.setStatus('current')
vismCasInterdigitTpart = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 10000)).clone(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasInterdigitTpart.setStatus('current')
vismCasInterdigitTcrit = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10000)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasInterdigitTcrit.setStatus('current')
vismCasInterdigitTMF = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasInterdigitTMF.setStatus('current')
vismCasVariantState = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notConfigured", 1), ("configInProgress", 2), ("configured", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vismCasVariantState.setStatus('current')
vismCasRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 4, 6))).clone(namedValues=NamedValues(("active", 1), ("createAndGo", 4), ("destroy", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasRowStatus.setStatus('current')
vismCasCountryCode = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 2)).clone('US')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasCountryCode.setStatus('deprecated')
vismCasVariantSource = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unspecified", 1), ("internal", 2), ("external", 3))).clone('unspecified')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasVariantSource.setStatus('current')
# vismCasXgcpVariantTable: XGCP retransmission parameters per variant,
# indexed by vismCasXgcpVariantName.
vismCasXgcpVariantTable = MibTable((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 2), )
if mibBuilder.loadTexts: vismCasXgcpVariantTable.setStatus('current')
vismCasXgcpVariantEntry = MibTableRow((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 2, 1), ).setIndexNames((0, "CISCO-VISM-CAS-MIB", "vismCasXgcpVariantName"))
if mibBuilder.loadTexts: vismCasXgcpVariantEntry.setStatus('current')
vismCasXgcpVariantName = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 2, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vismCasXgcpVariantName.setStatus('current')
vismCasXgcpFileName = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vismCasXgcpFileName.setStatus('current')
vismCasXgcpMaxReXmitTime = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 10000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasXgcpMaxReXmitTime.setStatus('current')
vismCasXgcpInitialReXmitTime = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 10000)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasXgcpInitialReXmitTime.setStatus('current')
vismCasXgcpMaxRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 5, 8, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10)).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vismCasXgcpMaxRetries.setStatus('current')
# Conformance section: one compliance statement plus three object groups
# (two 'current', one 'deprecated' covering vismCasTRinging/CountryCode).
# The setStatus calls are guarded on the pysnmp mibBuilder version.
ciscoVismCasMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 88, 2))
ciscoVismCasMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 88, 2, 1))
ciscoVismCasMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 88, 2, 2))
ciscoVismCasCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 351, 150, 88, 2, 2, 1)).setObjects(("CISCO-VISM-CAS-MIB", "ciscoVismCasVariantGroup"), ("CISCO-VISM-CAS-MIB", "ciscoVismCasXgcpVariantGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoVismCasCompliance = ciscoVismCasCompliance.setStatus('current')
ciscoVismCasVariantGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 351, 150, 88, 2, 1, 1)).setObjects(("CISCO-VISM-CAS-MIB", "vismCasVariantName"), ("CISCO-VISM-CAS-MIB", "vismCasFileName"), ("CISCO-VISM-CAS-MIB", "vismCasDigitMethod"), ("CISCO-VISM-CAS-MIB", "vismCasInterdigitTpart"), ("CISCO-VISM-CAS-MIB", "vismCasInterdigitTcrit"), ("CISCO-VISM-CAS-MIB", "vismCasInterdigitTMF"), ("CISCO-VISM-CAS-MIB", "vismCasVariantState"), ("CISCO-VISM-CAS-MIB", "vismCasRowStatus"), ("CISCO-VISM-CAS-MIB", "vismCasVariantSource"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoVismCasVariantGroup = ciscoVismCasVariantGroup.setStatus('current')
ciscoVismCasXgcpVariantGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 351, 150, 88, 2, 1, 2)).setObjects(("CISCO-VISM-CAS-MIB", "vismCasXgcpVariantName"), ("CISCO-VISM-CAS-MIB", "vismCasXgcpFileName"), ("CISCO-VISM-CAS-MIB", "vismCasXgcpMaxReXmitTime"), ("CISCO-VISM-CAS-MIB", "vismCasXgcpInitialReXmitTime"), ("CISCO-VISM-CAS-MIB", "vismCasXgcpMaxRetries"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoVismCasXgcpVariantGroup = ciscoVismCasXgcpVariantGroup.setStatus('current')
cvcVariantDeprecatedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 351, 150, 88, 2, 1, 3)).setObjects(("CISCO-VISM-CAS-MIB", "vismCasTRinging"), ("CISCO-VISM-CAS-MIB", "vismCasCountryCode"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cvcVariantDeprecatedGroup = cvcVariantDeprecatedGroup.setStatus('deprecated')
# Register every defined symbol with the MIB builder so other modules can
# import them by name.
mibBuilder.exportSymbols("CISCO-VISM-CAS-MIB", vismCasXgcpMaxRetries=vismCasXgcpMaxRetries, ciscoVismCasMIBConformance=ciscoVismCasMIBConformance, vismCasVariantSource=vismCasVariantSource, ciscoVismCasCompliance=ciscoVismCasCompliance, vismCasGrp=vismCasGrp, vismCasXgcpVariantName=vismCasXgcpVariantName, vismCasXgcpVariantTable=vismCasXgcpVariantTable, vismCasDigitMethod=vismCasDigitMethod, vismCasInterdigitTcrit=vismCasInterdigitTcrit, vismCasVariantState=vismCasVariantState, ciscoVismCasMIBCompliances=ciscoVismCasMIBCompliances, vismCasXgcpFileName=vismCasXgcpFileName, vismCasTRinging=vismCasTRinging, vismCasCountryCode=vismCasCountryCode, vismCasVariantName=vismCasVariantName, vismCasFileName=vismCasFileName, vismCasXgcpMaxReXmitTime=vismCasXgcpMaxReXmitTime, vismCasInterdigitTMF=vismCasInterdigitTMF, ciscoVismCasXgcpVariantGroup=ciscoVismCasXgcpVariantGroup, vismCasVariantTable=vismCasVariantTable, vismCasXgcpInitialReXmitTime=vismCasXgcpInitialReXmitTime, vismCasVariantEntry=vismCasVariantEntry, ciscoVismCasMIBGroups=ciscoVismCasMIBGroups, ciscoVismCasVariantGroup=ciscoVismCasVariantGroup, vismCasRowStatus=vismCasRowStatus, vismCasInterdigitTpart=vismCasInterdigitTpart, ciscoVismCasMIB=ciscoVismCasMIB, cvcVariantDeprecatedGroup=cvcVariantDeprecatedGroup, vismCasXgcpVariantEntry=vismCasXgcpVariantEntry, PYSNMP_MODULE_ID=ciscoVismCasMIB)
| 133.294872 | 1,363 | 0.764163 | 1,143 | 10,397 | 6.949256 | 0.152231 | 0.008057 | 0.010953 | 0.014604 | 0.398716 | 0.283898 | 0.2562 | 0.199799 | 0.184313 | 0.182551 | 0 | 0.075164 | 0.078677 | 10,397 | 77 | 1,364 | 135.025974 | 0.754045 | 0.03174 | 0 | 0.057971 | 0 | 0 | 0.176775 | 0.025552 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.115942 | 0 | 0.115942 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e08b3721c6656e32f7c08ed1b1c8e18f417fb318 | 21,930 | py | Python | pysnmp/CISCO-WIRELESS-P2P-BPI-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CISCO-WIRELESS-P2P-BPI-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CISCO-WIRELESS-P2P-BPI-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-WIRELESS-P2P-BPI-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-WIRELESS-P2P-BPI-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:05:17 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
ModuleIdentity, TimeTicks, NotificationType, MibIdentifier, Unsigned32, ObjectIdentity, IpAddress, Counter32, Bits, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Gauge32, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "TimeTicks", "NotificationType", "MibIdentifier", "Unsigned32", "ObjectIdentity", "IpAddress", "Counter32", "Bits", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Gauge32", "Counter64")
TruthValue, TimeInterval, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TimeInterval", "DisplayString", "TextualConvention")
# Module identity for CISCO-WIRELESS-P2P-BPI-MIB (OID 1.3.6.1.4.1.9.9.135),
# last updated 1999-05-18.  pysmi-generated (see header comment); do not
# hand-edit OIDs or statement order.
ciscoWirelessP2pBpiMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 135))
if mibBuilder.loadTexts: ciscoWirelessP2pBpiMIB.setLastUpdated('9905181200Z')
if mibBuilder.loadTexts: ciscoWirelessP2pBpiMIB.setOrganization('Cisco Systems Inc.')
cwrBpiMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 135, 1))
# "Rs" object subtree.  cwrBpiRsBaseTable holds per-interface (ifIndex)
# authorization state, timers and counters; timer columns declare units
# of seconds via setUnits.
cwrBpiRsObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1))
cwrBpiRsBaseTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1), )
if mibBuilder.loadTexts: cwrBpiRsBaseTable.setStatus('current')
cwrBpiRsBaseEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: cwrBpiRsBaseEntry.setStatus('current')
cwrBpiRsPrivacyEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRsPrivacyEnable.setStatus('current')
cwrBpiRsPublicKey = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 126))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsPublicKey.setStatus('current')
cwrBpiRsAuthState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("start", 1), ("authWait", 2), ("authorized", 3), ("reauthWait", 4), ("authRejectWait", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsAuthState.setStatus('current')
cwrBpiRsAuthKeySequenceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsAuthKeySequenceNumber.setStatus('current')
cwrBpiRsAuthExpires = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 5), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsAuthExpires.setStatus('current')
cwrBpiRsAuthReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 6), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRsAuthReset.setStatus('current')
cwrBpiRsAuthGraceTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1800))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRsAuthGraceTime.setStatus('current')
cwrBpiRsTEKGraceTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1800))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRsTEKGraceTime.setStatus('current')
cwrBpiRsAuthWaitTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 30))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRsAuthWaitTimeout.setStatus('current')
cwrBpiRsReauthWaitTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 30))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRsReauthWaitTimeout.setStatus('current')
cwrBpiRsOpWaitTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRsOpWaitTimeout.setStatus('current')
cwrBpiRsRekeyWaitTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRsRekeyWaitTimeout.setStatus('current')
cwrBpiRsAuthRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsAuthRequests.setStatus('current')
cwrBpiRsAuthReplies = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsAuthReplies.setStatus('current')
cwrBpiRsAuthInvalids = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsAuthInvalids.setStatus('current')
cwrBpiRsAuthInvalidErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("noInformation", 0), ("unauthorizedSlave", 1), ("undefined", 2), ("unsolicited", 3), ("invalidKeySequence", 4), ("keyRequestAuthenticationFailure", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsAuthInvalidErrorCode.setStatus('current')
cwrBpiRsAuthInvalidErrorString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 1, 1, 17), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsAuthInvalidErrorString.setStatus('current')
# cwrBpiRsTEKTable: per-interface TEK (traffic encryption key) state and
# counters, also indexed by ifIndex.
cwrBpiRsTEKTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2), )
if mibBuilder.loadTexts: cwrBpiRsTEKTable.setStatus('current')
cwrBpiRsTEKEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: cwrBpiRsTEKEntry.setStatus('current')
cwrBpiRsTEKEncryptionNegotiated = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 1), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKEncryptionNegotiated.setStatus('current')
cwrBpiRsTEKState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("start", 1), ("opWait", 2), ("opReauthWait", 3), ("operational", 4), ("rekeyWait", 5), ("rekeyReauthWait", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKState.setStatus('current')
cwrBpiRsTEKExpiresOld = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 3), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKExpiresOld.setStatus('current')
cwrBpiRsTEKExpiresNew = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 4), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKExpiresNew.setStatus('current')
cwrBpiRsTEKKeyRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKKeyRequests.setStatus('current')
cwrBpiRsTEKKeyReplies = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKKeyReplies.setStatus('current')
cwrBpiRsTEKInvalids = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKInvalids.setStatus('current')
cwrBpiRsTEKAuthPends = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKAuthPends.setStatus('current')
cwrBpiRsTEKInvalidErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("noInformation", 0), ("unauthorizedSlave", 1), ("undefined", 2), ("unsolicited", 3), ("invalidKeySequence", 4), ("keyRequestAuthenticationFailure", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKInvalidErrorCode.setStatus('current')
cwrBpiRsTEKInvalidErrorString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 1, 2, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRsTEKInvalidErrorString.setStatus('current')
# "Rm" object subtree, mirroring the Rs tables: per-interface (ifIndex)
# authorization and TEK state/counters.  Timer columns declare seconds
# via setUnits; lifetimes are read-write.
cwrBpiRmObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2))
cwrBpiRmAuthTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1), )
if mibBuilder.loadTexts: cwrBpiRmAuthTable.setStatus('current')
cwrBpiRmAuthEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: cwrBpiRmAuthEntry.setStatus('current')
cwrBpiRmAuthPrivacyEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRmAuthPrivacyEnable.setStatus('current')
cwrBpiRmAuthRsPublicKey = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 126))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmAuthRsPublicKey.setStatus('current')
cwrBpiRmAuthRsKeySequenceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmAuthRsKeySequenceNumber.setStatus('current')
cwrBpiRmAuthRsExpires = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 4), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmAuthRsExpires.setStatus('current')
cwrBpiRmAuthRsLifetime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6048000))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRmAuthRsLifetime.setStatus('current')
cwrBpiRmAuthRsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 6), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRmAuthRsReset.setStatus('current')
cwrBpiRmAuthRsRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmAuthRsRequests.setStatus('current')
cwrBpiRmAuthRsReplies = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmAuthRsReplies.setStatus('current')
cwrBpiRmAuthRsInvalids = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmAuthRsInvalids.setStatus('current')
cwrBpiRmAuthInvalidErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("noInformation", 0), ("unauthorizedSlave", 1), ("undefined", 2), ("unsolicited", 3), ("invalidKeySequence", 4), ("keyRequestAuthenticationFailure", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmAuthInvalidErrorCode.setStatus('current')
cwrBpiRmAuthInvalidErrorString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 1, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmAuthInvalidErrorString.setStatus('current')
# cwrBpiRmTEKTable: Rm-side TEK lifetime/expiry and key-exchange counters.
cwrBpiRmTEKTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2), )
if mibBuilder.loadTexts: cwrBpiRmTEKTable.setStatus('current')
cwrBpiRmTEKEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: cwrBpiRmTEKEntry.setStatus('current')
cwrBpiRmTEKEncryptionNegotiated = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 1), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmTEKEncryptionNegotiated.setStatus('current')
cwrBpiRmTEKLifetime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 604800))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRmTEKLifetime.setStatus('current')
cwrBpiRmTEKExpiresOld = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 3), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmTEKExpiresOld.setStatus('current')
cwrBpiRmTEKExpiresNew = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 4), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmTEKExpiresNew.setStatus('current')
cwrBpiRmTEKReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 5), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cwrBpiRmTEKReset.setStatus('current')
cwrBpiRmKeyRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmKeyRequests.setStatus('current')
cwrBpiRmKeyReplies = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmKeyReplies.setStatus('current')
cwrBpiRmTEKInvalids = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmTEKInvalids.setStatus('current')
cwrBpiRmTEKInvalidErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("noInformation", 0), ("unauthorizedSlave", 1), ("undefined", 2), ("unsolicited", 3), ("invalidKeySequence", 4), ("keyRequestAuthenticationFailure", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmTEKInvalidErrorCode.setStatus('current')
cwrBpiRmTEKInvalidErrorString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 135, 1, 2, 2, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwrBpiRmTEKInvalidErrorString.setStatus('current')
# Conformance section: compliance statement plus the Rs and Rm object
# groups.  setStatus calls are guarded on the pysnmp mibBuilder version.
cwrBpiNotification = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 135, 2))
cwrBpiConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 135, 3))
cwrBpiCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 135, 3, 1))
cwrBpiGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 135, 3, 2))
cwrBpiBasicCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 135, 3, 1, 1)).setObjects(("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsGroup"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cwrBpiBasicCompliance = cwrBpiBasicCompliance.setStatus('current')
cwrBpiRsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 135, 3, 2, 1)).setObjects(("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsPrivacyEnable"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsPublicKey"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthState"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthKeySequenceNumber"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthExpires"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthReset"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthGraceTime"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKGraceTime"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthWaitTimeout"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsReauthWaitTimeout"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsOpWaitTimeout"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsRekeyWaitTimeout"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthRequests"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthReplies"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthInvalids"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthInvalidErrorCode"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsAuthInvalidErrorString"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKEncryptionNegotiated"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKState"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKExpiresOld"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKExpiresNew"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKKeyRequests"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKKeyReplies"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKInvalids"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKAuthPends"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKInvalidErrorCode"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRsTEKInvalidErrorString"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cwrBpiRsGroup = cwrBpiRsGroup.setStatus('current')
cwrBpiRmGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 135, 3, 2, 2)).setObjects(("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthPrivacyEnable"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthRsPublicKey"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthRsKeySequenceNumber"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthRsExpires"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthRsLifetime"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthRsReset"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthRsRequests"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthRsReplies"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthRsInvalids"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthInvalidErrorCode"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmAuthInvalidErrorString"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmTEKEncryptionNegotiated"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmTEKLifetime"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmTEKExpiresOld"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmTEKExpiresNew"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmTEKReset"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmKeyRequests"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmKeyReplies"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmTEKInvalids"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmTEKInvalidErrorCode"), ("CISCO-WIRELESS-P2P-BPI-MIB", "cwrBpiRmTEKInvalidErrorString"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cwrBpiRmGroup = cwrBpiRmGroup.setStatus('current')
# Register every defined symbol with the MIB builder so other modules can
# import them by name.
mibBuilder.exportSymbols("CISCO-WIRELESS-P2P-BPI-MIB", cwrBpiRsOpWaitTimeout=cwrBpiRsOpWaitTimeout, cwrBpiRsTEKInvalidErrorCode=cwrBpiRsTEKInvalidErrorCode, cwrBpiRmTEKInvalids=cwrBpiRmTEKInvalids, cwrBpiRmAuthPrivacyEnable=cwrBpiRmAuthPrivacyEnable, cwrBpiGroups=cwrBpiGroups, cwrBpiConformance=cwrBpiConformance, cwrBpiRmTEKLifetime=cwrBpiRmTEKLifetime, cwrBpiRmAuthRsExpires=cwrBpiRmAuthRsExpires, cwrBpiRmAuthRsLifetime=cwrBpiRmAuthRsLifetime, cwrBpiRmTEKEntry=cwrBpiRmTEKEntry, cwrBpiRsTEKGraceTime=cwrBpiRsTEKGraceTime, cwrBpiRmAuthRsReplies=cwrBpiRmAuthRsReplies, cwrBpiRsTEKInvalidErrorString=cwrBpiRsTEKInvalidErrorString, cwrBpiRmTEKExpiresNew=cwrBpiRmTEKExpiresNew, PYSNMP_MODULE_ID=ciscoWirelessP2pBpiMIB, cwrBpiRmObjects=cwrBpiRmObjects, cwrBpiRmAuthTable=cwrBpiRmAuthTable, cwrBpiRmAuthEntry=cwrBpiRmAuthEntry, cwrBpiRsReauthWaitTimeout=cwrBpiRsReauthWaitTimeout, cwrBpiRsAuthReset=cwrBpiRsAuthReset, cwrBpiRsAuthInvalidErrorCode=cwrBpiRsAuthInvalidErrorCode, cwrBpiRsAuthWaitTimeout=cwrBpiRsAuthWaitTimeout, cwrBpiRmAuthRsRequests=cwrBpiRmAuthRsRequests, cwrBpiRsTEKTable=cwrBpiRsTEKTable, cwrBpiRmTEKTable=cwrBpiRmTEKTable, cwrBpiRsTEKExpiresOld=cwrBpiRsTEKExpiresOld, cwrBpiRsObjects=cwrBpiRsObjects, cwrBpiRsAuthInvalids=cwrBpiRsAuthInvalids, cwrBpiRsTEKExpiresNew=cwrBpiRsTEKExpiresNew, cwrBpiRsTEKKeyReplies=cwrBpiRsTEKKeyReplies, cwrBpiRsTEKEntry=cwrBpiRsTEKEntry, cwrBpiRsAuthRequests=cwrBpiRsAuthRequests, cwrBpiRsRekeyWaitTimeout=cwrBpiRsRekeyWaitTimeout, cwrBpiRsTEKState=cwrBpiRsTEKState, cwrBpiRsAuthExpires=cwrBpiRsAuthExpires, cwrBpiRmAuthRsPublicKey=cwrBpiRmAuthRsPublicKey, cwrBpiRmAuthRsReset=cwrBpiRmAuthRsReset, cwrBpiRsTEKAuthPends=cwrBpiRsTEKAuthPends, cwrBpiRsBaseTable=cwrBpiRsBaseTable, cwrBpiRsTEKInvalids=cwrBpiRsTEKInvalids, cwrBpiRmKeyReplies=cwrBpiRmKeyReplies, cwrBpiMIBObjects=cwrBpiMIBObjects, cwrBpiRsAuthKeySequenceNumber=cwrBpiRsAuthKeySequenceNumber, cwrBpiRmTEKInvalidErrorCode=cwrBpiRmTEKInvalidErrorCode, cwrBpiRsAuthState=cwrBpiRsAuthState, 
cwrBpiRsAuthInvalidErrorString=cwrBpiRsAuthInvalidErrorString, cwrBpiRsAuthReplies=cwrBpiRsAuthReplies, cwrBpiRmAuthRsInvalids=cwrBpiRmAuthRsInvalids, cwrBpiRmTEKReset=cwrBpiRmTEKReset, cwrBpiRsPublicKey=cwrBpiRsPublicKey, cwrBpiRmTEKInvalidErrorString=cwrBpiRmTEKInvalidErrorString, cwrBpiRsTEKKeyRequests=cwrBpiRsTEKKeyRequests, cwrBpiRmAuthInvalidErrorCode=cwrBpiRmAuthInvalidErrorCode, cwrBpiRmKeyRequests=cwrBpiRmKeyRequests, cwrBpiRsGroup=cwrBpiRsGroup, cwrBpiRsAuthGraceTime=cwrBpiRsAuthGraceTime, cwrBpiRmTEKExpiresOld=cwrBpiRmTEKExpiresOld, cwrBpiRsBaseEntry=cwrBpiRsBaseEntry, cwrBpiRmGroup=cwrBpiRmGroup, cwrBpiBasicCompliance=cwrBpiBasicCompliance, cwrBpiRsPrivacyEnable=cwrBpiRsPrivacyEnable, ciscoWirelessP2pBpiMIB=ciscoWirelessP2pBpiMIB, cwrBpiRsTEKEncryptionNegotiated=cwrBpiRsTEKEncryptionNegotiated, cwrBpiRmAuthRsKeySequenceNumber=cwrBpiRmAuthRsKeySequenceNumber, cwrBpiNotification=cwrBpiNotification, cwrBpiRmAuthInvalidErrorString=cwrBpiRmAuthInvalidErrorString, cwrBpiCompliances=cwrBpiCompliances, cwrBpiRmTEKEncryptionNegotiated=cwrBpiRmTEKEncryptionNegotiated)
| 147.181208 | 3,082 | 0.761833 | 2,422 | 21,930 | 6.897192 | 0.08877 | 0.010296 | 0.012032 | 0.016043 | 0.476324 | 0.392278 | 0.352888 | 0.342113 | 0.331158 | 0.273631 | 0 | 0.068078 | 0.07834 | 21,930 | 148 | 3,083 | 148.175676 | 0.758411 | 0.015777 | 0 | 0.021429 | 0 | 0 | 0.204264 | 0.104844 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.057143 | 0 | 0.057143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e092b68f9b1b3429ce9a5df082a95466e7ebf084 | 11,196 | py | Python | sdk/python/pulumi_azure_native/operationsmanagement/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/operationsmanagement/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/operationsmanagement/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
# Public input types exported by this module (the SDK consumes them via
# ``from ._inputs import *``); keep in sync with the classes defined below.
__all__ = [
    'ArmTemplateParameterArgs',
    'ManagementAssociationPropertiesArgs',
    'ManagementConfigurationPropertiesArgs',
    'SolutionPlanArgs',
    'SolutionPropertiesArgs',
]
@pulumi.input_type
class ArmTemplateParameterArgs:
    def __init__(__self__, *,
                 name: Optional[pulumi.Input[str]] = None,
                 value: Optional[pulumi.Input[str]] = None):
        """
        A single parameter to pass to an ARM template.

        :param pulumi.Input[str] name: name of the parameter.
        :param pulumi.Input[str] value: value for the parameter. In Jtoken
        """
        # Record only the fields the caller actually supplied.
        for attr, supplied in (("name", name), ("value", value)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """name of the parameter."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def value(self) -> Optional[pulumi.Input[str]]:
        """value for the parameter. In Jtoken"""
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class ManagementAssociationPropertiesArgs:
    def __init__(__self__, *,
                 application_id: pulumi.Input[str]):
        """
        Properties of a ManagementAssociation as understood by the
        OperationsManagement resource provider.

        :param pulumi.Input[str] application_id: The applicationId of the appliance for this association.
        """
        # application_id is the sole (required) property.
        pulumi.set(__self__, "application_id", application_id)

    @property
    @pulumi.getter(name="applicationId")
    def application_id(self) -> pulumi.Input[str]:
        """The applicationId of the appliance for this association."""
        return pulumi.get(self, "application_id")

    @application_id.setter
    def application_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "application_id", value)
@pulumi.input_type
class ManagementConfigurationPropertiesArgs:
    def __init__(__self__, *,
                 parameters: pulumi.Input[Sequence[pulumi.Input['ArmTemplateParameterArgs']]],
                 parent_resource_type: pulumi.Input[str],
                 template: Any,
                 application_id: Optional[pulumi.Input[str]] = None):
        """
        Properties of a ManagementConfiguration as understood by the
        OperationsManagement resource provider.

        :param pulumi.Input[Sequence[pulumi.Input['ArmTemplateParameterArgs']]] parameters: Parameters to run the ARM template
        :param pulumi.Input[str] parent_resource_type: The type of the parent resource.
        :param Any template: The Json object containing the ARM template to deploy
        :param pulumi.Input[str] application_id: The applicationId of the appliance for this Management.
        """
        # Required properties are always recorded.
        for attr, supplied in (
            ("parameters", parameters),
            ("parent_resource_type", parent_resource_type),
            ("template", template),
        ):
            pulumi.set(__self__, attr, supplied)
        # The optional property is recorded only when given.
        if application_id is not None:
            pulumi.set(__self__, "application_id", application_id)

    @property
    @pulumi.getter
    def parameters(self) -> pulumi.Input[Sequence[pulumi.Input['ArmTemplateParameterArgs']]]:
        """Parameters to run the ARM template"""
        return pulumi.get(self, "parameters")

    @parameters.setter
    def parameters(self, value: pulumi.Input[Sequence[pulumi.Input['ArmTemplateParameterArgs']]]):
        pulumi.set(self, "parameters", value)

    @property
    @pulumi.getter(name="parentResourceType")
    def parent_resource_type(self) -> pulumi.Input[str]:
        """The type of the parent resource."""
        return pulumi.get(self, "parent_resource_type")

    @parent_resource_type.setter
    def parent_resource_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "parent_resource_type", value)

    @property
    @pulumi.getter
    def template(self) -> Any:
        """The Json object containing the ARM template to deploy"""
        return pulumi.get(self, "template")

    @template.setter
    def template(self, value: Any):
        pulumi.set(self, "template", value)

    @property
    @pulumi.getter(name="applicationId")
    def application_id(self) -> Optional[pulumi.Input[str]]:
        """The applicationId of the appliance for this Management."""
        return pulumi.get(self, "application_id")

    @application_id.setter
    def application_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "application_id", value)
@pulumi.input_type
class SolutionPlanArgs:
    def __init__(__self__, *,
                 name: Optional[pulumi.Input[str]] = None,
                 product: Optional[pulumi.Input[str]] = None,
                 promotion_code: Optional[pulumi.Input[str]] = None,
                 publisher: Optional[pulumi.Input[str]] = None):
        """
        Plan for a solution object supported by the OperationsManagement
        resource provider.

        :param pulumi.Input[str] name: name of the solution to be created. For Microsoft published solution it should be in the format of solutionType(workspaceName). SolutionType part is case sensitive. For third party solution, it can be anything.
        :param pulumi.Input[str] product: name of the solution to enabled/add. For Microsoft published gallery solution it should be in the format of OMSGallery/<solutionType>. This is case sensitive
        :param pulumi.Input[str] promotion_code: promotionCode, Not really used now, can you left as empty
        :param pulumi.Input[str] publisher: Publisher name. For gallery solution, it is Microsoft.
        """
        # Every property is optional: record only what the caller provided.
        for attr, supplied in (
            ("name", name),
            ("product", product),
            ("promotion_code", promotion_code),
            ("publisher", publisher),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """name of the solution to be created. For Microsoft published solution it should be in the format of solutionType(workspaceName). SolutionType part is case sensitive. For third party solution, it can be anything."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def product(self) -> Optional[pulumi.Input[str]]:
        """name of the solution to enabled/add. For Microsoft published gallery solution it should be in the format of OMSGallery/<solutionType>. This is case sensitive"""
        return pulumi.get(self, "product")

    @product.setter
    def product(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "product", value)

    @property
    @pulumi.getter(name="promotionCode")
    def promotion_code(self) -> Optional[pulumi.Input[str]]:
        """promotionCode, Not really used now, can you left as empty"""
        return pulumi.get(self, "promotion_code")

    @promotion_code.setter
    def promotion_code(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "promotion_code", value)

    @property
    @pulumi.getter
    def publisher(self) -> Optional[pulumi.Input[str]]:
        """Publisher name. For gallery solution, it is Microsoft."""
        return pulumi.get(self, "publisher")

    @publisher.setter
    def publisher(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "publisher", value)
@pulumi.input_type
class SolutionPropertiesArgs:
    def __init__(__self__, *,
                 workspace_resource_id: pulumi.Input[str],
                 contained_resources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 referenced_resources: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        Solution properties supported by the OperationsManagement resource
        provider.

        :param pulumi.Input[str] workspace_resource_id: The azure resourceId for the workspace where the solution will be deployed/enabled.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] contained_resources: The azure resources that will be contained within the solutions. They will be locked and gets deleted automatically when the solution is deleted.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] referenced_resources: The resources that will be referenced from this solution. Deleting any of those solution out of band will break the solution.
        """
        # The workspace id is required; the two resource lists are optional.
        pulumi.set(__self__, "workspace_resource_id", workspace_resource_id)
        for attr, supplied in (
            ("contained_resources", contained_resources),
            ("referenced_resources", referenced_resources),
        ):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)

    @property
    @pulumi.getter(name="workspaceResourceId")
    def workspace_resource_id(self) -> pulumi.Input[str]:
        """The azure resourceId for the workspace where the solution will be deployed/enabled."""
        return pulumi.get(self, "workspace_resource_id")

    @workspace_resource_id.setter
    def workspace_resource_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "workspace_resource_id", value)

    @property
    @pulumi.getter(name="containedResources")
    def contained_resources(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """The azure resources that will be contained within the solutions. They will be locked and gets deleted automatically when the solution is deleted."""
        return pulumi.get(self, "contained_resources")

    @contained_resources.setter
    def contained_resources(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "contained_resources", value)

    @property
    @pulumi.getter(name="referencedResources")
    def referenced_resources(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """The resources that will be referenced from this solution. Deleting any of those solution out of band will break the solution."""
        return pulumi.get(self, "referenced_resources")

    @referenced_resources.setter
    def referenced_resources(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "referenced_resources", value)
| 40.273381 | 249 | 0.670775 | 1,289 | 11,196 | 5.678045 | 0.117921 | 0.103703 | 0.091816 | 0.063123 | 0.745457 | 0.627135 | 0.563465 | 0.506217 | 0.478617 | 0.397185 | 0 | 0.000116 | 0.227403 | 11,196 | 277 | 250 | 40.418773 | 0.846012 | 0.30627 | 0 | 0.351852 | 1 | 0 | 0.114515 | 0.035076 | 0 | 0 | 0 | 0 | 0 | 1 | 0.203704 | false | 0 | 0.030864 | 0 | 0.351852 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0a1e2d3a448855253a2313293b7c629c47822f3 | 647 | py | Python | sharpy-sc2/sharpy/plans/acts/protoss/__init__.py | etzhang416/sharpy-bot-eco | badc68ad1aa903dfa1bbc33f6225608e433ff353 | [
"Unlicense"
] | null | null | null | sharpy-sc2/sharpy/plans/acts/protoss/__init__.py | etzhang416/sharpy-bot-eco | badc68ad1aa903dfa1bbc33f6225608e433ff353 | [
"Unlicense"
] | null | null | null | sharpy-sc2/sharpy/plans/acts/protoss/__init__.py | etzhang416/sharpy-bot-eco | badc68ad1aa903dfa1bbc33f6225608e433ff353 | [
"Unlicense"
] | null | null | null | from .archon import Archon, ActArchon
from .artosis_pylon import ArtosisPylon
from .auto_pylon import AutoPylon
from .chrono_any_tech import ChronoAnyTech
from .chrono_tech import ChronoTech
from .chrono_unit import ChronoUnit, ChronoUnitProduction
from .defensive_cannons import DefensiveCannons, ActDefensiveCannons
from .gate_unit import GateUnit
from .protoss_unit import ProtossUnit
from .restore_power import RestorePower
from .robo_unit import RoboUnit
from .star_unit import StarUnit
from .warp_unit import WarpUnit
from .chrono_building import ChronoBuilding
from .defensive_pylon import DefensivePylons, MineralCannons, MineralBatteries
| 40.4375 | 78 | 0.868624 | 80 | 647 | 6.8375 | 0.4875 | 0.109689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.100464 | 647 | 15 | 79 | 43.133333 | 0.939863 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
e0a4017ad831ab753c2076ab2aaf41d174e9016a | 355 | py | Python | src/spaceone/core/base.py | jihyungSong/python-core | 898ead301363d3e599ecd645b73071e639f886b0 | [
"Apache-2.0"
] | 14 | 2020-06-01T08:17:43.000Z | 2022-01-13T22:37:50.000Z | src/spaceone/core/base.py | jihyungSong/python-core | 898ead301363d3e599ecd645b73071e639f886b0 | [
"Apache-2.0"
] | 7 | 2020-08-11T23:05:59.000Z | 2022-01-12T05:08:49.000Z | src/spaceone/core/base.py | jihyungSong/python-core | 898ead301363d3e599ecd645b73071e639f886b0 | [
"Apache-2.0"
] | 11 | 2020-06-01T08:17:49.000Z | 2021-11-25T08:26:37.000Z | from spaceone.core.locator import Locator
from spaceone.core.transaction import Transaction
class CoreObject(object):
    """Base object that binds a service ``Locator`` to a shared ``Transaction``.

    Reuses the caller's transaction when one is provided (truthy); otherwise a
    fresh ``Transaction`` is created.
    """

    def __init__(self, transaction: Transaction = None):
        self.transaction = transaction if transaction else Transaction()
        self.locator = Locator(self.transaction)
| 23.666667 | 56 | 0.687324 | 35 | 355 | 6.857143 | 0.428571 | 0.25 | 0.325 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.239437 | 355 | 14 | 57 | 25.357143 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.222222 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0a9ace1ea7c4d31861856d4b02de973ba656754 | 33,193 | py | Python | tests/unit/test_key.py | codefromhimanshu/python-ndb | 59d1c7b694595216d3410710336b8556d47066ce | [
"Apache-2.0"
] | null | null | null | tests/unit/test_key.py | codefromhimanshu/python-ndb | 59d1c7b694595216d3410710336b8556d47066ce | [
"Apache-2.0"
] | null | null | null | tests/unit/test_key.py | codefromhimanshu/python-ndb | 59d1c7b694595216d3410710336b8556d47066ce | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import pickle
import unittest.mock
from google.cloud.datastore import _app_engine_key_pb2
import google.cloud.datastore
import pytest
from google.cloud.ndb import exceptions
from google.cloud.ndb import key as key_module
from google.cloud.ndb import model
from google.cloud.ndb import _options
from google.cloud.ndb import tasklets
import tests.unit.utils
def test___all__():
    """Sanity-check ``key_module.__all__`` via the shared test utility."""
    tests.unit.utils.verify___all__(key_module)
class TestKey:
    # urlsafe form of a key ("Kind", "Thing") in project "fire"; reused by
    # several tests below.
    URLSAFE = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA"

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_default():
        """A kind/id pair picks up the context's project."""
        key = key_module.Key("Kind", 42)
        assert key._key == google.cloud.datastore.Key(
            "Kind", 42, project="testing"
        )
        assert key._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_different_namespace(context):
        """The client's namespace propagates into new keys."""
        context.client.namespace = "DiffNamespace"
        key = key_module.Key("Kind", 42)
        assert key._key == google.cloud.datastore.Key(
            "Kind", 42, project="testing", namespace="DiffNamespace"
        )
        assert key._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_empty_path():
        """An empty ``pairs`` sequence is rejected."""
        with pytest.raises(TypeError):
            key_module.Key(pairs=())

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_partial():
        """A trailing ``None`` id makes a partial key; a lone kind is an error."""
        with pytest.raises(ValueError):
            key_module.Key("Kind")
        key = key_module.Key("Kind", None)
        assert key._key.is_partial
        assert key._key.flat_path == ("Kind",)
        assert key._key.project == "testing"
        assert key._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_invalid_id_type():
        """Ids must be str/int/None; None is only allowed as the last id."""
        with pytest.raises(TypeError):
            key_module.Key("Kind", object())
        with pytest.raises(exceptions.BadArgumentError):
            key_module.Key("Kind", None, "Also", 10)

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_invalid_kind_type():
        """Kinds must be strings or Model subclasses."""
        with pytest.raises(TypeError):
            key_module.Key(object(), 47)
        with pytest.raises(AttributeError):
            key_module.Key(object, 47)

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_kind_as_model():
        """A Model subclass passed as kind resolves to its kind string."""
        class Simple(model.Model):
            pass

        key = key_module.Key(Simple, 47)
        assert key._key == google.cloud.datastore.Key(
            "Simple", 47, project="testing"
        )
        assert key._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_reference():
        """Building from a protobuf Reference keeps the reference cached."""
        reference = make_reference()
        key = key_module.Key(reference=reference)
        assert key._key == google.cloud.datastore.Key(
            "Parent",
            59,
            "Child",
            "Feather",
            project="sample-app",
            namespace="space",
        )
        assert key._reference is reference

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_serialized():
        """A serialized Reference round-trips into key and reference."""
        serialized = (
            b"j\x18s~sample-app-no-locationr\n\x0b\x12\x04Zorp\x18X\x0c"
        )
        key = key_module.Key(serialized=serialized)
        assert key._key == google.cloud.datastore.Key(
            "Zorp", 88, project="sample-app-no-location"
        )
        assert key._reference == make_reference(
            path=({"type": "Zorp", "id": 88},),
            app="s~sample-app-no-location",
            namespace=None,
        )
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_urlsafe(self):
        """A urlsafe string decodes to key + cached reference."""
        key = key_module.Key(urlsafe=self.URLSAFE)
        assert key._key == google.cloud.datastore.Key(
            "Kind", "Thing", project="fire"
        )
        assert key._reference == make_reference(
            path=({"type": "Kind", "name": "Thing"},),
            app="s~fire",
            namespace=None,
        )

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_pairs():
        """``pairs`` is an alternative to positional path arguments."""
        key = key_module.Key(pairs=[("Kind", 1)])
        assert key._key == google.cloud.datastore.Key(
            "Kind", 1, project="testing"
        )
        assert key._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_flat():
        """``flat`` is an alternative to positional path arguments."""
        key = key_module.Key(flat=["Kind", 1])
        assert key._key == google.cloud.datastore.Key(
            "Kind", 1, project="testing"
        )
        assert key._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_flat_and_pairs():
        """``flat`` and ``pairs`` are mutually exclusive."""
        with pytest.raises(TypeError):
            key_module.Key(pairs=[("Kind", 1)], flat=["Kind", 1])

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_app():
        """A partitioned app id (``s~foo``) is normalized to the project."""
        key = key_module.Key("Kind", 10, app="s~foo")
        assert key._key == google.cloud.datastore.Key(
            "Kind", 10, project="foo"
        )
        assert key._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_project():
        """``project`` can be given directly instead of ``app``."""
        key = key_module.Key("Kind", 10, project="foo")
        assert key._key == google.cloud.datastore.Key(
            "Kind", 10, project="foo"
        )
        assert key._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_project_and_app():
        """``project`` and ``app`` are mutually exclusive."""
        with pytest.raises(TypeError):
            key_module.Key("Kind", 10, project="foo", app="bar")

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_namespace():
        """An explicit namespace overrides the context default."""
        key = key_module.Key("Kind", 1337, namespace="foo")
        assert key._key == google.cloud.datastore.Key(
            "Kind", 1337, project="testing", namespace="foo"
        )
        assert key._reference is None

    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_parent(self):
        """A parent key's path is prepended to the new key's path."""
        parent = key_module.Key(urlsafe=self.URLSAFE)
        key = key_module.Key("Zip", 10, parent=parent)
        assert key._key == google.cloud.datastore.Key(
            "Kind", "Thing", "Zip", 10, project="fire"
        )
        assert key._reference is None

    @pytest.mark.usefixtures("in_context")
    def test_constructor_with_parent_bad_type(self):
        """A non-Key parent is rejected."""
        parent = unittest.mock.sentinel.parent
        with pytest.raises(exceptions.BadValueError):
            key_module.Key("Zip", 10, parent=parent)

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_constructor_insufficient_args():
        """A key needs a path, not just an app."""
        with pytest.raises(TypeError):
            key_module.Key(app="foo")

    @pytest.mark.usefixtures("in_context")
    def test_no_subclass_for_reference(self):
        """Reference-style construction is not allowed on Key subclasses."""
        class KeySubclass(key_module.Key):
            pass

        with pytest.raises(TypeError):
            KeySubclass(urlsafe=self.URLSAFE)

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_invalid_argument_combination():
        """Path arguments and reference arguments cannot be mixed."""
        with pytest.raises(TypeError):
            key_module.Key(flat=["a", "b"], urlsafe=b"foo")

    @pytest.mark.usefixtures("in_context")
    def test_colliding_reference_arguments(self):
        """Only one reference-style argument may be given at a time."""
        urlsafe = self.URLSAFE
        padding = b"=" * (-len(urlsafe) % 4)
        serialized = base64.urlsafe_b64decode(urlsafe + padding)
        with pytest.raises(TypeError):
            key_module.Key(urlsafe=urlsafe, serialized=serialized)

    @staticmethod
    @unittest.mock.patch("google.cloud.ndb.key.Key.__init__")
    def test__from_ds_key(key_init):
        """The alternate constructor wraps a datastore key without __init__."""
        ds_key = google.cloud.datastore.Key("a", "b", project="c")
        key = key_module.Key._from_ds_key(ds_key)
        assert key._key is ds_key
        assert key._reference is None
        key_init.assert_not_called()
    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test___repr__defaults():
        """repr/str omit project and namespace when they are defaults."""
        key = key_module.Key("a", "b")
        assert repr(key) == "Key('a', 'b')"
        assert str(key) == "Key('a', 'b')"

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test___repr__non_defaults():
        """repr/str include explicit project and namespace."""
        key = key_module.Key("X", 11, app="foo", namespace="bar")
        assert repr(key) == "Key('X', 11, project='foo', namespace='bar')"
        assert str(key) == "Key('X', 11, project='foo', namespace='bar')"

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test___hash__():
        """A key hashes like its pairs tuple."""
        key1 = key_module.Key("a", 1)
        assert hash(key1) == hash(key1)
        assert hash(key1) == hash(key1.pairs())
        key2 = key_module.Key("a", 2)
        assert hash(key1) != hash(key2)

    @staticmethod
    def test__tuple():
        """_tuple() is (app, namespace, pairs)."""
        key = key_module.Key("X", 11, app="foo", namespace="n")
        assert key._tuple() == ("foo", "n", (("X", 11),))

    @staticmethod
    def test___eq__():
        """Equality compares path, app and namespace; non-keys are unequal."""
        key1 = key_module.Key("X", 11, app="foo", namespace="n")
        key2 = key_module.Key("Y", 12, app="foo", namespace="n")
        key3 = key_module.Key("X", 11, app="bar", namespace="n")
        key4 = key_module.Key("X", 11, app="foo", namespace="m")
        key5 = unittest.mock.sentinel.key
        assert key1 == key1
        assert not key1 == key2
        assert not key1 == key3
        assert not key1 == key4
        assert not key1 == key5

    @staticmethod
    def test___ne__():
        """Inequality is the strict negation of equality."""
        key1 = key_module.Key("X", 11, app="foo", namespace="n")
        key2 = key_module.Key("Y", 12, app="foo", namespace="n")
        key3 = key_module.Key("X", 11, app="bar", namespace="n")
        key4 = key_module.Key("X", 11, app="foo", namespace="m")
        key5 = unittest.mock.sentinel.key
        assert not key1 != key1
        assert key1 != key2
        assert key1 != key3
        assert key1 != key4
        assert key1 != key5

    @staticmethod
    def test___lt__():
        """Ordering follows the _tuple() ordering; non-keys raise."""
        key1 = key_module.Key("X", 11, app="foo", namespace="n")
        key2 = key_module.Key("Y", 12, app="foo", namespace="n")
        key3 = key_module.Key("X", 11, app="goo", namespace="n")
        key4 = key_module.Key("X", 11, app="foo", namespace="o")
        key5 = unittest.mock.sentinel.key
        assert not key1 < key1
        assert key1 < key2
        assert key1 < key3
        assert key1 < key4
        with pytest.raises(TypeError):
            key1 < key5

    @staticmethod
    def test___le__():
        """<= is reflexive and orders like <; non-keys raise."""
        key1 = key_module.Key("X", 11, app="foo", namespace="n")
        key2 = key_module.Key("Y", 12, app="foo", namespace="n")
        key3 = key_module.Key("X", 11, app="goo", namespace="n")
        key4 = key_module.Key("X", 11, app="foo", namespace="o")
        key5 = unittest.mock.sentinel.key
        assert key1 <= key1
        assert key1 <= key2
        assert key1 <= key3
        assert key1 <= key4
        with pytest.raises(TypeError):
            key1 <= key5

    @staticmethod
    def test___gt__():
        """> mirrors <; non-keys raise."""
        key1 = key_module.Key("X", 11, app="foo", namespace="n")
        key2 = key_module.Key("M", 10, app="foo", namespace="n")
        key3 = key_module.Key("X", 11, app="boo", namespace="n")
        key4 = key_module.Key("X", 11, app="foo", namespace="a")
        key5 = unittest.mock.sentinel.key
        assert not key1 > key1
        assert key1 > key2
        assert key1 > key3
        assert key1 > key4
        with pytest.raises(TypeError):
            key1 > key5

    @staticmethod
    def test___ge__():
        """>= is reflexive and mirrors <=; non-keys raise."""
        key1 = key_module.Key("X", 11, app="foo", namespace="n")
        key2 = key_module.Key("M", 10, app="foo", namespace="n")
        key3 = key_module.Key("X", 11, app="boo", namespace="n")
        key4 = key_module.Key("X", 11, app="foo", namespace="a")
        key5 = unittest.mock.sentinel.key
        assert key1 >= key1
        assert key1 >= key2
        assert key1 >= key3
        assert key1 >= key4
        with pytest.raises(TypeError):
            key1 >= key5

    @staticmethod
    def test_pickling():
        """Keys survive a pickle round trip."""
        key = key_module.Key("a", "b", app="c", namespace="d")
        pickled = pickle.dumps(key)
        unpickled = pickle.loads(pickled)
        assert key == unpickled
    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test___setstate__bad_state():
        """__setstate__ requires a 1-tuple containing a dict."""
        key = key_module.Key("a", "b")
        state = ("not", "length", "one")
        with pytest.raises(TypeError):
            key.__setstate__(state)
        state = ("not-a-dict",)
        with pytest.raises(TypeError):
            key.__setstate__(state)

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_parent():
        """parent() drops the last (kind, id) pair."""
        key = key_module.Key("a", "b", "c", "d")
        parent = key.parent()
        assert parent._key == key._key.parent
        assert parent._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_parent_top_level():
        """A single-pair key has no parent."""
        key = key_module.Key("This", "key")
        assert key.parent() is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_root():
        """root() returns the top-most ancestor key."""
        key = key_module.Key("a", "b", "c", "d")
        root = key.root()
        assert root._key == key._key.parent
        assert root._reference is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_root_top_level():
        """A single-pair key is its own root (same object)."""
        key = key_module.Key("This", "key")
        assert key.root() is key

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_namespace():
        """namespace() echoes the namespace the key was built with."""
        namespace = "my-space"
        key = key_module.Key("abc", 1, namespace=namespace)
        assert key.namespace() == namespace

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_app():
        """app() strips the partition prefix (``s~``)."""
        app = "s~example"
        key = key_module.Key("X", 100, app=app)
        assert key.app() != app
        assert key.app() == app[2:]

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_id():
        """id() returns the last id or name, including None for partials."""
        for id_or_name in ("x", 11, None):
            key = key_module.Key("Kind", id_or_name)
            assert key.id() == id_or_name

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_string_id():
        """string_id() is the last id only when it is a string."""
        pairs = (("x", "x"), (11, None), (None, None))
        for id_or_name, expected in pairs:
            key = key_module.Key("Kind", id_or_name)
            assert key.string_id() == expected

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_integer_id():
        """integer_id() is the last id only when it is an integer."""
        pairs = (("x", None), (11, 11), (None, None))
        for id_or_name, expected in pairs:
            key = key_module.Key("Kind", id_or_name)
            assert key.integer_id() == expected

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_pairs():
        """pairs() is a tuple of (kind, id) 2-tuples."""
        key = key_module.Key("a", "b")
        assert key.pairs() == (("a", "b"),)

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_pairs_partial_key():
        """A partial key's last pair ends in None."""
        key = key_module.Key("This", "key", "that", None)
        assert key.pairs() == (("This", "key"), ("that", None))

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_flat():
        """flat() is the flattened alternating kind/id tuple."""
        key = key_module.Key("This", "key")
        assert key.flat() == ("This", "key")

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_flat_partial_key():
        """flat() keeps the trailing None of a partial key."""
        key = key_module.Key("Kind", None)
        assert key.flat() == ("Kind", None)

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_kind():
        """kind() is the kind of the *last* path element."""
        key = key_module.Key("This", "key")
        assert key.kind() == "This"
        key = key_module.Key("a", "b", "c", "d")
        assert key.kind() == "c"
    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_reference():
        """reference() builds the protobuf Reference for the key."""
        key = key_module.Key("This", "key", app="fire")
        assert key.reference() == make_reference(
            path=({"type": "This", "name": "key"},), app="fire", namespace=None
        )

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_reference_cached():
        """A previously-computed reference is returned as-is."""
        key = key_module.Key("This", "key")
        key._reference = unittest.mock.sentinel.reference
        assert key.reference() is unittest.mock.sentinel.reference

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_reference_bad_kind():
        """Empty or over-long kinds cannot be turned into a reference."""
        too_long = "a" * (key_module._MAX_KEYPART_BYTES + 1)
        for kind in ("", too_long):
            key = key_module.Key(kind, "key", app="app")
            with pytest.raises(ValueError):
                key.reference()

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_reference_bad_string_id():
        """Empty or over-long string ids cannot be turned into a reference."""
        too_long = "a" * (key_module._MAX_KEYPART_BYTES + 1)
        for id_ in ("", too_long):
            key = key_module.Key("kind", id_, app="app")
            with pytest.raises(ValueError):
                key.reference()

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_reference_bad_integer_id():
        """Integer ids must fit in the positive signed 64-bit range."""
        for id_ in (-10, 0, 2 ** 64):
            key = key_module.Key("kind", id_, app="app")
            with pytest.raises(ValueError):
                key.reference()

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_serialized():
        """serialized() is the wire-format bytes of the reference."""
        key = key_module.Key("a", 108, app="c")
        assert key.serialized() == b"j\x01cr\x07\x0b\x12\x01a\x18l\x0c"

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_urlsafe():
        """urlsafe() is base64(serialized) without padding."""
        key = key_module.Key("d", None, app="f")
        assert key.urlsafe() == b"agFmcgULEgFkDA"
    @staticmethod
    @pytest.mark.usefixtures("in_context")
    @unittest.mock.patch("google.cloud.ndb.key._datastore_api")
    @unittest.mock.patch("google.cloud.ndb.model._entity_from_protobuf")
    def test_get_with_cache_miss(_entity_from_protobuf, _datastore_api):
        """On a cache miss, get() does a datastore lookup and decodes it."""
        class Simple(model.Model):
            pass

        ds_future = tasklets.Future()
        ds_future.set_result("ds_entity")
        _datastore_api.lookup.return_value = ds_future
        _entity_from_protobuf.return_value = "the entity"
        key = key_module.Key("Simple", "b", app="c")
        assert key.get(use_cache=True) == "the entity"
        _datastore_api.lookup.assert_called_once_with(
            key._key, _options.ReadOptions(use_cache=True)
        )
        _entity_from_protobuf.assert_called_once_with("ds_entity")

    @staticmethod
    @unittest.mock.patch("google.cloud.ndb.key._datastore_api")
    @unittest.mock.patch("google.cloud.ndb.model._entity_from_protobuf")
    def test_get_with_cache_hit(
        _entity_from_protobuf, _datastore_api, in_context
    ):
        """On a cache hit, get() returns the cached entity with no lookup."""
        class Simple(model.Model):
            pass

        ds_future = tasklets.Future()
        ds_future.set_result("ds_entity")
        _datastore_api.lookup.return_value = ds_future
        _entity_from_protobuf.return_value = "the entity"
        key = key_module.Key("Simple", "b", app="c")
        mock_cached_entity = unittest.mock.Mock(_key=key)
        in_context.cache[key] = mock_cached_entity
        assert key.get(use_cache=True) == mock_cached_entity
        _datastore_api.lookup.assert_not_called()
        _entity_from_protobuf.assert_not_called()

    @staticmethod
    @unittest.mock.patch("google.cloud.ndb.key._datastore_api")
    @unittest.mock.patch("google.cloud.ndb.model._entity_from_protobuf")
    def test_get_no_cache(_entity_from_protobuf, _datastore_api, in_context):
        """With use_cache=False, get() bypasses a populated cache."""
        class Simple(model.Model):
            pass

        ds_future = tasklets.Future()
        ds_future.set_result("ds_entity")
        _datastore_api.lookup.return_value = ds_future
        _entity_from_protobuf.return_value = "the entity"
        key = key_module.Key("Simple", "b", app="c")
        mock_cached_entity = unittest.mock.Mock(_key=key)
        in_context.cache[key] = mock_cached_entity
        assert key.get(use_cache=False) == "the entity"
        _datastore_api.lookup.assert_called_once_with(
            key._key, _options.ReadOptions(use_cache=False)
        )
        _entity_from_protobuf.assert_called_once_with("ds_entity")

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    @unittest.mock.patch("google.cloud.ndb.key._datastore_api")
    @unittest.mock.patch("google.cloud.ndb.model._entity_from_protobuf")
    def test_get_w_hooks(_entity_from_protobuf, _datastore_api):
        """Model pre/post get hooks fire exactly once around get()."""
        class Simple(model.Model):
            pre_get_calls = []
            post_get_calls = []

            @classmethod
            def _pre_get_hook(cls, *args, **kwargs):
                cls.pre_get_calls.append((args, kwargs))

            @classmethod
            def _post_get_hook(cls, key, future, *args, **kwargs):
                assert isinstance(future, tasklets.Future)
                cls.post_get_calls.append(((key,) + args, kwargs))

        ds_future = tasklets.Future()
        ds_future.set_result("ds_entity")
        _datastore_api.lookup.return_value = ds_future
        _entity_from_protobuf.return_value = "the entity"
        key = key_module.Key("Simple", 42)
        assert key.get() == "the entity"
        _datastore_api.lookup.assert_called_once_with(
            key._key, _options.ReadOptions()
        )
        _entity_from_protobuf.assert_called_once_with("ds_entity")
        assert Simple.pre_get_calls == [((key,), {})]
        assert Simple.post_get_calls == [((key,), {})]
@staticmethod
@pytest.mark.usefixtures("in_context")
@unittest.mock.patch("google.cloud.ndb.key._datastore_api")
@unittest.mock.patch("google.cloud.ndb.model._entity_from_protobuf")
def test_get_async(_entity_from_protobuf, _datastore_api):
ds_future = tasklets.Future()
_datastore_api.lookup.return_value = ds_future
_entity_from_protobuf.return_value = "the entity"
key = key_module.Key("a", "b", app="c")
future = key.get_async()
ds_future.set_result("ds_entity")
assert future.result() == "the entity"
_datastore_api.lookup.assert_called_once_with(
key._key, _options.ReadOptions()
)
_entity_from_protobuf.assert_called_once_with("ds_entity")
@staticmethod
@pytest.mark.usefixtures("in_context")
@unittest.mock.patch("google.cloud.ndb.key._datastore_api")
def test_get_async_not_found(_datastore_api):
ds_future = tasklets.Future()
_datastore_api.lookup.return_value = ds_future
key = key_module.Key("a", "b", app="c")
future = key.get_async()
ds_future.set_result(_datastore_api._NOT_FOUND)
assert future.result() is None
@staticmethod
@pytest.mark.usefixtures("in_context")
@unittest.mock.patch("google.cloud.ndb.key._datastore_api")
def test_delete(_datastore_api):
class Simple(model.Model):
pass
future = tasklets.Future()
_datastore_api.delete.return_value = future
future.set_result("result")
key = key_module.Key("Simple", "b", app="c")
assert key.delete() == "result"
_datastore_api.delete.assert_called_once_with(
key._key, _options.Options()
)
@staticmethod
@unittest.mock.patch("google.cloud.ndb.key._datastore_api")
def test_delete_with_cache(_datastore_api, in_context):
class Simple(model.Model):
pass
future = tasklets.Future()
_datastore_api.delete.return_value = future
future.set_result("result")
key = key_module.Key("Simple", "b", app="c")
mock_cached_entity = unittest.mock.Mock(_key=key)
in_context.cache[key] = mock_cached_entity
assert key.delete(use_cache=True) == "result"
assert in_context.cache[key] is None
_datastore_api.delete.assert_called_once_with(
key._key, _options.Options(use_cache=True)
)
@staticmethod
@unittest.mock.patch("google.cloud.ndb.key._datastore_api")
def test_delete_no_cache(_datastore_api, in_context):
class Simple(model.Model):
pass
future = tasklets.Future()
_datastore_api.delete.return_value = future
future.set_result("result")
key = key_module.Key("Simple", "b", app="c")
mock_cached_entity = unittest.mock.Mock(_key=key)
in_context.cache[key] = mock_cached_entity
assert key.delete(use_cache=False) == "result"
assert in_context.cache[key] == mock_cached_entity
_datastore_api.delete.assert_called_once_with(
key._key, _options.Options(use_cache=False)
)
@staticmethod
@pytest.mark.usefixtures("in_context")
@unittest.mock.patch("google.cloud.ndb.key._datastore_api")
def test_delete_w_hooks(_datastore_api):
class Simple(model.Model):
pre_delete_calls = []
post_delete_calls = []
@classmethod
def _pre_delete_hook(cls, *args, **kwargs):
cls.pre_delete_calls.append((args, kwargs))
@classmethod
def _post_delete_hook(cls, key, future, *args, **kwargs):
assert isinstance(future, tasklets.Future)
cls.post_delete_calls.append(((key,) + args, kwargs))
future = tasklets.Future()
_datastore_api.delete.return_value = future
future.set_result("result")
key = key_module.Key("Simple", 42)
assert key.delete() == "result"
_datastore_api.delete.assert_called_once_with(
key._key, _options.Options()
)
assert Simple.pre_delete_calls == [((key,), {})]
assert Simple.post_delete_calls == [((key,), {})]
@staticmethod
@unittest.mock.patch("google.cloud.ndb.key._datastore_api")
def test_delete_in_transaction(_datastore_api, in_context):
future = tasklets.Future()
_datastore_api.delete.return_value = future
with in_context.new(transaction=b"tx123").use():
key = key_module.Key("a", "b", app="c")
assert key.delete() is None
_datastore_api.delete.assert_called_once_with(
key._key, _options.Options()
)
@staticmethod
@pytest.mark.usefixtures("in_context")
@unittest.mock.patch("google.cloud.ndb.key._datastore_api")
def test_delete_async(_datastore_api):
key = key_module.Key("a", "b", app="c")
future = tasklets.Future()
_datastore_api.delete.return_value = future
future.set_result("result")
result = key.delete_async().get_result()
_datastore_api.delete.assert_called_once_with(
key._key, _options.Options()
)
assert result == "result"
@staticmethod
def test_from_old_key():
with pytest.raises(NotImplementedError):
key_module.Key.from_old_key(None)
@staticmethod
@pytest.mark.usefixtures("in_context")
def test_to_old_key():
key = key_module.Key("a", "b")
with pytest.raises(NotImplementedError):
key.to_old_key()
class Test__project_from_app:
@staticmethod
def test_already_clean():
app = "my-prahjekt"
assert key_module._project_from_app(app) == app
@staticmethod
def test_prefixed():
project = "my-prahjekt"
for prefix in ("s", "e", "dev"):
app = "{}~{}".format(prefix, project)
assert key_module._project_from_app(app) == project
@staticmethod
def test_app_fallback(context):
context.client.project = "s~jectpro"
with context.use():
assert key_module._project_from_app(None) == "jectpro"
class Test__from_reference:
def test_basic(self):
reference = make_reference()
ds_key = key_module._from_reference(reference, None, None)
assert ds_key == google.cloud.datastore.Key(
"Parent",
59,
"Child",
"Feather",
project="sample-app",
namespace="space",
)
def test_matching_app(self):
reference = make_reference()
ds_key = key_module._from_reference(reference, "s~sample-app", None)
assert ds_key == google.cloud.datastore.Key(
"Parent",
59,
"Child",
"Feather",
project="sample-app",
namespace="space",
)
def test_differing_app(self):
reference = make_reference()
with pytest.raises(RuntimeError):
key_module._from_reference(reference, "pickles", None)
def test_matching_namespace(self):
reference = make_reference()
ds_key = key_module._from_reference(reference, None, "space")
assert ds_key == google.cloud.datastore.Key(
"Parent",
59,
"Child",
"Feather",
project="sample-app",
namespace="space",
)
def test_differing_namespace(self):
reference = make_reference()
with pytest.raises(RuntimeError):
key_module._from_reference(reference, None, "pickles")
class Test__from_serialized:
@staticmethod
def test_basic():
serialized = (
b"j\x0cs~sample-appr\x1e\x0b\x12\x06Parent\x18;\x0c\x0b\x12\x05"
b'Child"\x07Feather\x0c\xa2\x01\x05space'
)
ds_key, reference = key_module._from_serialized(serialized, None, None)
assert ds_key == google.cloud.datastore.Key(
"Parent",
59,
"Child",
"Feather",
project="sample-app",
namespace="space",
)
assert reference == make_reference()
@staticmethod
def test_no_app_prefix():
serialized = (
b"j\x18s~sample-app-no-locationr\n\x0b\x12\x04Zorp\x18X\x0c"
)
ds_key, reference = key_module._from_serialized(serialized, None, None)
assert ds_key == google.cloud.datastore.Key(
"Zorp", 88, project="sample-app-no-location"
)
assert reference == make_reference(
path=({"type": "Zorp", "id": 88},),
app="s~sample-app-no-location",
namespace=None,
)
class Test__from_urlsafe:
@staticmethod
def test_basic():
urlsafe = (
"agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ"
"WF0aGVyDKIBBXNwYWNl"
)
urlsafe_bytes = urlsafe.encode("ascii")
for value in (urlsafe, urlsafe_bytes):
ds_key, reference = key_module._from_urlsafe(value, None, None)
assert ds_key == google.cloud.datastore.Key(
"Parent",
59,
"Child",
"Feather",
project="sample-app",
namespace="space",
)
assert reference == make_reference()
@staticmethod
def test_needs_padding():
urlsafe = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA"
ds_key, reference = key_module._from_urlsafe(urlsafe, None, None)
assert ds_key == google.cloud.datastore.Key(
"Kind", "Thing", project="fire"
)
assert reference == make_reference(
path=({"type": "Kind", "name": "Thing"},),
app="s~fire",
namespace=None,
)
class Test__constructor_handle_positional:
@staticmethod
def test_with_path():
args = ("Kind", 1)
kwargs = {}
key_module._constructor_handle_positional(args, kwargs)
assert kwargs == {"flat": args}
@staticmethod
def test_path_collide_flat():
args = ("Kind", 1)
kwargs = {"flat": ("OtherKind", "Cheese")}
with pytest.raises(TypeError):
key_module._constructor_handle_positional(args, kwargs)
@staticmethod
def test_dict_positional():
args = ({"flat": ("OtherKind", "Cheese"), "app": "ehp"},)
kwargs = {}
key_module._constructor_handle_positional(args, kwargs)
assert kwargs == args[0]
@staticmethod
def test_dict_positional_with_other_kwargs():
args = ({"flat": ("OtherKind", "Cheese"), "app": "ehp"},)
kwargs = {"namespace": "over-here"}
with pytest.raises(TypeError):
key_module._constructor_handle_positional(args, kwargs)
def make_reference(
path=({"type": "Parent", "id": 59}, {"type": "Child", "name": "Feather"}),
app="s~sample-app",
namespace="space",
):
elements = [
_app_engine_key_pb2.Path.Element(**element) for element in path
]
return _app_engine_key_pb2.Reference(
app=app,
path=_app_engine_key_pb2.Path(element=elements),
name_space=namespace,
)
| 33.528283 | 79 | 0.615823 | 3,935 | 33,193 | 4.941296 | 0.080813 | 0.05323 | 0.05863 | 0.066242 | 0.777309 | 0.731022 | 0.704948 | 0.666324 | 0.61402 | 0.55647 | 0 | 0.0138 | 0.255536 | 33,193 | 989 | 80 | 33.562184 | 0.773057 | 0.01651 | 0 | 0.562195 | 0 | 0.003659 | 0.10012 | 0.034691 | 0 | 0 | 0 | 0 | 0.170732 | 1 | 0.113415 | false | 0.009756 | 0.014634 | 0 | 0.15 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0ae79cb95c45887a376484b9de01d657ef6c406 | 266 | py | Python | setup.py | wonderbeyond/auto-extractor | 5d5eef02b9c335892c508742863fe50739d24d7b | [
"Unlicense"
] | null | null | null | setup.py | wonderbeyond/auto-extractor | 5d5eef02b9c335892c508742863fe50739d24d7b | [
"Unlicense"
] | null | null | null | setup.py | wonderbeyond/auto-extractor | 5d5eef02b9c335892c508742863fe50739d24d7b | [
"Unlicense"
] | null | null | null | from setuptools import setup
setup(
name='auto-extractor',
version="0.0.4",
description='Watch and auto extract zip files',
author='wonderbeyond',
install_requires=[
'inotify',
'chardet',
],
packages=["auto_extractor"]
)
| 19 | 51 | 0.620301 | 28 | 266 | 5.821429 | 0.821429 | 0.159509 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014925 | 0.244361 | 266 | 13 | 52 | 20.461538 | 0.79602 | 0 | 0 | 0 | 0 | 0 | 0.342105 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.083333 | 0 | 0.083333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0aecd67a311684ae599104ec80d0bb46ee53194 | 216 | py | Python | anfisa/anfisa/urls.py | IlyaBoyur/anfisa4friends | 3e89e946298cbba8d8a1982f0e013c9b3de29075 | [
"MIT"
] | null | null | null | anfisa/anfisa/urls.py | IlyaBoyur/anfisa4friends | 3e89e946298cbba8d8a1982f0e013c9b3de29075 | [
"MIT"
] | null | null | null | anfisa/anfisa/urls.py | IlyaBoyur/anfisa4friends | 3e89e946298cbba8d8a1982f0e013c9b3de29075 | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('admin/', admin.site.urls),
path('icecream/', include('icecream.urls')),
path('', include('homepage.urls')),
]
| 24 | 48 | 0.675926 | 26 | 216 | 5.615385 | 0.461538 | 0.136986 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148148 | 216 | 8 | 49 | 27 | 0.793478 | 0 | 0 | 0 | 0 | 0 | 0.189815 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0b2ab9822645d7962a5546fe309f8b664523305 | 65,921 | py | Python | yt_dlp/extractor/peertube.py | S780821/itxyoutube | 87e8e8a7d02b546ce6b8e34f502b8815b953f8cb | [
"Unlicense"
] | 1 | 2022-03-08T03:26:31.000Z | 2022-03-08T03:26:31.000Z | yt_dlp/extractor/peertube.py | S780821/itxyoutube | 87e8e8a7d02b546ce6b8e34f502b8815b953f8cb | [
"Unlicense"
] | 6 | 2022-02-06T10:48:29.000Z | 2022-02-06T13:41:37.000Z | yt_dlp/extractor/peertube.py | S780821/itxyoutube | 87e8e8a7d02b546ce6b8e34f502b8815b953f8cb | [
"Unlicense"
] | 1 | 2022-03-13T02:21:51.000Z | 2022-03-13T02:21:51.000Z | # coding: utf-8
from __future__ import unicode_literals
import functools
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
format_field,
int_or_none,
parse_resolution,
str_or_none,
try_get,
unified_timestamp,
url_or_none,
urljoin,
OnDemandPagedList,
)
class PeerTubeIE(InfoExtractor):
_INSTANCES_RE = r'''(?:
# Taken from https://instances.joinpeertube.org/instances
40two\.tube|
a\.metube\.ch|
advtv\.ml|
algorithmic\.tv|
alimulama\.com|
arcana\.fun|
archive\.vidicon\.org|
artefac-paris\.tv|
auf1\.eu|
battlepenguin\.video|
beertube\.epgn\.ch|
befree\.nohost\.me|
bideoak\.argia\.eus|
birkeundnymphe\.de|
bitcointv\.com|
cattube\.org|
clap\.nerv-project\.eu|
climatejustice\.video|
comf\.tube|
conspiracydistillery\.com|
darkvapor\.nohost\.me|
daschauher\.aksel\.rocks|
digitalcourage\.video|
dreiecksnebel\.alex-detsch\.de|
eduvid\.org|
evangelisch\.video|
exo\.tube|
fair\.tube|
fediverse\.tv|
film\.k-prod\.fr|
flim\.txmn\.tk|
fotogramas\.politicaconciencia\.org|
ftsi\.ru|
gary\.vger\.cloud|
graeber\.video|
greatview\.video|
grypstube\.uni-greifswald\.de|
highvoltage\.tv|
hpstube\.fr|
htp\.live|
hyperreal\.tube|
juggling\.digital|
kino\.kompot\.si|
kino\.schuerz\.at|
kinowolnosc\.pl|
kirche\.peertube-host\.de|
kodcast\.com|
kolektiva\.media|
kraut\.zone|
kumi\.tube|
lastbreach\.tv|
lepetitmayennais\.fr\.nf|
lexx\.impa\.me|
libertynode\.tv|
libra\.syntazia\.org|
libremedia\.video|
live\.libratoi\.org|
live\.nanao\.moe|
live\.toobnix\.org|
livegram\.net|
lolitube\.freedomchan\.moe|
lucarne\.balsamine\.be|
maindreieck-tv\.de|
mani\.tube|
manicphase\.me|
media\.gzevd\.de|
media\.inno3\.cricket|
media\.kaitaia\.life|
media\.krashboyz\.org|
media\.over-world\.org|
media\.skewed\.de|
media\.undeadnetwork\.de|
medias\.pingbase\.net|
melsungen\.peertube-host\.de|
mirametube\.fr|
mojotube\.net|
monplaisirtube\.ddns\.net|
mountaintown\.video|
my\.bunny\.cafe|
myfreetube\.de|
mytube\.kn-cloud\.de|
mytube\.madzel\.de|
myworkoutarenapeertube\.cf|
nanawel-peertube\.dyndns\.org|
nastub\.cz|
offenes\.tv|
orgdup\.media|
ovaltube\.codinglab\.ch|
p2ptv\.ru|
p\.eertu\.be|
p\.lu|
peer\.azurs\.fr|
peertube1\.zeteo\.me|
peertube\.020\.pl|
peertube\.0x5e\.eu|
peertube\.alpharius\.io|
peertube\.am-networks\.fr|
peertube\.anduin\.net|
peertube\.anzui\.dev|
peertube\.arbleizez\.bzh|
peertube\.art3mis\.de|
peertube\.atilla\.org|
peertube\.atsuchan\.page|
peertube\.aukfood\.net|
peertube\.aventer\.biz|
peertube\.b38\.rural-it\.org|
peertube\.beeldengeluid\.nl|
peertube\.be|
peertube\.bgzashtita\.es|
peertube\.bitsandlinux\.com|
peertube\.biz|
peertube\.boba\.best|
peertube\.br0\.fr|
peertube\.bridaahost\.ynh\.fr|
peertube\.bubbletea\.dev|
peertube\.bubuit\.net|
peertube\.cabaal\.net|
peertube\.cats-home\.net|
peertube\.chemnitz\.freifunk\.net|
peertube\.chevro\.fr|
peertube\.chrisspiegl\.com|
peertube\.chtisurel\.net|
peertube\.cipherbliss\.com|
peertube\.cloud\.sans\.pub|
peertube\.cpge-brizeux\.fr|
peertube\.ctseuro\.com|
peertube\.cuatrolibertades\.org|
peertube\.cybercirujas\.club|
peertube\.cythin\.com|
peertube\.davigge\.com|
peertube\.dc\.pini\.fr|
peertube\.debian\.social|
peertube\.demonix\.fr|
peertube\.designersethiques\.org|
peertube\.desmu\.fr|
peertube\.devloprog\.org|
peertube\.devol\.it|
peertube\.dtmf\.ca|
peertube\.ecologie\.bzh|
peertube\.eu\.org|
peertube\.european-pirates\.eu|
peertube\.euskarabildua\.eus|
peertube\.fenarinarsa\.com|
peertube\.fomin\.site|
peertube\.forsud\.be|
peertube\.francoispelletier\.org|
peertube\.freenet\.ru|
peertube\.freetalklive\.com|
peertube\.functional\.cafe|
peertube\.gardeludwig\.fr|
peertube\.gargantia\.fr|
peertube\.gcfamily\.fr|
peertube\.genma\.fr|
peertube\.get-racing\.de|
peertube\.gidikroon\.eu|
peertube\.gruezishop\.ch|
peertube\.habets\.house|
peertube\.hackerfraternity\.org|
peertube\.ichigo\.everydayimshuflin\.com|
peertube\.ignifi\.me|
peertube\.inapurna\.org|
peertube\.informaction\.info|
peertube\.interhop\.org|
peertube\.iselfhost\.com|
peertube\.it|
peertube\.jensdiemer\.de|
peertube\.joffreyverd\.fr|
peertube\.kalua\.im|
peertube\.kathryl\.fr|
peertube\.keazilla\.net|
peertube\.klaewyss\.fr|
peertube\.kodcast\.com|
peertube\.kx\.studio|
peertube\.lagvoid\.com|
peertube\.lavallee\.tech|
peertube\.le5emeaxe\.fr|
peertube\.lestutosdeprocessus\.fr|
peertube\.librenet\.co\.za|
peertube\.logilab\.fr|
peertube\.louisematic\.site|
peertube\.luckow\.org|
peertube\.luga\.at|
peertube\.lyceeconnecte\.fr|
peertube\.manalejandro\.com|
peertube\.marud\.fr|
peertube\.mattone\.net|
peertube\.maxweiss\.io|
peertube\.monlycee\.net|
peertube\.mxinfo\.fr|
peertube\.myrasp\.eu|
peertube\.nebelcloud\.de|
peertube\.netzbegruenung\.de|
peertube\.newsocial\.tech|
peertube\.nicolastissot\.fr|
peertube\.nz|
peertube\.offerman\.com|
peertube\.opencloud\.lu|
peertube\.orthus\.link|
peertube\.patapouf\.xyz|
peertube\.pi2\.dev|
peertube\.plataformess\.org|
peertube\.pl|
peertube\.portaesgnos\.org|
peertube\.r2\.enst\.fr|
peertube\.r5c3\.fr|
peertube\.radres\.xyz|
peertube\.red|
peertube\.robonomics\.network|
peertube\.rtnkv\.cloud|
peertube\.runfox\.tk|
peertube\.satoshishop\.de|
peertube\.scic-tetris\.org|
peertube\.securitymadein\.lu|
peertube\.semweb\.pro|
peertube\.social\.my-wan\.de|
peertube\.soykaf\.org|
peertube\.stefofficiel\.me|
peertube\.stream|
peertube\.su|
peertube\.swrs\.net|
peertube\.takeko\.cyou|
peertube\.tangentfox\.com|
peertube\.taxinachtegel\.de|
peertube\.thenewoil\.xyz|
peertube\.ti-fr\.com|
peertube\.tiennot\.net|
peertube\.troback\.com|
peertube\.tspu\.edu\.ru|
peertube\.tux\.ovh|
peertube\.tv|
peertube\.tweb\.tv|
peertube\.ucy\.de|
peertube\.underworld\.fr|
peertube\.us\.to|
peertube\.ventresmous\.fr|
peertube\.vlaki\.cz|
peertube\.w\.utnw\.de|
peertube\.westring\.digital|
peertube\.xwiki\.com|
peertube\.zoz-serv\.org|
peervideo\.ru|
periscope\.numenaute\.org|
perron-tube\.de|
petitlutinartube\.fr|
phijkchu\.com|
pierre\.tube|
piraten\.space|
play\.rosano\.ca|
player\.ojamajo\.moe|
plextube\.nl|
pocketnetpeertube1\.nohost\.me|
pocketnetpeertube3\.nohost\.me|
pocketnetpeertube4\.nohost\.me|
pocketnetpeertube5\.nohost\.me|
pocketnetpeertube6\.nohost\.me|
pt\.24-7\.ro|
pt\.apathy\.top|
pt\.diaspodon\.fr|
pt\.fedi\.tech|
pt\.maciej\.website|
ptb\.lunarviews\.net|
ptmir1\.inter21\.net|
ptmir2\.inter21\.net|
ptmir3\.inter21\.net|
ptmir4\.inter21\.net|
ptmir5\.inter21\.net|
ptube\.horsentiers\.fr|
ptube\.xmanifesto\.club|
queermotion\.org|
re-wizja\.re-medium\.com|
regarder\.sans\.pub|
ruraletv\.ovh|
s1\.gegenstimme\.tv|
s2\.veezee\.tube|
sdmtube\.fr|
sender-fm\.veezee\.tube|
serv1\.wiki-tube\.de|
serv3\.wiki-tube\.de|
sickstream\.net|
sleepy\.tube|
sovran\.video|
spectra\.video|
stream\.elven\.pw|
stream\.k-prod\.fr|
stream\.shahab\.nohost\.me|
streamsource\.video|
studios\.racer159\.com|
testtube\.florimond\.eu|
tgi\.hosted\.spacebear\.ee|
thaitube\.in\.th|
the\.jokertv\.eu|
theater\.ethernia\.net|
thecool\.tube|
tilvids\.com|
toob\.bub\.org|
tpaw\.video|
truetube\.media|
tuba\.lhub\.pl|
tube-aix-marseille\.beta\.education\.fr|
tube-amiens\.beta\.education\.fr|
tube-besancon\.beta\.education\.fr|
tube-bordeaux\.beta\.education\.fr|
tube-clermont-ferrand\.beta\.education\.fr|
tube-corse\.beta\.education\.fr|
tube-creteil\.beta\.education\.fr|
tube-dijon\.beta\.education\.fr|
tube-education\.beta\.education\.fr|
tube-grenoble\.beta\.education\.fr|
tube-lille\.beta\.education\.fr|
tube-limoges\.beta\.education\.fr|
tube-montpellier\.beta\.education\.fr|
tube-nancy\.beta\.education\.fr|
tube-nantes\.beta\.education\.fr|
tube-nice\.beta\.education\.fr|
tube-normandie\.beta\.education\.fr|
tube-orleans-tours\.beta\.education\.fr|
tube-outremer\.beta\.education\.fr|
tube-paris\.beta\.education\.fr|
tube-poitiers\.beta\.education\.fr|
tube-reims\.beta\.education\.fr|
tube-rennes\.beta\.education\.fr|
tube-strasbourg\.beta\.education\.fr|
tube-toulouse\.beta\.education\.fr|
tube-versailles\.beta\.education\.fr|
tube1\.it\.tuwien\.ac\.at|
tube\.abolivier\.bzh|
tube\.ac-amiens\.fr|
tube\.aerztefueraufklaerung\.de|
tube\.alexx\.ml|
tube\.amic37\.fr|
tube\.anufrij\.de|
tube\.apolut\.net|
tube\.arkhalabs\.io|
tube\.arthack\.nz|
tube\.as211696\.net|
tube\.avensio\.de|
tube\.azbyka\.ru|
tube\.azkware\.net|
tube\.bachaner\.fr|
tube\.bmesh\.org|
tube\.borked\.host|
tube\.bstly\.de|
tube\.chaoszone\.tv|
tube\.chatelet\.ovh|
tube\.cloud-libre\.eu|
tube\.cms\.garden|
tube\.cowfee\.moe|
tube\.cryptography\.dog|
tube\.darknight-coffee\.org|
tube\.dev\.lhub\.pl|
tube\.distrilab\.fr|
tube\.dsocialize\.net|
tube\.ebin\.club|
tube\.fdn\.fr|
tube\.florimond\.eu|
tube\.foxarmy\.ml|
tube\.foxden\.party|
tube\.frischesicht\.de|
tube\.futuretic\.fr|
tube\.gnous\.eu|
tube\.grap\.coop|
tube\.graz\.social|
tube\.grin\.hu|
tube\.hackerscop\.org|
tube\.hordearii\.fr|
tube\.jeena\.net|
tube\.kai-stuht\.com|
tube\.kockatoo\.org|
tube\.kotur\.org|
tube\.lacaveatonton\.ovh|
tube\.linkse\.media|
tube\.lokad\.com|
tube\.lucie-philou\.com|
tube\.melonbread\.xyz|
tube\.mfraters\.net|
tube\.motuhake\.xyz|
tube\.mrbesen\.de|
tube\.nah\.re|
tube\.nchoco\.net|
tube\.novg\.net|
tube\.nox-rhea\.org|
tube\.nuagelibre\.fr|
tube\.nx12\.net|
tube\.octaplex\.net|
tube\.odat\.xyz|
tube\.oisux\.org|
tube\.opportunis\.me|
tube\.org\.il|
tube\.ortion\.xyz|
tube\.others\.social|
tube\.picasoft\.net|
tube\.plomlompom\.com|
tube\.pmj\.rocks|
tube\.portes-imaginaire\.org|
tube\.pyngu\.com|
tube\.rebellion\.global|
tube\.rhythms-of-resistance\.org|
tube\.rita\.moe|
tube\.rsi\.cnr\.it|
tube\.s1gm4\.eu|
tube\.saumon\.io|
tube\.schleuss\.online|
tube\.schule\.social|
tube\.seditio\.fr|
tube\.shanti\.cafe|
tube\.shela\.nu|
tube\.skrep\.in|
tube\.sp-codes\.de|
tube\.sp4ke\.com|
tube\.superseriousbusiness\.org|
tube\.systest\.eu|
tube\.tappret\.fr|
tube\.tardis\.world|
tube\.toontoet\.nl|
tube\.tpshd\.de|
tube\.troopers\.agency|
tube\.tylerdavis\.xyz|
tube\.undernet\.uy|
tube\.vigilian-consulting\.nl|
tube\.vraphim\.com|
tube\.wehost\.lgbt|
tube\.wien\.rocks|
tube\.wolfe\.casa|
tube\.xd0\.de|
tube\.xy-space\.de|
tube\.yapbreak\.fr|
tubedu\.org|
tubes\.jodh\.us|
tuktube\.com|
turkum\.me|
tututu\.tube|
tuvideo\.encanarias\.info|
tv1\.cocu\.cc|
tv1\.gomntu\.space|
tv2\.cocu\.cc|
tv\.adn\.life|
tv\.atmx\.ca|
tv\.bitma\.st|
tv\.generallyrubbish\.net\.au|
tv\.lumbung\.space|
tv\.mattchristiansenmedia\.com|
tv\.netwhood\.online|
tv\.neue\.city|
tv\.piejacker\.net|
tv\.pirateradio\.social|
tv\.undersco\.re|
tvox\.ru|
twctube\.twc-zone\.eu|
unfilter\.tube|
v\.basspistol\.org|
v\.kisombrella\.top|
v\.lastorder\.xyz|
v\.lor\.sh|
v\.phreedom\.club|
v\.sil\.sh|
v\.szy\.io|
v\.xxxapex\.com|
veezee\.tube|
vid\.dascoyote\.xyz|
vid\.garwood\.io|
vid\.ncrypt\.at|
vid\.pravdastalina\.info|
vid\.qorg11\.net|
vid\.rajeshtaylor\.com|
vid\.samtripoli\.com|
vid\.werefox\.dev|
vid\.wildeboer\.net|
video-cave-v2\.de|
video\.076\.ne\.jp|
video\.1146\.nohost\.me|
video\.altertek\.org|
video\.anartist\.org|
video\.apps\.thedoodleproject\.net|
video\.artist\.cx|
video\.asgardius\.company|
video\.balsillie\.net|
video\.bards\.online|
video\.binarydad\.com|
video\.blast-info\.fr|
video\.catgirl\.biz|
video\.cigliola\.com|
video\.cm-en-transition\.fr|
video\.cnt\.social|
video\.coales\.co|
video\.codingfield\.com|
video\.comptoir\.net|
video\.comune\.trento\.it|
video\.cpn\.so|
video\.csc49\.fr|
video\.cybre\.town|
video\.demokratischer-sommer\.de|
video\.discord-insoumis\.fr|
video\.dolphincastle\.com|
video\.dresden\.network|
video\.ecole-89\.com|
video\.elgrillolibertario\.org|
video\.emergeheart\.info|
video\.eradicatinglove\.xyz|
video\.ethantheenigma\.me|
video\.exodus-privacy\.eu\.org|
video\.fbxl\.net|
video\.fhtagn\.org|
video\.greenmycity\.eu|
video\.guerredeclasse\.fr|
video\.gyt\.is|
video\.hackers\.town|
video\.hardlimit\.com|
video\.hooli\.co|
video\.igem\.org|
video\.internet-czas-dzialac\.pl|
video\.islameye\.com|
video\.kicik\.fr|
video\.kuba-orlik\.name|
video\.kyushojitsu\.ca|
video\.lavolte\.net|
video\.lespoesiesdheloise\.fr|
video\.liberta\.vip|
video\.liege\.bike|
video\.linc\.systems|
video\.linux\.it|
video\.linuxtrent\.it|
video\.lokal\.social|
video\.lono\.space|
video\.lunasqu\.ee|
video\.lundi\.am|
video\.marcorennmaus\.de|
video\.mass-trespass\.uk|
video\.mugoreve\.fr|
video\.mundodesconocido\.com|
video\.mycrowd\.ca|
video\.nogafam\.es|
video\.odayacres\.farm|
video\.ozgurkon\.org|
video\.p1ng0ut\.social|
video\.p3x\.de|
video\.pcf\.fr|
video\.pony\.gallery|
video\.potate\.space|
video\.pourpenser\.pro|
video\.progressiv\.dev|
video\.resolutions\.it|
video\.rw501\.de|
video\.screamer\.wiki|
video\.sdm-tools\.net|
video\.sftblw\.moe|
video\.shitposter\.club|
video\.skyn3t\.in|
video\.soi\.ch|
video\.stuartbrand\.co\.uk|
video\.thinkof\.name|
video\.toot\.pt|
video\.triplea\.fr|
video\.turbo\.chat|
video\.vaku\.org\.ua|
video\.veloma\.org|
video\.violoncello\.ch|
video\.wilkie\.how|
video\.wsf2021\.info|
videorelay\.co|
videos-passages\.huma-num\.fr|
videos\.3d-wolf\.com|
videos\.ahp-numerique\.fr|
videos\.alexandrebadalo\.pt|
videos\.archigny\.net|
videos\.benjaminbrady\.ie|
videos\.buceoluegoexisto\.com|
videos\.capas\.se|
videos\.casually\.cat|
videos\.cloudron\.io|
videos\.coletivos\.org|
videos\.danksquad\.org|
videos\.denshi\.live|
videos\.fromouter\.space|
videos\.fsci\.in|
videos\.globenet\.org|
videos\.hauspie\.fr|
videos\.hush\.is|
videos\.john-livingston\.fr|
videos\.jordanwarne\.xyz|
videos\.lavoixdessansvoix\.org|
videos\.leslionsfloorball\.fr|
videos\.lucero\.top|
videos\.martyn\.berlin|
videos\.mastodont\.cat|
videos\.monstro1\.com|
videos\.npo\.city|
videos\.optoutpod\.com|
videos\.petch\.rocks|
videos\.pzelawski\.xyz|
videos\.rampin\.org|
videos\.scanlines\.xyz|
videos\.shmalls\.pw|
videos\.sibear\.fr|
videos\.stadtfabrikanten\.org|
videos\.tankernn\.eu|
videos\.testimonia\.org|
videos\.thisishowidontdisappear\.com|
videos\.traumaheilung\.net|
videos\.trom\.tf|
videos\.wakkerewereld\.nu|
videos\.weblib\.re|
videos\.yesil\.club|
vids\.roshless\.me|
vids\.tekdmn\.me|
vidz\.dou\.bet|
vod\.lumikko\.dev|
vs\.uniter\.network|
vulgarisation-informatique\.fr|
watch\.breadtube\.tv|
watch\.deranalyst\.ch|
watch\.ignorance\.eu|
watch\.krazy\.party|
watch\.libertaria\.space|
watch\.rt4mn\.org|
watch\.softinio\.com|
watch\.tubelab\.video|
web-fellow\.de|
webtv\.vandoeuvre\.net|
wechill\.space|
wikileaks\.video|
wiwi\.video|
worldofvids\.com|
wwtube\.net|
www4\.mir\.inter21\.net|
www\.birkeundnymphe\.de|
www\.captain-german\.com|
www\.wiki-tube\.de|
xxivproduction\.video|
xxx\.noho\.st|
# from youtube-dl
peertube\.rainbowswingers\.net|
tube\.stanisic\.nl|
peer\.suiri\.us|
medias\.libox\.fr|
videomensoif\.ynh\.fr|
peertube\.travelpandas\.eu|
peertube\.rachetjay\.fr|
peertube\.montecsys\.fr|
tube\.eskuero\.me|
peer\.tube|
peertube\.umeahackerspace\.se|
tube\.nx-pod\.de|
video\.monsieurbidouille\.fr|
tube\.openalgeria\.org|
vid\.lelux\.fi|
video\.anormallostpod\.ovh|
tube\.crapaud-fou\.org|
peertube\.stemy\.me|
lostpod\.space|
exode\.me|
peertube\.snargol\.com|
vis\.ion\.ovh|
videosdulib\.re|
v\.mbius\.io|
videos\.judrey\.eu|
peertube\.osureplayviewer\.xyz|
peertube\.mathieufamily\.ovh|
www\.videos-libr\.es|
fightforinfo\.com|
peertube\.fediverse\.ru|
peertube\.oiseauroch\.fr|
video\.nesven\.eu|
v\.bearvideo\.win|
video\.qoto\.org|
justporn\.cc|
video\.vny\.fr|
peervideo\.club|
tube\.taker\.fr|
peertube\.chantierlibre\.org|
tube\.ipfixe\.info|
tube\.kicou\.info|
tube\.dodsorf\.as|
videobit\.cc|
video\.yukari\.moe|
videos\.elbinario\.net|
hkvideo\.live|
pt\.tux\.tf|
www\.hkvideo\.live|
FIGHTFORINFO\.com|
pt\.765racing\.com|
peertube\.gnumeria\.eu\.org|
nordenmedia\.com|
peertube\.co\.uk|
tube\.darfweb\.eu|
tube\.kalah-france\.org|
0ch\.in|
vod\.mochi\.academy|
film\.node9\.org|
peertube\.hatthieves\.es|
video\.fitchfamily\.org|
peertube\.ddns\.net|
video\.ifuncle\.kr|
video\.fdlibre\.eu|
tube\.22decembre\.eu|
peertube\.harmoniescreatives\.com|
tube\.fabrigli\.fr|
video\.thedwyers\.co|
video\.bruitbruit\.com|
peertube\.foxfam\.club|
peer\.philoxweb\.be|
videos\.bugs\.social|
peertube\.malbert\.xyz|
peertube\.bilange\.ca|
libretube\.net|
diytelevision\.com|
peertube\.fedilab\.app|
libre\.video|
video\.mstddntfdn\.online|
us\.tv|
peertube\.sl-network\.fr|
peertube\.dynlinux\.io|
peertube\.david\.durieux\.family|
peertube\.linuxrocks\.online|
peerwatch\.xyz|
v\.kretschmann\.social|
tube\.otter\.sh|
yt\.is\.nota\.live|
tube\.dragonpsi\.xyz|
peertube\.boneheadmedia\.com|
videos\.funkwhale\.audio|
watch\.44con\.com|
peertube\.gcaillaut\.fr|
peertube\.icu|
pony\.tube|
spacepub\.space|
tube\.stbr\.io|
v\.mom-gay\.faith|
tube\.port0\.xyz|
peertube\.simounet\.net|
play\.jergefelt\.se|
peertube\.zeteo\.me|
tube\.danq\.me|
peertube\.kerenon\.com|
tube\.fab-l3\.org|
tube\.calculate\.social|
peertube\.mckillop\.org|
tube\.netzspielplatz\.de|
vod\.ksite\.de|
peertube\.laas\.fr|
tube\.govital\.net|
peertube\.stephenson\.cc|
bistule\.nohost\.me|
peertube\.kajalinifi\.de|
video\.ploud\.jp|
video\.omniatv\.com|
peertube\.ffs2play\.fr|
peertube\.leboulaire\.ovh|
peertube\.tronic-studio\.com|
peertube\.public\.cat|
peertube\.metalbanana\.net|
video\.1000i100\.fr|
peertube\.alter-nativ-voll\.de|
tube\.pasa\.tf|
tube\.worldofhauru\.xyz|
pt\.kamp\.site|
peertube\.teleassist\.fr|
videos\.mleduc\.xyz|
conf\.tube|
media\.privacyinternational\.org|
pt\.forty-two\.nl|
video\.halle-leaks\.de|
video\.grosskopfgames\.de|
peertube\.schaeferit\.de|
peertube\.jackbot\.fr|
tube\.extinctionrebellion\.fr|
peertube\.f-si\.org|
video\.subak\.ovh|
videos\.koweb\.fr|
peertube\.zergy\.net|
peertube\.roflcopter\.fr|
peertube\.floss-marketing-school\.com|
vloggers\.social|
peertube\.iriseden\.eu|
videos\.ubuntu-paris\.org|
peertube\.mastodon\.host|
armstube\.com|
peertube\.s2s\.video|
peertube\.lol|
tube\.open-plug\.eu|
open\.tube|
peertube\.ch|
peertube\.normandie-libre\.fr|
peertube\.slat\.org|
video\.lacaveatonton\.ovh|
peertube\.uno|
peertube\.servebeer\.com|
peertube\.fedi\.quebec|
tube\.h3z\.jp|
tube\.plus200\.com|
peertube\.eric\.ovh|
tube\.metadocs\.cc|
tube\.unmondemeilleur\.eu|
gouttedeau\.space|
video\.antirep\.net|
nrop\.cant\.at|
tube\.ksl-bmx\.de|
tube\.plaf\.fr|
tube\.tchncs\.de|
video\.devinberg\.com|
hitchtube\.fr|
peertube\.kosebamse\.com|
yunopeertube\.myddns\.me|
peertube\.varney\.fr|
peertube\.anon-kenkai\.com|
tube\.maiti\.info|
tubee\.fr|
videos\.dinofly\.com|
toobnix\.org|
videotape\.me|
voca\.tube|
video\.heromuster\.com|
video\.lemediatv\.fr|
video\.up\.edu\.ph|
balafon\.video|
video\.ivel\.fr|
thickrips\.cloud|
pt\.laurentkruger\.fr|
video\.monarch-pass\.net|
peertube\.artica\.center|
video\.alternanet\.fr|
indymotion\.fr|
fanvid\.stopthatimp\.net|
video\.farci\.org|
v\.lesterpig\.com|
video\.okaris\.de|
tube\.pawelko\.net|
peertube\.mablr\.org|
tube\.fede\.re|
pytu\.be|
evertron\.tv|
devtube\.dev-wiki\.de|
raptube\.antipub\.org|
video\.selea\.se|
peertube\.mygaia\.org|
video\.oh14\.de|
peertube\.livingutopia\.org|
peertube\.the-penguin\.de|
tube\.thechangebook\.org|
tube\.anjara\.eu|
pt\.pube\.tk|
video\.samedi\.pm|
mplayer\.demouliere\.eu|
widemus\.de|
peertube\.me|
peertube\.zapashcanon\.fr|
video\.latavernedejohnjohn\.fr|
peertube\.pcservice46\.fr|
peertube\.mazzonetto\.eu|
video\.irem\.univ-paris-diderot\.fr|
video\.livecchi\.cloud|
alttube\.fr|
video\.coop\.tools|
video\.cabane-libre\.org|
peertube\.openstreetmap\.fr|
videos\.alolise\.org|
irrsinn\.video|
video\.antopie\.org|
scitech\.video|
tube2\.nemsia\.org|
video\.amic37\.fr|
peertube\.freeforge\.eu|
video\.arbitrarion\.com|
video\.datsemultimedia\.com|
stoptrackingus\.tv|
peertube\.ricostrongxxx\.com|
docker\.videos\.lecygnenoir\.info|
peertube\.togart\.de|
tube\.postblue\.info|
videos\.domainepublic\.net|
peertube\.cyber-tribal\.com|
video\.gresille\.org|
peertube\.dsmouse\.net|
cinema\.yunohost\.support|
tube\.theocevaer\.fr|
repro\.video|
tube\.4aem\.com|
quaziinc\.com|
peertube\.metawurst\.space|
videos\.wakapo\.com|
video\.ploud\.fr|
video\.freeradical\.zone|
tube\.valinor\.fr|
refuznik\.video|
pt\.kircheneuenburg\.de|
peertube\.asrun\.eu|
peertube\.lagob\.fr|
videos\.side-ways\.net|
91video\.online|
video\.valme\.io|
video\.taboulisme\.com|
videos-libr\.es|
tv\.mooh\.fr|
nuage\.acostey\.fr|
video\.monsieur-a\.fr|
peertube\.librelois\.fr|
videos\.pair2jeux\.tube|
videos\.pueseso\.club|
peer\.mathdacloud\.ovh|
media\.assassinate-you\.net|
vidcommons\.org|
ptube\.rousset\.nom\.fr|
tube\.cyano\.at|
videos\.squat\.net|
video\.iphodase\.fr|
peertube\.makotoworkshop\.org|
peertube\.serveur\.slv-valbonne\.fr|
vault\.mle\.party|
hostyour\.tv|
videos\.hack2g2\.fr|
libre\.tube|
pire\.artisanlogiciel\.net|
videos\.numerique-en-commun\.fr|
video\.netsyms\.com|
video\.die-partei\.social|
video\.writeas\.org|
peertube\.swarm\.solvingmaz\.es|
tube\.pericoloso\.ovh|
watching\.cypherpunk\.observer|
videos\.adhocmusic\.com|
tube\.rfc1149\.net|
peertube\.librelabucm\.org|
videos\.numericoop\.fr|
peertube\.koehn\.com|
peertube\.anarchmusicall\.net|
tube\.kampftoast\.de|
vid\.y-y\.li|
peertube\.xtenz\.xyz|
diode\.zone|
tube\.egf\.mn|
peertube\.nomagic\.uk|
visionon\.tv|
videos\.koumoul\.com|
video\.rastapuls\.com|
video\.mantlepro\.com|
video\.deadsuperhero\.com|
peertube\.musicstudio\.pro|
peertube\.we-keys\.fr|
artitube\.artifaille\.fr|
peertube\.ethernia\.net|
tube\.midov\.pl|
peertube\.fr|
watch\.snoot\.tube|
peertube\.donnadieu\.fr|
argos\.aquilenet\.fr|
tube\.nemsia\.org|
tube\.bruniau\.net|
videos\.darckoune\.moe|
tube\.traydent\.info|
dev\.videos\.lecygnenoir\.info|
peertube\.nayya\.org|
peertube\.live|
peertube\.mofgao\.space|
video\.lequerrec\.eu|
peertube\.amicale\.net|
aperi\.tube|
tube\.ac-lyon\.fr|
video\.lw1\.at|
www\.yiny\.org|
videos\.pofilo\.fr|
tube\.lou\.lt|
choob\.h\.etbus\.ch|
tube\.hoga\.fr|
peertube\.heberge\.fr|
video\.obermui\.de|
videos\.cloudfrancois\.fr|
betamax\.video|
video\.typica\.us|
tube\.piweb\.be|
video\.blender\.org|
peertube\.cat|
tube\.kdy\.ch|
pe\.ertu\.be|
peertube\.social|
videos\.lescommuns\.org|
tv\.datamol\.org|
videonaute\.fr|
dialup\.express|
peertube\.nogafa\.org|
megatube\.lilomoino\.fr|
peertube\.tamanoir\.foucry\.net|
peertube\.devosi\.org|
peertube\.1312\.media|
tube\.bootlicker\.party|
skeptikon\.fr|
video\.blueline\.mg|
tube\.homecomputing\.fr|
tube\.ouahpiti\.info|
video\.tedomum\.net|
video\.g3l\.org|
fontube\.fr|
peertube\.gaialabs\.ch|
tube\.kher\.nl|
peertube\.qtg\.fr|
video\.migennes\.net|
tube\.p2p\.legal|
troll\.tv|
videos\.iut-orsay\.fr|
peertube\.solidev\.net|
videos\.cemea\.org|
video\.passageenseine\.fr|
videos\.festivalparminous\.org|
peertube\.touhoppai\.moe|
sikke\.fi|
peer\.hostux\.social|
share\.tube|
peertube\.walkingmountains\.fr|
videos\.benpro\.fr|
peertube\.parleur\.net|
peertube\.heraut\.eu|
tube\.aquilenet\.fr|
peertube\.gegeweb\.eu|
framatube\.org|
thinkerview\.video|
tube\.conferences-gesticulees\.net|
peertube\.datagueule\.tv|
video\.lqdn\.fr|
tube\.mochi\.academy|
media\.zat\.im|
video\.colibris-outilslibres\.org|
tube\.svnet\.fr|
peertube\.video|
peertube2\.cpy\.re|
peertube3\.cpy\.re|
videos\.tcit\.fr|
peertube\.cpy\.re|
canard\.tube
)'''
_UUID_RE = r'[\da-zA-Z]{22}|[\da-fA-F]{8}-[\da-fA-F]{4}-[\da-fA-F]{4}-[\da-fA-F]{4}-[\da-fA-F]{12}'
_API_BASE = 'https://%s/api/v1/videos/%s/%s'
_VALID_URL = r'''(?x)
(?:
peertube:(?P<host>[^:]+):|
https?://(?P<host_2>%s)/(?:videos/(?:watch|embed)|api/v\d/videos|w)/
)
(?P<id>%s)
''' % (_INSTANCES_RE, _UUID_RE)
_TESTS = [{
'url': 'https://framatube.org/videos/watch/9c9de5e8-0a1e-484a-b099-e80766180a6d',
'md5': '8563064d245a4be5705bddb22bb00a28',
'info_dict': {
'id': '9c9de5e8-0a1e-484a-b099-e80766180a6d',
'ext': 'mp4',
'title': 'What is PeerTube?',
'description': 'md5:3fefb8dde2b189186ce0719fda6f7b10',
'thumbnail': r're:https?://.*\.(?:jpg|png)',
'timestamp': 1538391166,
'upload_date': '20181001',
'uploader': 'Framasoft',
'uploader_id': '3',
'uploader_url': 'https://framatube.org/accounts/framasoft',
'channel': 'A propos de PeerTube',
'channel_id': '2215',
'channel_url': 'https://framatube.org/video-channels/joinpeertube',
'language': 'en',
'license': 'Attribution - Share Alike',
'duration': 113,
'view_count': int,
'like_count': int,
'dislike_count': int,
'tags': ['framasoft', 'peertube'],
'categories': ['Science & Technology'],
}
}, {
'url': 'https://peertube2.cpy.re/w/122d093a-1ede-43bd-bd34-59d2931ffc5e',
'info_dict': {
'id': '122d093a-1ede-43bd-bd34-59d2931ffc5e',
'ext': 'mp4',
'title': 'E2E tests',
'uploader_id': '37855',
'timestamp': 1589276219,
'upload_date': '20200512',
'uploader': 'chocobozzz',
}
}, {
'url': 'https://peertube2.cpy.re/w/3fbif9S3WmtTP8gGsC5HBd',
'info_dict': {
'id': '3fbif9S3WmtTP8gGsC5HBd',
'ext': 'mp4',
'title': 'E2E tests',
'uploader_id': '37855',
'timestamp': 1589276219,
'upload_date': '20200512',
'uploader': 'chocobozzz',
},
}, {
'url': 'https://peertube2.cpy.re/api/v1/videos/3fbif9S3WmtTP8gGsC5HBd',
'info_dict': {
'id': '3fbif9S3WmtTP8gGsC5HBd',
'ext': 'mp4',
'title': 'E2E tests',
'uploader_id': '37855',
'timestamp': 1589276219,
'upload_date': '20200512',
'uploader': 'chocobozzz',
},
}, {
# Issue #26002
'url': 'peertube:spacepub.space:d8943b2d-8280-497b-85ec-bc282ec2afdc',
'info_dict': {
'id': 'd8943b2d-8280-497b-85ec-bc282ec2afdc',
'ext': 'mp4',
'title': 'Dot matrix printer shell demo',
'uploader_id': '3',
'timestamp': 1587401293,
'upload_date': '20200420',
'uploader': 'Drew DeVault',
}
}, {
'url': 'https://peertube.debian.social/videos/watch/0b04f13d-1e18-4f1d-814e-4979aa7c9c44',
'only_matching': True,
}, {
# nsfw
'url': 'https://vod.ksite.de/videos/watch/9bb88cd3-9959-46d9-9ab9-33d2bb704c39',
'only_matching': True,
}, {
'url': 'https://vod.ksite.de/videos/embed/fed67262-6edb-4d1c-833b-daa9085c71d7',
'only_matching': True,
}, {
'url': 'https://peertube.tv/api/v1/videos/c1875674-97d0-4c94-a058-3f7e64c962e8',
'only_matching': True,
}, {
'url': 'peertube:framatube.org:b37a5b9f-e6b5-415c-b700-04a5cd6ec205',
'only_matching': True,
}]
@staticmethod
def _extract_peertube_url(webpage, source_url):
mobj = re.match(
r'https?://(?P<host>[^/]+)/(?:videos/(?:watch|embed)|w)/(?P<id>%s)'
% PeerTubeIE._UUID_RE, source_url)
if mobj and any(p in webpage for p in (
'meta property="og:platform" content="PeerTube"',
'<title>PeerTube<',
'There will be other non JS-based clients to access PeerTube',
'>We are sorry but it seems that PeerTube is not compatible with your web browser.<')):
return 'peertube:%s:%s' % mobj.group('host', 'id')
@staticmethod
def _extract_urls(webpage, source_url):
entries = re.findall(
r'''(?x)<iframe[^>]+\bsrc=["\'](?P<url>(?:https?:)?//%s/videos/embed/%s)'''
% (PeerTubeIE._INSTANCES_RE, PeerTubeIE._UUID_RE), webpage)
if not entries:
peertube_url = PeerTubeIE._extract_peertube_url(webpage, source_url)
if peertube_url:
entries = [peertube_url]
return entries
def _call_api(self, host, video_id, path, note=None, errnote=None, fatal=True):
return self._download_json(
self._API_BASE % (host, video_id, path), video_id,
note=note, errnote=errnote, fatal=fatal)
def _get_subtitles(self, host, video_id):
captions = self._call_api(
host, video_id, 'captions', note='Downloading captions JSON',
fatal=False)
if not isinstance(captions, dict):
return
data = captions.get('data')
if not isinstance(data, list):
return
subtitles = {}
for e in data:
language_id = try_get(e, lambda x: x['language']['id'], compat_str)
caption_url = urljoin('https://%s' % host, e.get('captionPath'))
if not caption_url:
continue
subtitles.setdefault(language_id or 'en', []).append({
'url': caption_url,
})
return subtitles
def _real_extract(self, url):
mobj = self._match_valid_url(url)
host = mobj.group('host') or mobj.group('host_2')
video_id = mobj.group('id')
video = self._call_api(
host, video_id, '', note='Downloading video JSON')
title = video['name']
formats = []
files = video.get('files') or []
for playlist in (video.get('streamingPlaylists') or []):
if not isinstance(playlist, dict):
continue
playlist_files = playlist.get('files')
if not (playlist_files and isinstance(playlist_files, list)):
continue
files.extend(playlist_files)
for file_ in files:
if not isinstance(file_, dict):
continue
file_url = url_or_none(file_.get('fileUrl'))
if not file_url:
continue
file_size = int_or_none(file_.get('size'))
format_id = try_get(
file_, lambda x: x['resolution']['label'], compat_str)
f = parse_resolution(format_id)
f.update({
'url': file_url,
'format_id': format_id,
'filesize': file_size,
})
if format_id == '0p':
f['vcodec'] = 'none'
else:
f['fps'] = int_or_none(file_.get('fps'))
formats.append(f)
self._sort_formats(formats)
description = video.get('description')
if description and len(description) >= 250:
# description is shortened
full_description = self._call_api(
host, video_id, 'description', note='Downloading description JSON',
fatal=False)
if isinstance(full_description, dict):
description = str_or_none(full_description.get('description')) or description
subtitles = self.extract_subtitles(host, video_id)
def data(section, field, type_):
return try_get(video, lambda x: x[section][field], type_)
def account_data(field, type_):
return data('account', field, type_)
def channel_data(field, type_):
return data('channel', field, type_)
category = data('category', 'label', compat_str)
categories = [category] if category else None
nsfw = video.get('nsfw')
if nsfw is bool:
age_limit = 18 if nsfw else 0
else:
age_limit = None
webpage_url = 'https://%s/videos/watch/%s' % (host, video_id)
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': urljoin(webpage_url, video.get('thumbnailPath')),
'timestamp': unified_timestamp(video.get('publishedAt')),
'uploader': account_data('displayName', compat_str),
'uploader_id': str_or_none(account_data('id', int)),
'uploader_url': url_or_none(account_data('url', compat_str)),
'channel': channel_data('displayName', compat_str),
'channel_id': str_or_none(channel_data('id', int)),
'channel_url': url_or_none(channel_data('url', compat_str)),
'language': data('language', 'id', compat_str),
'license': data('licence', 'label', compat_str),
'duration': int_or_none(video.get('duration')),
'view_count': int_or_none(video.get('views')),
'like_count': int_or_none(video.get('likes')),
'dislike_count': int_or_none(video.get('dislikes')),
'age_limit': age_limit,
'tags': try_get(video, lambda x: x['tags'], list),
'categories': categories,
'formats': formats,
'subtitles': subtitles,
'webpage_url': webpage_url,
}
class PeerTubePlaylistIE(InfoExtractor):
IE_NAME = 'PeerTube:Playlist'
_TYPES = {
'a': 'accounts',
'c': 'video-channels',
'w/p': 'video-playlists',
}
_VALID_URL = r'''(?x)
https?://(?P<host>%s)/(?P<type>(?:%s))/
(?P<id>[^/]+)
''' % (PeerTubeIE._INSTANCES_RE, '|'.join(_TYPES.keys()))
_TESTS = [{
'url': 'https://peertube.tux.ovh/w/p/3af94cba-95e8-4b74-b37a-807ab6d82526',
'info_dict': {
'id': '3af94cba-95e8-4b74-b37a-807ab6d82526',
'description': 'playlist',
'timestamp': 1611171863,
'title': 'playlist',
},
'playlist_mincount': 6,
}, {
'url': 'https://peertube.tux.ovh/w/p/wkyqcQBnsvFxtUB2pkYc1e',
'info_dict': {
'id': 'wkyqcQBnsvFxtUB2pkYc1e',
'description': 'Cette liste de vidéos contient uniquement les jeux qui peuvent être terminés en une seule vidéo.',
'title': 'Let\'s Play',
'timestamp': 1604147331,
},
'playlist_mincount': 6,
}, {
'url': 'https://peertube.debian.social/w/p/hFdJoTuyhNJVa1cDWd1d12',
'info_dict': {
'id': 'hFdJoTuyhNJVa1cDWd1d12',
'description': 'Diversas palestras do Richard Stallman no Brasil.',
'title': 'Richard Stallman no Brasil',
'timestamp': 1599676222,
},
'playlist_mincount': 9,
}, {
'url': 'https://peertube2.cpy.re/a/chocobozzz/videos',
'info_dict': {
'id': 'chocobozzz',
'timestamp': 1553874564,
'title': 'chocobozzz',
},
'playlist_mincount': 2,
}, {
'url': 'https://framatube.org/c/bf54d359-cfad-4935-9d45-9d6be93f63e8/videos',
'info_dict': {
'id': 'bf54d359-cfad-4935-9d45-9d6be93f63e8',
'timestamp': 1519917377,
'title': 'Les vidéos de Framasoft',
},
'playlist_mincount': 345,
}, {
'url': 'https://peertube2.cpy.re/c/blender_open_movies@video.blender.org/videos',
'info_dict': {
'id': 'blender_open_movies@video.blender.org',
'timestamp': 1542287810,
'title': 'Official Blender Open Movies',
},
'playlist_mincount': 11,
}]
_API_BASE = 'https://%s/api/v1/%s/%s%s'
_PAGE_SIZE = 30
def call_api(self, host, name, path, base, **kwargs):
return self._download_json(
self._API_BASE % (host, base, name, path), name, **kwargs)
def fetch_page(self, host, id, type, page):
page += 1
video_data = self.call_api(
host, id,
f'/videos?sort=-createdAt&start={self._PAGE_SIZE * (page - 1)}&count={self._PAGE_SIZE}&nsfw=both',
type, note=f'Downloading page {page}').get('data', [])
for video in video_data:
shortUUID = video.get('shortUUID') or try_get(video, lambda x: x['video']['shortUUID'])
video_title = video.get('name') or try_get(video, lambda x: x['video']['name'])
yield self.url_result(
f'https://{host}/w/{shortUUID}', PeerTubeIE.ie_key(),
video_id=shortUUID, video_title=video_title)
def _extract_playlist(self, host, type, id):
info = self.call_api(host, id, '', type, note='Downloading playlist information', fatal=False)
playlist_title = info.get('displayName')
playlist_description = info.get('description')
playlist_timestamp = unified_timestamp(info.get('createdAt'))
channel = try_get(info, lambda x: x['ownerAccount']['name']) or info.get('displayName')
channel_id = try_get(info, lambda x: x['ownerAccount']['id']) or info.get('id')
thumbnail = format_field(info, 'thumbnailPath', f'https://{host}%s')
entries = OnDemandPagedList(functools.partial(
self.fetch_page, host, id, type), self._PAGE_SIZE)
return self.playlist_result(
entries, id, playlist_title, playlist_description,
timestamp=playlist_timestamp, channel=channel, channel_id=channel_id, thumbnail=thumbnail)
def _real_extract(self, url):
type, host, id = self._match_valid_url(url).group('type', 'host', 'id')
type = self._TYPES[type]
return self._extract_playlist(host, type, id)
| 46.985745 | 126 | 0.371885 | 4,660 | 65,921 | 5.199785 | 0.343777 | 0.022286 | 0.016095 | 0.019603 | 0.075399 | 0.046882 | 0.027898 | 0.023441 | 0.018241 | 0.018241 | 0 | 0.023396 | 0.518257 | 65,921 | 1,402 | 127 | 47.019258 | 0.739616 | 0.000834 | 0 | 0.059297 | 0 | 0.005124 | 0.852689 | 0.214423 | 0 | 0 | 0.000061 | 0 | 0 | 1 | 0.008785 | false | 0.002928 | 0.004392 | 0.00366 | 0.032211 | 0.000732 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0b2c51a68cb94aeae025224ace6e5ef1cd69cf3 | 559 | py | Python | dataactcore/migrations/versions/20b5109967bf_.py | brianherman/data-act-broker-backend | 80eb055b9d245046192f7ad4fd0be7d0e11d2dec | [
"CC0-1.0"
] | 1 | 2019-06-22T21:53:16.000Z | 2019-06-22T21:53:16.000Z | dataactcore/migrations/versions/20b5109967bf_.py | brianherman/data-act-broker-backend | 80eb055b9d245046192f7ad4fd0be7d0e11d2dec | [
"CC0-1.0"
] | 3 | 2021-08-22T11:47:45.000Z | 2022-03-29T22:06:49.000Z | dataactcore/migrations/versions/20b5109967bf_.py | brianherman/data-act-broker-backend | 80eb055b9d245046192f7ad4fd0be7d0e11d2dec | [
"CC0-1.0"
] | 1 | 2020-07-17T23:50:56.000Z | 2020-07-17T23:50:56.000Z | """empty message
Revision ID: 20b5109967bf
Revises: 17ec44522729, d7e2e541f6d6
Create Date: 2017-04-03 11:25:27.381702
"""
# revision identifiers, used by Alembic.
revision = '20b5109967bf'
down_revision = ('17ec44522729', 'd7e2e541f6d6')
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
pass
def downgrade_data_broker():
pass
| 15.108108 | 48 | 0.729875 | 70 | 559 | 5.642857 | 0.614286 | 0.101266 | 0.086076 | 0.070886 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.152866 | 0.157424 | 559 | 36 | 49 | 15.527778 | 0.685775 | 0.27907 | 0 | 0.142857 | 0 | 0 | 0.147208 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0.142857 | 0.142857 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
e0b5319dac20a5ecb10130dc6d9b7b28c0c7858b | 209 | py | Python | setup.py | sssunda/flask-jwt-auth | 12c91ea5668926292e67a7dea722873851423ce6 | [
"MIT"
] | 1 | 2020-01-29T15:29:58.000Z | 2020-01-29T15:29:58.000Z | setup.py | sssunda/flask-jwt-auth | 12c91ea5668926292e67a7dea722873851423ce6 | [
"MIT"
] | null | null | null | setup.py | sssunda/flask-jwt-auth | 12c91ea5668926292e67a7dea722873851423ce6 | [
"MIT"
] | null | null | null | from setuptools import setup
setup(
name='flask-jwt-auth',
version='1.0.0',
author='desun',
packages=['apps'],
include_package_data=True,
install_requires=[
'flask',
],
)
| 14.928571 | 30 | 0.593301 | 24 | 209 | 5.041667 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019108 | 0.248804 | 209 | 13 | 31 | 16.076923 | 0.751592 | 0 | 0 | 0 | 0 | 0 | 0.157895 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.090909 | 0 | 0.090909 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0b5395da4644e47c678dcede28d3a911f1282b3 | 34,969 | py | Python | pysnmp-with-texts/ADTRAN-IF-PERF-HISTORY-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/ADTRAN-IF-PERF-HISTORY-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/ADTRAN-IF-PERF-HISTORY-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module ADTRAN-IF-PERF-HISTORY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ADTRAN-IF-PERF-HISTORY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:14:54 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
adGenAOSConformance, adGenAOSCommon = mibBuilder.importSymbols("ADTRAN-AOS", "adGenAOSConformance", "adGenAOSCommon")
adIdentity, = mibBuilder.importSymbols("ADTRAN-MIB", "adIdentity")
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
HCPerfTotalCount, HCPerfValidIntervals, HCPerfInvalidIntervals, HCPerfCurrentCount, HCPerfIntervalCount, HCPerfTimeElapsed = mibBuilder.importSymbols("HC-PerfHist-TC-MIB", "HCPerfTotalCount", "HCPerfValidIntervals", "HCPerfInvalidIntervals", "HCPerfCurrentCount", "HCPerfIntervalCount", "HCPerfTimeElapsed")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
Integer32, ObjectIdentity, IpAddress, Bits, ModuleIdentity, Gauge32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, iso, TimeTicks, Unsigned32, Counter64, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "ObjectIdentity", "IpAddress", "Bits", "ModuleIdentity", "Gauge32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "iso", "TimeTicks", "Unsigned32", "Counter64", "NotificationType")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
adGenAosIfPerfHistoryMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 664, 6, 10000, 53, 1, 7))
adGenAosIfPerfHistoryMib.setRevisions(('2013-08-23 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: adGenAosIfPerfHistoryMib.setRevisionsDescriptions(('Initial version',))
if mibBuilder.loadTexts: adGenAosIfPerfHistoryMib.setLastUpdated('201308230000Z')
if mibBuilder.loadTexts: adGenAosIfPerfHistoryMib.setOrganization('ADTRAN Inc.')
if mibBuilder.loadTexts: adGenAosIfPerfHistoryMib.setContactInfo('Info: www.adtran.com Postal: ADTRAN, Inc. 901 Explorer Blvd. Huntsville, AL 35806 Tel: +1 888 423-8726 E-mail: support@adtran.com')
if mibBuilder.loadTexts: adGenAosIfPerfHistoryMib.setDescription('This MIB module defines high capacity performance statistics for interfaces within an AOS product. Copyright (C) ADTRAN, Inc. (2013).')
adGenAosIfPerfHistory = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7))
adIfPhCurTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1), )
if mibBuilder.loadTexts: adIfPhCurTable.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurTable.setDescription('This table contains current performance history information that has been recorded since the last 15 minute interval ended and from when the last 1 day interval ended. This table is indexed by by ifIndex which SHOULD be maintained in a persistent manner.')
adIfPhCurEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: adIfPhCurEntry.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurEntry.setDescription("This specifies the information contained in one entry of the adIfPerfHistoryCurTable. It is indexed by an interface's IfIndex.")
adIfPhCurTimeElapsed15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 1), HCPerfTimeElapsed()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurTimeElapsed15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurTimeElapsed15Min.setDescription('Total elapsed seconds in the current 15 minute interval.')
adIfPhCurValidIntervals15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 2), HCPerfValidIntervals()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurValidIntervals15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurValidIntervals15Min.setDescription('Number of valid 15 minute intervals over the last 24 hours.')
adIfPhCurInvalidIntervals15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 3), HCPerfInvalidIntervals()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInvalidIntervals15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInvalidIntervals15Min.setDescription('Number of invalid 15 minute intervals over the last 24 hours.')
adIfPhCurInOctets15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 4), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInOctets15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInOctets15Min.setDescription('Count of octets received in the current 15 minute interval.')
adIfPhCurInUcastPkts15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 5), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInUcastPkts15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInUcastPkts15Min.setDescription('Count of unicast packets received in the current 15 minute interval.')
adIfPhCurInMcastPkts15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 6), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInMcastPkts15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInMcastPkts15Min.setDescription('Count of multicast packets received in the current 15 minute interval.')
adIfPhCurInBcastPkts15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 7), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInBcastPkts15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInBcastPkts15Min.setDescription('Count of broadcast packets received in the current 15 minute interval.')
adIfPhCurInDiscards15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 8), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInDiscards15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInDiscards15Min.setDescription('Count of inbound packets discarded in the current 15 minute interval.')
adIfPhCurInErrors15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 9), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInErrors15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInErrors15Min.setDescription('Count of inbound packets containing errors in the current 15 minute interval.')
adIfPhCurInUnknownProtos15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 10), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInUnknownProtos15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInUnknownProtos15Min.setDescription('Count of inbound packets with an unknown or unsupported protocol in the current 15 minute interval.')
adIfPhCurOutOctets15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 11), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutOctets15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutOctets15Min.setDescription('Count of octets transmitted in the current 15 minute interval.')
adIfPhCurOutUcastPkts15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 12), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutUcastPkts15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutUcastPkts15Min.setDescription('Count of transmitted unicast packets in the current 15 minute interval.')
adIfPhCurOutMcastPkts15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 13), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutMcastPkts15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutMcastPkts15Min.setDescription('Count of transmitted multicast packets in the current 15 minute interval.')
adIfPhCurOutBcastPkts15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 14), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutBcastPkts15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutBcastPkts15Min.setDescription('Count of transmitted broadcast packets in the current 15 minute interval.')
adIfPhCurOutDiscards15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 15), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutDiscards15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutDiscards15Min.setDescription('Count of discarded outbound packets in the current 15 minute interval.')
adIfPhCurOutErrors15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 16), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutErrors15Min.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutErrors15Min.setDescription('Count of outbound packets that could not be transmitted due to error in the current 15 minute interval.')
adIfPhCurTimeElapsed1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 17), HCPerfTimeElapsed()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurTimeElapsed1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurTimeElapsed1Day.setDescription('Total elapsed seconds in the current 1 day interval.')
adIfPhCurValidIntervals1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 18), HCPerfValidIntervals()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurValidIntervals1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurValidIntervals1Day.setDescription('Number of valid 1 day intervals available.')
adIfPhCurInvalidIntervals1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 19), HCPerfInvalidIntervals()).setMaxAccess("readonly")
# --- adIfPhCurTable columns: counters for the current (in-progress) 1-day interval.
# All columns are read-only HCPerfCurrentCount gauges under OID
# 1.3.6.1.4.1.664.5.53.1.7.1.1.x (auto-generated from the ASN.1 MIB; do not reorder).
if mibBuilder.loadTexts: adIfPhCurInvalidIntervals1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInvalidIntervals1Day.setDescription('Number of invalid 1 day intervals available.')
adIfPhCurInOctets1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 20), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInOctets1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInOctets1Day.setDescription('Count of octets received in the current 1 day interval.')
adIfPhCurInUcastPkts1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 21), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInUcastPkts1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInUcastPkts1Day.setDescription('Count of unicast packets received in the current 1 day interval.')
adIfPhCurInMcastPkts1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 22), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInMcastPkts1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInMcastPkts1Day.setDescription('Count of multicast packets received in the current 1 day interval.')
adIfPhCurInBcastPkts1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 23), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInBcastPkts1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInBcastPkts1Day.setDescription('Count of broadcast packets received in the current 1 day interval.')
adIfPhCurInDiscards1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 24), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInDiscards1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInDiscards1Day.setDescription('Count of inbound packets discarded in the current 1 day interval.')
adIfPhCurInErrors1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 25), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInErrors1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInErrors1Day.setDescription('Count of inbound packets containing errors in the current 1 day interval.')
adIfPhCurInUnknownProtos1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 26), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurInUnknownProtos1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurInUnknownProtos1Day.setDescription('Count of inbound packets with an unknown or unsupported protocol in the current 1 day interval.')
# Outbound (transmit-direction) counters for the same current 1-day interval.
adIfPhCurOutOctets1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 27), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutOctets1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutOctets1Day.setDescription('Count of octets transmitted in the current 1 day interval.')
adIfPhCurOutUcastPkts1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 28), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutUcastPkts1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutUcastPkts1Day.setDescription('Count of transmitted unicast packets in the current 1 day interval.')
adIfPhCurOutMcastPkts1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 29), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutMcastPkts1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutMcastPkts1Day.setDescription('Count of transmitted multicast packets in the current 1 day interval.')
adIfPhCurOutBcastPkts1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 30), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutBcastPkts1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutBcastPkts1Day.setDescription('Count of transmitted broadcast packets in the current 1 day interval.')
adIfPhCurOutDiscards1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 31), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutDiscards1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutDiscards1Day.setDescription('Count of discarded outbound packets in the current 1 day interval.')
adIfPhCurOutErrors1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 1, 1, 32), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPhCurOutErrors1Day.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurOutErrors1Day.setDescription('Count of outbound packets that could not be transmitted due to error in the current 1 day interval.')
# --- adIfPh15MinIntervalTable: history of completed 15-minute intervals.
# Rows are indexed by (ifIndex, adIfPh15MinIntervalNumber); all counter columns
# are read-only HCPerfIntervalCount values (auto-generated from the ASN.1 MIB).
adIfPh15MinIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2), )
if mibBuilder.loadTexts: adIfPh15MinIntervalTable.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinIntervalTable.setDescription('This table contains performance history information for each valid 15 minute interval. This table is indexed by by ifIndex and the interval number.')
adIfPh15MinIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinIntervalNumber"))
if mibBuilder.loadTexts: adIfPh15MinIntervalEntry.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinIntervalEntry.setDescription('An entry in the adIfPh15MinIntervalTable.')
# Index column: 1 = most recent completed interval, up to 96 (24 hours of history).
adIfPh15MinIntervalNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96)))
if mibBuilder.loadTexts: adIfPh15MinIntervalNumber.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinIntervalNumber.setDescription('Performance history interval number. Interval 1 is the most recent previous interval; interval 96 is 24 hours ago. Intervals 2..96 are optional.')
adIfPh15MinInOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 2), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinInOctets.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinInOctets.setDescription('Count of octets received in the 15 minute interval.')
adIfPh15MinInUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 3), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinInUcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinInUcastPkts.setDescription('Count of unicast packets received in the 15 minute interval.')
adIfPh15MinInMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 4), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinInMcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinInMcastPkts.setDescription('Count of multicast packets received in the 15 minute interval.')
adIfPh15MinInBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 5), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinInBcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinInBcastPkts.setDescription('Count of broadcast packets received in the 15 minute interval.')
adIfPh15MinInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 6), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinInDiscards.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinInDiscards.setDescription('Count of inbound packets discarded in the 15 minute interval.')
adIfPh15MinInErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 7), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinInErrors.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinInErrors.setDescription('Count of inbound packets containing errors in the 15 minute interval.')
adIfPh15MinInUnknownProtos = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 8), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinInUnknownProtos.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinInUnknownProtos.setDescription('Count of inbound packets with an unknown or unsupported protocol in the 15 minute interval.')
# Outbound (transmit-direction) counters for the 15-minute interval.
adIfPh15MinOutOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 9), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinOutOctets.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinOutOctets.setDescription('Count of octets transmitted in the 15 minute interval.')
adIfPh15MinOutUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 10), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinOutUcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinOutUcastPkts.setDescription('Count of transmitted unicast packets in the 15 minute interval.')
adIfPh15MinOutMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 11), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinOutMcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinOutMcastPkts.setDescription('Count of transmitted multicast packets in the 15 minute interval.')
adIfPh15MinOutBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 12), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinOutBcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinOutBcastPkts.setDescription('Count of transmitted broadcast packets in the 15 minute interval.')
adIfPh15MinOutDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 13), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinOutDiscards.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinOutDiscards.setDescription('Count of discarded outbound packets in the 15 minute interval.')
adIfPh15MinOutErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 2, 1, 14), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh15MinOutErrors.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinOutErrors.setDescription('Count of outbound packets that could not be transmitted due to error in the 15 minute interval.')
# --- adIfPh1DayIntervalTable: history of completed 1-day intervals.
# Rows are indexed by (ifIndex, adIfPh1DayIntervalNumber); all counter columns
# are read-only HCPerfTotalCount values (auto-generated from the ASN.1 MIB).
adIfPh1DayIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3), )
if mibBuilder.loadTexts: adIfPh1DayIntervalTable.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayIntervalTable.setDescription('This table contains performance history information for each valid 1 day interval. This table is indexed by by ifIndex and the interval number.')
adIfPh1DayIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayIntervalNumber"))
if mibBuilder.loadTexts: adIfPh1DayIntervalEntry.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayIntervalEntry.setDescription('An entry in the adIfPh1DayIntervalTable.')
# Index column: 1 = most recent completed day, up to 30 days of history.
adIfPh1DayIntervalNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30)))
if mibBuilder.loadTexts: adIfPh1DayIntervalNumber.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayIntervalNumber.setDescription('Performance history interval number. Interval 1 is the most recent previous day; interval 7 is 7 days ago. Intervals 2..30 are optional.')
adIfPh1DayInOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 2), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayInOctets.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayInOctets.setDescription('Count of octets received in the 1 day interval.')
adIfPh1DayInUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 3), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayInUcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayInUcastPkts.setDescription('Count of unicast packets received in the 1 day interval.')
adIfPh1DayInMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 4), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayInMcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayInMcastPkts.setDescription('Count of multicast packets received in the 1 day interval.')
adIfPh1DayInBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 5), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayInBcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayInBcastPkts.setDescription('Count of broadcast packets received in the 1 day interval.')
adIfPh1DayInDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 6), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayInDiscards.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayInDiscards.setDescription('Count of inbound packets discarded in the 1 day interval.')
adIfPh1DayInErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 7), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayInErrors.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayInErrors.setDescription('Count of inbound packets containing errors in the 1 day interval.')
adIfPh1DayInUnknownProtos = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 8), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayInUnknownProtos.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayInUnknownProtos.setDescription('Count of inbound packets with an unknown or unsupported protocol in the 1 day interval.')
# Outbound (transmit-direction) counters for the 1-day interval.
adIfPh1DayOutOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 9), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayOutOctets.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayOutOctets.setDescription('Count of octets transmitted in the 1 day interval.')
adIfPh1DayOutUcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 10), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayOutUcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayOutUcastPkts.setDescription('Count of transmitted unicast packets in the 1 day interval.')
adIfPh1DayOutMcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 11), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayOutMcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayOutMcastPkts.setDescription('Count of transmitted multicast packets in the 1 day interval.')
adIfPh1DayOutBcastPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 12), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayOutBcastPkts.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayOutBcastPkts.setDescription('Count of transmitted broadcast packets in the 1 day interval.')
adIfPh1DayOutDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 13), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayOutDiscards.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayOutDiscards.setDescription('Count of discarded outbound packets in the 1 day interval.')
adIfPh1DayOutErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 1, 7, 3, 1, 14), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adIfPh1DayOutErrors.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayOutErrors.setDescription('Count of outbound packets that could not be transmitted due to error in the 1 day interval.')
# --- Conformance section: object groups and the module compliance statement.
adGenAosIfPerfHistoryConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 16))
adGenAosIfPerfHistoryGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 16, 1))
adGenAosIfPerfHistoryCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 16, 2))
adGenAosIfPerfHistoryCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 16, 2, 1)).setObjects(("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurGroup"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinIntervalGroup"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayIntervalGroup"))
# On pysnmp > 4.4.0 setStatus() returns a new object rather than mutating in
# place, so the result must be re-bound.  The guarded setStatus() lines below
# were not indented under their "if" statements (a SyntaxError); fixed here.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAosIfPerfHistoryCompliance = adGenAosIfPerfHistoryCompliance.setStatus('current')
if mibBuilder.loadTexts: adGenAosIfPerfHistoryCompliance.setDescription('The compliance statement for SNMPv2 entities which implement interface performance history.')
adIfPhCurGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 16, 1, 1)).setObjects(("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurTimeElapsed15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurValidIntervals15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInvalidIntervals15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInOctets15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInUcastPkts15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInMcastPkts15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInBcastPkts15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInDiscards15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInErrors15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInUnknownProtos15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutOctets15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutUcastPkts15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutMcastPkts15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutBcastPkts15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutDiscards15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutErrors15Min"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurTimeElapsed1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurValidIntervals1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInvalidIntervals1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInOctets1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInUcastPkts1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInMcastPkts1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInBcastPkts1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInDiscards1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInErrors1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurInUnknownProtos1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutOctets1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutUcastPkts1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutMcastPkts1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutBcastPkts1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutDiscards1Day"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPhCurOutErrors1Day"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adIfPhCurGroup = adIfPhCurGroup.setStatus('current')
if mibBuilder.loadTexts: adIfPhCurGroup.setDescription('The Current Group.')
adIfPh15MinIntervalGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 16, 1, 2)).setObjects(("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinInOctets"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinInUcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinInMcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinInBcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinInDiscards"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinInErrors"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinInUnknownProtos"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinOutOctets"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinOutUcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinOutMcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinOutBcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinOutDiscards"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh15MinOutErrors"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adIfPh15MinIntervalGroup = adIfPh15MinIntervalGroup.setStatus('current')
if mibBuilder.loadTexts: adIfPh15MinIntervalGroup.setDescription('The 15 minute interval group.')
adIfPh1DayIntervalGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 16, 1, 3)).setObjects(("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayInOctets"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayInUcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayInMcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayInBcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayInDiscards"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayInErrors"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayInUnknownProtos"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayOutOctets"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayOutUcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayOutMcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayOutBcastPkts"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayOutDiscards"), ("ADTRAN-IF-PERF-HISTORY-MIB", "adIfPh1DayOutErrors"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adIfPh1DayIntervalGroup = adIfPh1DayIntervalGroup.setStatus('current')
if mibBuilder.loadTexts: adIfPh1DayIntervalGroup.setDescription('The 1 day interval group.')
# Register every object defined by this module with the MIB builder under the
# module name "ADTRAN-IF-PERF-HISTORY-MIB", so other loaded MIB modules can
# resolve these symbols by name (PYSNMP_MODULE_ID identifies the module itself).
mibBuilder.exportSymbols("ADTRAN-IF-PERF-HISTORY-MIB", adIfPh15MinInBcastPkts=adIfPh15MinInBcastPkts, adIfPhCurInvalidIntervals1Day=adIfPhCurInvalidIntervals1Day, adIfPh15MinIntervalNumber=adIfPh15MinIntervalNumber, adIfPh15MinIntervalTable=adIfPh15MinIntervalTable, adIfPhCurOutMcastPkts15Min=adIfPhCurOutMcastPkts15Min, adIfPhCurOutDiscards15Min=adIfPhCurOutDiscards15Min, adIfPhCurOutUcastPkts15Min=adIfPhCurOutUcastPkts15Min, adIfPh1DayInDiscards=adIfPh1DayInDiscards, adGenAosIfPerfHistory=adGenAosIfPerfHistory, adIfPh1DayInErrors=adIfPh1DayInErrors, adIfPhCurInUcastPkts1Day=adIfPhCurInUcastPkts1Day, adIfPhCurInMcastPkts15Min=adIfPhCurInMcastPkts15Min, adIfPhCurInUnknownProtos1Day=adIfPhCurInUnknownProtos1Day, adIfPh15MinInUcastPkts=adIfPh15MinInUcastPkts, adIfPh1DayIntervalNumber=adIfPh1DayIntervalNumber, adIfPh15MinInMcastPkts=adIfPh15MinInMcastPkts, adIfPhCurOutOctets1Day=adIfPhCurOutOctets1Day, adIfPhCurInUcastPkts15Min=adIfPhCurInUcastPkts15Min, adIfPhCurInUnknownProtos15Min=adIfPhCurInUnknownProtos15Min, adIfPh1DayInBcastPkts=adIfPh1DayInBcastPkts, adIfPhCurInBcastPkts15Min=adIfPhCurInBcastPkts15Min, adIfPhCurInErrors15Min=adIfPhCurInErrors15Min, adIfPhCurOutOctets15Min=adIfPhCurOutOctets15Min, adIfPhCurOutMcastPkts1Day=adIfPhCurOutMcastPkts1Day, adIfPhCurInvalidIntervals15Min=adIfPhCurInvalidIntervals15Min, adIfPh15MinIntervalEntry=adIfPh15MinIntervalEntry, adIfPhCurOutDiscards1Day=adIfPhCurOutDiscards1Day, adIfPh15MinInUnknownProtos=adIfPh15MinInUnknownProtos, adIfPhCurInDiscards15Min=adIfPhCurInDiscards15Min, adIfPh1DayIntervalEntry=adIfPh1DayIntervalEntry, adIfPhCurInErrors1Day=adIfPhCurInErrors1Day, adIfPhCurInDiscards1Day=adIfPhCurInDiscards1Day, adIfPh15MinInOctets=adIfPh15MinInOctets, adIfPhCurOutBcastPkts15Min=adIfPhCurOutBcastPkts15Min, adIfPh15MinInDiscards=adIfPh15MinInDiscards, adIfPhCurOutErrors15Min=adIfPhCurOutErrors15Min, adIfPhCurValidIntervals15Min=adIfPhCurValidIntervals15Min, adIfPh1DayOutBcastPkts=adIfPh1DayOutBcastPkts, 
adGenAosIfPerfHistoryCompliance=adGenAosIfPerfHistoryCompliance, adIfPh15MinOutOctets=adIfPh15MinOutOctets, adGenAosIfPerfHistoryConformance=adGenAosIfPerfHistoryConformance, adIfPh1DayIntervalTable=adIfPh1DayIntervalTable, adIfPh15MinIntervalGroup=adIfPh15MinIntervalGroup, adIfPh15MinOutUcastPkts=adIfPh15MinOutUcastPkts, adIfPh1DayInMcastPkts=adIfPh1DayInMcastPkts, adIfPhCurTable=adIfPhCurTable, adIfPh1DayInUcastPkts=adIfPh1DayInUcastPkts, adGenAosIfPerfHistoryMib=adGenAosIfPerfHistoryMib, adIfPhCurTimeElapsed15Min=adIfPhCurTimeElapsed15Min, adIfPhCurValidIntervals1Day=adIfPhCurValidIntervals1Day, adIfPhCurInBcastPkts1Day=adIfPhCurInBcastPkts1Day, adIfPh15MinOutDiscards=adIfPh15MinOutDiscards, PYSNMP_MODULE_ID=adGenAosIfPerfHistoryMib, adIfPhCurEntry=adIfPhCurEntry, adIfPh1DayOutOctets=adIfPh1DayOutOctets, adIfPh1DayOutErrors=adIfPh1DayOutErrors, adIfPh1DayOutUcastPkts=adIfPh1DayOutUcastPkts, adIfPhCurInOctets15Min=adIfPhCurInOctets15Min, adIfPh1DayInOctets=adIfPh1DayInOctets, adIfPh1DayInUnknownProtos=adIfPh1DayInUnknownProtos, adIfPhCurOutUcastPkts1Day=adIfPhCurOutUcastPkts1Day, adIfPh1DayOutMcastPkts=adIfPh1DayOutMcastPkts, adGenAosIfPerfHistoryCompliances=adGenAosIfPerfHistoryCompliances, adIfPhCurInMcastPkts1Day=adIfPhCurInMcastPkts1Day, adGenAosIfPerfHistoryGroups=adGenAosIfPerfHistoryGroups, adIfPhCurGroup=adIfPhCurGroup, adIfPhCurOutBcastPkts1Day=adIfPhCurOutBcastPkts1Day, adIfPh15MinOutMcastPkts=adIfPh15MinOutMcastPkts, adIfPhCurTimeElapsed1Day=adIfPhCurTimeElapsed1Day, adIfPh1DayOutDiscards=adIfPh1DayOutDiscards, adIfPh1DayIntervalGroup=adIfPh1DayIntervalGroup, adIfPhCurOutErrors1Day=adIfPhCurOutErrors1Day, adIfPh15MinOutBcastPkts=adIfPh15MinOutBcastPkts, adIfPh15MinOutErrors=adIfPh15MinOutErrors, adIfPh15MinInErrors=adIfPh15MinInErrors, adIfPhCurInOctets1Day=adIfPhCurInOctets1Day)
| 141.574899 | 3,806 | 0.794933 | 3,835 | 34,969 | 7.247979 | 0.088396 | 0.060872 | 0.106526 | 0.010793 | 0.546266 | 0.39315 | 0.244604 | 0.220068 | 0.178479 | 0.141351 | 0 | 0.067667 | 0.082931 | 34,969 | 246 | 3,807 | 142.150407 | 0.799089 | 0.009894 | 0 | 0.021097 | 0 | 0.033755 | 0.293841 | 0.088692 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.042194 | 0 | 0.042194 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0c3c613f7786f43391c237fd48335f6496eb945 | 10,456 | py | Python | xpxchain/models/__init__.py | Sharmelen/python-xpx-chain-sdk | a1bfb1b7da20d50dd5049a950794955d1c492a71 | [
"Apache-2.0"
] | 1 | 2021-02-02T12:46:51.000Z | 2021-02-02T12:46:51.000Z | xpxchain/models/__init__.py | Sharmelen/python-xpx-chain-sdk | a1bfb1b7da20d50dd5049a950794955d1c492a71 | [
"Apache-2.0"
] | 4 | 2021-02-06T06:14:27.000Z | 2021-07-20T11:41:44.000Z | xpxchain/models/__init__.py | Sharmelen/python-xpx-chain-sdk | a1bfb1b7da20d50dd5049a950794955d1c492a71 | [
"Apache-2.0"
] | 1 | 2021-08-09T15:45:21.000Z | 2021-08-09T15:45:21.000Z | """
dto
===
High-level NEM models.
License
-------
Copyright 2019 NEM
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Do not import any of these models in the
# __init__ of the subdirectories, since there is a
# complicated web of inter-dependencies within models.
# Just use glob imports at the models level.
# Account
from .account.account import *
from .account.account_info import *
from .account.account_meta import *
from .account.account_property import *
from .account.account_properties import *
from .account.address import *
from .account.multisig_account_graph_info import *
from .account.multisig_account_info import *
from .account.property_modification_type import *
from .account.property_type import *
from .account.public_account import *
from .account.account_names import *
from .account.account_balance import *
from .account import *
# Blockchain
from .blockchain.block_info import *
from .blockchain.block_type import *
from .blockchain.blockchain_score import *
from .blockchain.blockchain_storage_info import *
from .blockchain.blockchain_server_info import *
from .blockchain.network_type import *
from .blockchain import *
# Receipt
from .receipt.balance_change_receipt import *
from .receipt.balance_transfer_receipt import *
from .receipt.artifact_expiry_receipt import *
from .receipt.inflation_receipt import *
from .receipt.receipt_base import *
from .receipt.receipt import *
from .receipt.receipt_type import *
from .receipt.receipt_version import *
from .receipt.source import *
from .receipt.statements import *
from .receipt.transaction_statement import *
from .receipt.resolution_statement import *
from .receipt.resolution_entry import *
from .receipt import *
# Config
from .config.catapult_config import *
from .config.catapult_upgrade import *
from .config.config_sections import *
from .config.config_fields import *
from .config import *
# Contract
from .contract.contract_info import *
from .contract import *
# Metadata
from .metadata.address_metadata_info import *
from .metadata.address_metadata import *
from .metadata.mosaic_metadata_info import *
from .metadata.mosaic_metadata import *
from .metadata.namespace_metadata_info import *
from .metadata.namespace_metadata import *
from .metadata.field import *
from .metadata.metadata_modification import *
from .metadata.metadata_type import *
from .metadata.metadata_modification_type import *
from .metadata.metadata_info import *
from .metadata import *
# Mosaic
from .mosaic.mosaic import *
from .mosaic.mosaic_id import *
from .mosaic.mosaic_info import *
from .mosaic.mosaic_name import *
from .mosaic.mosaic_nonce import *
from .mosaic.mosaic_properties import *
from .mosaic.mosaic_supply_type import *
from .mosaic.network_currency_mosaic import *
from .mosaic.network_harvest_mosaic import *
from .mosaic import *
# Namespace
from .namespace.address_alias import *
from .namespace.alias import *
from .namespace.alias_action_type import *
from .namespace.alias_type import *
from .namespace.empty_alias import *
from .namespace.mosaic_alias import *
from .namespace.namespace_id import *
from .namespace.namespace_info import *
from .namespace.namespace_name import *
from .namespace.namespace_type import *
from .namespace import *
# Node
from .node.node_info import *
from .node.node_time import *
from .node import *
# Transaction
from .transaction.account_link_transaction import *
from .transaction.account_property_modification import *
from .transaction.address_alias_transaction import *
from .transaction.aggregate_transaction_cosignature import *
from .transaction.aggregate_transaction_info import *
from .transaction.aggregate_transaction import *
from .transaction.alias_transaction import *
from .transaction.blockchain_upgrade_transaction import *
from .transaction.cosignature_signed_transaction import *
from .transaction.cosignature_transaction import *
from .transaction.deadline import *
from .transaction.hash_lock_transaction import *
from .transaction.hash_type import *
from .transaction.inner_transaction import *
from .transaction.link_action import *
from .transaction.lock_funds_transaction import *
from .transaction.message import *
from .transaction.message_type import *
from .transaction.modify_account_property_address_transaction import *
from .transaction.modify_account_property_entity_type_transaction import *
from .transaction.modify_account_property_mosaic_transaction import *
from .transaction.modify_account_property_transaction import *
from .transaction.modify_account_metadata_transaction import *
from .transaction.modify_mosaic_metadata_transaction import *
from .transaction.modify_namespace_metadata_transaction import *
from .transaction.modify_metadata_transaction import *
from .transaction.modify_multisig_account_transaction import *
from .transaction.mosaic_alias_transaction import *
from .transaction.mosaic_definition_transaction import *
from .transaction.mosaic_supply_change_transaction import *
from .transaction.multisig_cosignatory_modification import *
from .transaction.multisig_cosignatory_modification_type import *
from .transaction.network_config_transaction import *
from .transaction.plain_message import *
from .transaction.register_namespace_transaction import *
from .transaction.secret_lock_transaction import *
from .transaction.secret_proof_transaction import *
from .transaction.signed_transaction import *
from .transaction.sync_announce import *
from .transaction.transaction import *
from .transaction.transaction_announce_response import *
from .transaction.transaction_info import *
from .transaction.transaction_status import *
from .transaction.transaction_status_error import *
from .transaction.transaction_status_group import *
from .transaction.transaction_type import *
from .transaction.transaction_version import *
from .transaction.transfer_transaction import *
from .transaction import *
# Public API of the models package: the concatenation of each submodule's
# __all__, grouped by subpackage in the same order as the imports above.
__all__ = (
# Account
# NOTE(review): account_balance is star-imported above but its __all__ is not
# included here — confirm whether the omission is intentional.
account.__all__
+ account_info.__all__
+ account_meta.__all__
+ account_property.__all__
+ account_properties.__all__
+ account_names.__all__
+ address.__all__
+ multisig_account_graph_info.__all__
+ multisig_account_info.__all__
+ property_modification_type.__all__
+ property_type.__all__
+ public_account.__all__
# Blockchain
# NOTE(review): blockchain_server_info is star-imported above but absent here
# — confirm whether the omission is intentional.
+ block_info.__all__
+ block_type.__all__
+ blockchain_score.__all__
+ blockchain_storage_info.__all__
+ network_type.__all__
# Receipt
# NOTE(review): receipt_base is star-imported above but absent here — possibly
# deliberate (base classes kept private); verify.
+ balance_change_receipt.__all__
+ balance_transfer_receipt.__all__
+ artifact_expiry_receipt.__all__
+ inflation_receipt.__all__
+ receipt.__all__
+ receipt_type.__all__
+ receipt_version.__all__
+ source.__all__
+ statements.__all__
+ transaction_statement.__all__
+ resolution_statement.__all__
+ resolution_entry.__all__
# Config
+ catapult_config.__all__
+ catapult_upgrade.__all__
+ config_sections.__all__
+ config_fields.__all__
# Metadata
+ address_metadata_info.__all__
+ address_metadata.__all__
+ mosaic_metadata_info.__all__
+ mosaic_metadata.__all__
+ namespace_metadata_info.__all__
+ namespace_metadata.__all__
+ metadata_type.__all__
+ metadata_modification_type.__all__
+ metadata_modification.__all__
+ metadata_info.__all__
+ field.__all__
# Mosaic
+ mosaic.__all__
+ mosaic_id.__all__
+ mosaic_info.__all__
+ mosaic_name.__all__
+ mosaic_nonce.__all__
+ mosaic_properties.__all__
+ mosaic_supply_type.__all__
+ network_currency_mosaic.__all__
+ network_harvest_mosaic.__all__
# Namespace
+ address_alias.__all__
+ alias.__all__
+ alias_action_type.__all__
+ alias_type.__all__
+ empty_alias.__all__
+ mosaic_alias.__all__
+ namespace_id.__all__
+ namespace_info.__all__
+ namespace_name.__all__
+ namespace_type.__all__
# Node
+ node_info.__all__
+ node_time.__all__
# Transaction
+ account_link_transaction.__all__
+ account_property_modification.__all__
+ address_alias_transaction.__all__
+ aggregate_transaction_cosignature.__all__
+ aggregate_transaction_info.__all__
+ aggregate_transaction.__all__
+ alias_transaction.__all__
+ blockchain_upgrade_transaction.__all__
+ cosignature_signed_transaction.__all__
+ cosignature_transaction.__all__
+ deadline.__all__
+ hash_lock_transaction.__all__
+ hash_type.__all__
+ inner_transaction.__all__
+ link_action.__all__
+ lock_funds_transaction.__all__
+ message.__all__
+ message_type.__all__
+ modify_account_property_address_transaction.__all__
+ modify_account_property_entity_type_transaction.__all__
+ modify_account_property_mosaic_transaction.__all__
+ modify_account_property_transaction.__all__
+ modify_account_metadata_transaction.__all__
+ modify_mosaic_metadata_transaction.__all__
+ modify_namespace_metadata_transaction.__all__
+ modify_metadata_transaction.__all__
+ modify_multisig_account_transaction.__all__
+ mosaic_alias_transaction.__all__
+ mosaic_definition_transaction.__all__
+ mosaic_supply_change_transaction.__all__
+ multisig_cosignatory_modification.__all__
+ multisig_cosignatory_modification_type.__all__
+ network_config_transaction.__all__
+ plain_message.__all__
+ register_namespace_transaction.__all__
+ secret_lock_transaction.__all__
+ secret_proof_transaction.__all__
+ signed_transaction.__all__
+ sync_announce.__all__
+ transaction.__all__
+ transaction_announce_response.__all__
+ transaction_info.__all__
+ transaction_status.__all__
+ transaction_status_error.__all__
+ transaction_status_group.__all__
+ transaction_type.__all__
+ transaction_version.__all__
+ transfer_transaction.__all__
)
| 33.838188 | 76 | 0.794568 | 1,202 | 10,456 | 6.255408 | 0.133943 | 0.155606 | 0.13406 | 0.123421 | 0.27118 | 0.077936 | 0.021146 | 0 | 0 | 0 | 0 | 0.000891 | 0.141546 | 10,456 | 308 | 77 | 33.948052 | 0.836787 | 0.091048 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.524793 | 0 | 0.524793 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
e0c3d5afb7e82dbda4be4da28d905f2cfa09ef8d | 1,130 | py | Python | assets/complex_directionals.py | Neph0/text-to-input | 1ee8fd2991070cba1ac6b4036e63814aefda3f59 | [
"Unlicense"
] | 2 | 2020-12-08T17:38:57.000Z | 2022-01-15T03:22:11.000Z | assets/complex_directionals.py | Neph0/text-to-input | 1ee8fd2991070cba1ac6b4036e63814aefda3f59 | [
"Unlicense"
] | null | null | null | assets/complex_directionals.py | Neph0/text-to-input | 1ee8fd2991070cba1ac6b4036e63814aefda3f59 | [
"Unlicense"
] | null | null | null | from os import sep
import collections
# Sprite sheet that holds the icons for the complex directional inputs.
image_path = "assets/complex_directionals.png"

# Motion-input name -> bounding box on the sprite sheet, given as a
# (x, y, width, height) tuple — presumably pixel coordinates; TODO confirm.
# Several names are aliases for the same icon (e.g. 'dp' == 'srk',
# '66' == 'dash', numeric notation == named notation).
_boxes = [
    ('chargedown', (0, 0, 46, 81)),
    ('chargeback', (107, 0, 81, 46)),
    ('dash', (189, 0, 69, 46)),
    ('srk', (47, 0, 59, 66)),
    ('rsrk', (575, 0, 59, 66)),
    ('dp', (47, 0, 59, 66)),
    ('rdp', (575, 0, 59, 66)),
    ('qcb', (433, 0, 70, 64)),
    ('qcf', (505, 0, 70, 64)),
    ('hcb', (258, 0, 87, 64)),
    ('hcf', (346, 0, 87, 64)),
    ('spd', (636, 0, 81, 81)),
    ('fc', (636, 0, 81, 81)),
    ('66', (189, 0, 69, 46)),
    ('623', (47, 0, 59, 66)),
    ('421', (575, 0, 59, 66)),
    ('214', (433, 0, 70, 64)),
    ('236', (505, 0, 70, 64)),
    ('63214', (258, 0, 87, 64)),
    ('41236', (346, 0, 87, 64)),
]

# Declaration order is meaningful to consumers, so keep an OrderedDict.
coll = collections.OrderedDict(_boxes)

# Lookup table consumed elsewhere: sprite-sheet path -> its icon boxes.
_inputs = {image_path: coll}
| 35.3125 | 54 | 0.309735 | 119 | 1,130 | 2.907563 | 0.428571 | 0.052023 | 0.086705 | 0.060694 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.306507 | 0.483186 | 1,130 | 31 | 55 | 36.451613 | 0.285959 | 0 | 0 | 0 | 0 | 0 | 0.095575 | 0.027434 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0cee46a8df73d9f0472e9ba65a0ab985d4c21c3 | 2,337 | py | Python | app/playlist_maker/reddit.py | Arjay123/daily-reddit-playlist | cb005846908f06233285b89b1d894908e5d6211d | [
"Apache-2.0"
] | null | null | null | app/playlist_maker/reddit.py | Arjay123/daily-reddit-playlist | cb005846908f06233285b89b1d894908e5d6211d | [
"Apache-2.0"
] | 5 | 2020-02-11T23:25:51.000Z | 2021-06-10T18:51:05.000Z | app/playlist_maker/reddit.py | Arjay123/daily-reddit-playlist | cb005846908f06233285b89b1d894908e5d6211d | [
"Apache-2.0"
] | null | null | null | import praw
import time
import requests
from datetime import date
from datetime import timedelta
from playlist_maker.reddit_secrets import PASSWORD
from playlist_maker.reddit_secrets import CLIENT_SECRET
from playlist_maker.reddit_secrets import CLIENT_ID
# List of subreddits this tool currently supports.
SUBREDDITS = ['hiphopheads']
# Disable SNIMissingWarning / InsecurePlatformWarning emitted because an
# older version of Python is used with urllib3.
requests.packages.urllib3.disable_warnings()
def create_reddit_instance():
    """
    Build and return an authenticated praw Reddit instance.

    Credentials (client id, client secret, password) come from the
    playlist_maker.reddit_secrets module.

    #TODO - check reddit instance is valid before returning
    """
    credentials = {
        'client_id': CLIENT_ID,
        'client_secret': CLIENT_SECRET,
        'password': PASSWORD,
        'user_agent': 'testscript by /u/daily-reddit-playlis',
        'username': 'daily-reddit-playlis',
    }
    return praw.Reddit(**credentials)
def retrieve_submissions_from_subreddit(reddit, subreddit_name):
    """
    Retrieve all submissions posted to a subreddit during the previous day.

    Args:
        reddit - authenticated praw reddit instance
        subreddit_name - name of the subreddit as a string

    Returns:
        List of dicts, one per submission, with 'title', 'url' and
        'created' (UTC timestamp) keys.

    # TODO - check subreddit is valid before retrieving submissions
    """
    subreddit = reddit.subreddit(subreddit_name)
    start_ts, end_ts = get_unix_timestamps_prev_day()
    return [
        {'title': sub.title, 'url': sub.url, 'created': sub.created_utc}
        for sub in subreddit.submissions(start=start_ts, end=end_ts)
    ]
def get_unix_timestamps_prev_day():
    """
    Return (start, end) UNIX timestamps spanning the previous local day.

    start is midnight at the beginning of yesterday; end is one second
    before midnight today, i.e. 23:59:59 yesterday (local time).
    """
    today = date.today()
    start_of_today = time.mktime(today.timetuple())
    start_of_yesterday = time.mktime((today - timedelta(days=1)).timetuple())
    return start_of_yesterday, start_of_today - 1
def filter_song_and_artist(sub_title):
    """
    Checks a submission title against a regex and returns a tuple in the form
    of (artist, trackname) if the submission is a song submission

    NOTE(review): not implemented yet — the body is a stub, so every call
    currently returns None. The regex mentioned above does not exist here.
    """
    pass
| 26.862069 | 77 | 0.690201 | 272 | 2,337 | 5.753676 | 0.389706 | 0.033227 | 0.032588 | 0.044089 | 0.107348 | 0.076677 | 0.053674 | 0 | 0 | 0 | 0 | 0.002265 | 0.24433 | 2,337 | 86 | 78 | 27.174419 | 0.883918 | 0.306804 | 0 | 0 | 0 | 0 | 0.054641 | 0.015142 | 0 | 0 | 0 | 0.023256 | 0 | 1 | 0.108108 | false | 0.081081 | 0.216216 | 0 | 0.405405 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
e0cfb77e3f30ee00ef5bc4eba30ce8e3e10bac94 | 16,810 | py | Python | exploreframework/framework/filters.py | abdurrahmanvizier/frameworkdjango | 132becee33ad0363f695b90bf4900f3b9647e0fb | [
"BSD-2-Clause"
] | null | null | null | exploreframework/framework/filters.py | abdurrahmanvizier/frameworkdjango | 132becee33ad0363f695b90bf4900f3b9647e0fb | [
"BSD-2-Clause"
] | 5 | 2021-03-30T13:58:14.000Z | 2021-09-22T19:24:10.000Z | exploreframework/framework/filters.py | abdurrahmanvizier/frameworkdjango | 132becee33ad0363f695b90bf4900f3b9647e0fb | [
"BSD-2-Clause"
] | null | null | null | import django_filters
from crispy_forms.layout import Layout, Submit, Row, Column, HTML
from crispy_forms.helper import FormHelper
from django_filters import CharFilter
from django import forms
from .models import *
class OwnerFilter(django_filters.FilterSet):
    """Search form for Owner records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    ownername = CharFilter(
        label='Owner Name', field_name='ownername', lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = Owner
        fields = '__all__'
        # Bug fix: ('ownerhashkey') is a parenthesised string, not a tuple,
        # so exclusion fell back to substring matching on the string. A
        # one-element tuple requires the trailing comma.
        exclude = ('ownerhashkey',)
class UserFilter(django_filters.FilterSet):
    """Search form for ObjectUser records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    username = CharFilter(
        label='User Name', field_name='username', lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    resourcepool = CharFilter(
        label='Resource Pool', field_name='resourcepool',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = ObjectUser
        fields = '__all__'
        # Bug fix: ('userhashkey') was a plain string, not a tuple — a
        # one-element tuple needs the trailing comma.
        exclude = ('userhashkey',)
class StorageEngineFilter(django_filters.FilterSet):
    """Search form for StorageEngine records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    storageenginetype = CharFilter(
        label='Storage Engine Name', field_name='storageenginetype',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = StorageEngine
        fields = '__all__'
        # Bug fix: ('storageenginehashkey') was a plain string, not a
        # tuple — a one-element tuple needs the trailing comma.
        exclude = ('storageenginehashkey',)
class ObjectTypeFilter(django_filters.FilterSet):
    """Search form for ObjectType records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    objecttype = CharFilter(
        label='Object Type', field_name='objecttype', lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = ObjectType
        fields = '__all__'
        # Bug fix: ('objecttypehashkey') was a plain string, not a tuple —
        # a one-element tuple needs the trailing comma.
        exclude = ('objecttypehashkey',)
class PartitionByFilter(django_filters.FilterSet):
    """Search form for PartitionBy records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    partitionby = CharFilter(
        label='Partition By', field_name='partitionby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = PartitionBy
        fields = '__all__'
        # Bug fix: ('objectpartitionhashkey') was a plain string, not a
        # tuple — a one-element tuple needs the trailing comma.
        exclude = ('objectpartitionhashkey',)
class SnapShotFilter(django_filters.FilterSet):
    """Search form for SnapShot records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    snapshot1 = CharFilter(
        label='Snapshot 1', field_name='snapshot1', lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    snapshot2 = CharFilter(
        label='Snapshot 2', field_name='snapshot2', lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = SnapShot
        fields = '__all__'
        # Bug fix: ('objectsnapshothashkey') was a plain string, not a
        # tuple — a one-element tuple needs the trailing comma.
        exclude = ('objectsnapshothashkey',)
class ServerFilter(django_filters.FilterSet):
    """Search form for Server records.

    Each filter does a case-insensitive substring match (``icontains``)
    rendered as a Bootstrap ``form-control`` text input. Credential fields
    are excluded from the generated filters.
    """
    servername = CharFilter(
        field_name='servername', label='Server Name',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    hostname = CharFilter(
        field_name='hostname', label='Hostname',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        field_name='sourcesystemcreatedby', label='Created By',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        field_name='sourcesystemcreatedtime', label='Created At',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = Server
        fields = '__all__'
        exclude = ('serverhashkey', 'user', 'password')
class ServerTunnelFilter(django_filters.FilterSet):
    """Search form for ServerTunnel records.

    Each filter does a case-insensitive substring match (``icontains``)
    rendered as a Bootstrap ``form-control`` text input. Connection and
    credential fields are excluded from the generated filters.
    """
    servertunnelname = CharFilter(
        field_name='servertunnelname', label='Server Name',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    hostname = CharFilter(
        field_name='hostname', label='Hostname',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        field_name='sourcesystemcreatedby', label='Created By',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        field_name='sourcesystemcreatedtime', label='Created At',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = ServerTunnel
        fields = '__all__'
        exclude = ('servertunnelhashkey', 'port', 'user', 'password',
                   'private_key_user', 'private_key_password')
class DatabaseFilter(django_filters.FilterSet):
    """Search form for Database records.

    Each filter does a case-insensitive substring match (``icontains``)
    rendered as a Bootstrap ``form-control`` text input. Connection and
    credential fields are excluded from the generated filters.
    """
    applicationname = CharFilter(
        field_name='applicationname', label='Application Name',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    databasename = CharFilter(
        field_name='databasename', label='Database Name',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    hostname = CharFilter(
        field_name='hostname', label='Hostname',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        field_name='sourcesystemcreatedby', label='Created By',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        field_name='sourcesystemcreatedtime', label='Created At',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = Database
        fields = '__all__'
        exclude = ('databasehashkey', 'port', 'databasetype', 'password',
                   'username')
class FileFilter(django_filters.FilterSet):
    """Search form for File records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    # Bug fix: labels were copy-pasted from UserFilter ('User Name' /
    # 'Resource Pool'); they now describe the actual fields.
    filename = CharFilter(
        label='File Name', field_name='filename', lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    location = CharFilter(
        label='Location', field_name='location', lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = File
        fields = '__all__'
        exclude = ('filehashkey', 'delimiter', 'path')
class QueryFilter(django_filters.FilterSet):
    """Search form for Query records.

    Each filter does a case-insensitive substring match (``icontains``)
    rendered as a Bootstrap ``form-control`` text input.
    """
    name = CharFilter(
        field_name='name', label='Query Name',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    desc = CharFilter(
        field_name='desc', label='Description',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        field_name='sourcesystemcreatedby', label='Created By',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        field_name='sourcesystemcreatedtime', label='Created At',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = Query
        fields = '__all__'
        exclude = ('queryhashkey', 'path')
class ProcessFilter(django_filters.FilterSet):
    """Search form for Process records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    processcode = CharFilter(
        label='Process Code', field_name='processcode',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    processdesc = CharFilter(
        label='Process Description', field_name='processdesc',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = Process
        fields = '__all__'
        # Bug fix: ('processhashkey') was a plain string, not a tuple —
        # a one-element tuple needs the trailing comma.
        exclude = ('processhashkey',)
class EngineFilter(django_filters.FilterSet):
    """Search form for Engine records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    enginename = CharFilter(
        label='Engine Name', field_name='enginename', lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = Engine
        fields = '__all__'
        # Bug fix: ('enginehashkey') was a plain string, not a tuple —
        # a one-element tuple needs the trailing comma.
        exclude = ('enginehashkey',)
class ProcessEngineFilter(django_filters.FilterSet):
    """Search form for ProcessEngine records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    processhashkey = CharFilter(
        label='Process Code', field_name='processhashkey',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    enginehashkey = CharFilter(
        label='Engine Name', field_name='enginehashkey',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = ProcessEngine
        fields = '__all__'
        # Bug fix: ('processenginehashkey') was a plain string, not a
        # tuple — a one-element tuple needs the trailing comma.
        exclude = ('processenginehashkey',)
class ObjectFilter(django_filters.FilterSet):
    """Search form for Object records.

    Each filter does a case-insensitive substring match (``icontains``)
    rendered as a Bootstrap ``form-control`` text input.
    """
    objecthashkey = CharFilter(
        field_name='objecthashkey', label='Filter Object By',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    objectdesc = CharFilter(
        field_name='objectdesc', label='Description Object',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        field_name='sourcesystemcreatedby', label='Created By',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        field_name='sourcesystemcreatedtime', label='Created At',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = Object
        fields = '__all__'
        exclude = ('objectcode', 'objectname')
class MultipleRelationFilter(django_filters.FilterSet):
    """Search form for MultipleRelation records.

    Each filter does a case-insensitive substring match (``icontains``)
    rendered as a Bootstrap ``form-control`` text input. Only the
    'createdby' and 'status' columns are filterable.
    """
    createdby = CharFilter(
        field_name='createdby', label='Created By',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    status = CharFilter(
        field_name='status', label='Status',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = MultipleRelation
        fields = ('createdby', 'status')
        exclude = ('id', 'filepath', 'createdat', 'excel')
class ObjectProcessFilter(django_filters.FilterSet):
    """Search form for ObjectProcess records.

    Every filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    src_objecthashkey = CharFilter(
        label='Object Source', field_name='src_objecthashkey',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    dest_objecthashkey = CharFilter(
        label='Object Destinitions', field_name='dest_objecthashkey',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    processenginehashkey = CharFilter(
        label='Process Engine', field_name='processenginehashkey',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    userhashkey = CharFilter(
        label='User', field_name='userhashkey',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(
        label='Created By', field_name='sourcesystemcreatedby',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(
        label='Created At', field_name='sourcesystemcreatedtime',
        lookup_expr='icontains',
        widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = ObjectProcess
        fields = '__all__'
        # Bug fix: ('objectprocesshashkey') was a plain string, not a
        # tuple — a one-element tuple needs the trailing comma.
        exclude = ('objectprocesshashkey',)
class ObjectAllFilter(django_filters.FilterSet):
    """Search form for CreateObject records.

    Each filter performs a case-insensitive substring match (``icontains``)
    and renders as a Bootstrap ``form-control`` text input.
    """
    objecthashkey = CharFilter(label='Filter Object By', field_name='objecthashkey', lookup_expr='icontains', widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedby = CharFilter(label='Created By', field_name='sourcesystemcreatedby', lookup_expr='icontains', widget=forms.TextInput(attrs={'class': 'form-control'}))
    sourcesystemcreatedtime = CharFilter(label='Created At', field_name='sourcesystemcreatedtime', lookup_expr='icontains', widget=forms.TextInput(attrs={'class': 'form-control'}))
    class Meta:
        model = CreateObject
        # Explicit whitelist instead of '__all__' + exclude, unlike the
        # other FilterSets in this module.
        fields = ('objecthashkey', 'sourcesystemcreatedby', 'sourcesystemcreatedtime')
        # search_fields = ["objecthashkey__objecthashkey", "sourcesystemcreatedby", "sourcesystemcreatedtime"]
    def __init__(self, *args, **kwargs):
        # No-op override: simply delegates to FilterSet.__init__.
        super(ObjectAllFilter, self).__init__(*args, **kwargs)
e0d45883a28d2d7753f33d82846d2c079be75b6d | 242 | py | Python | piccolo/query/__init__.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | null | null | null | piccolo/query/__init__.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | null | null | null | piccolo/query/__init__.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | null | null | null | from .base import Query # noqa: F401
from .methods import ( # noqa: F401
Alter,
Select,
Objects,
Insert,
Delete,
Create,
Update,
Raw,
TableExists,
Exists,
Count,
CreateIndex,
DropIndex,
)
| 14.235294 | 37 | 0.57438 | 24 | 242 | 5.791667 | 0.833333 | 0.115108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0375 | 0.338843 | 242 | 16 | 38 | 15.125 | 0.83125 | 0.086777 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.125 | 0 | 0.125 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0d5be17e1992a731f1b3787b029f15abd431ed9 | 1,874 | py | Python | Simulation 1 Pseudo-Aquatic/planning/history.py | MacIver-Lab/gridworld-decisionmaking | 603747dfcb9652d994b8f7fc8986383a11f12965 | [
"MIT"
] | 3 | 2020-07-08T16:10:04.000Z | 2021-05-24T16:47:24.000Z | Simulation 2 Pseudo-Terrestrial/planning/history.py | MacIver-Lab/gridworld-decisionmaking | 603747dfcb9652d994b8f7fc8986383a11f12965 | [
"MIT"
] | null | null | null | Simulation 2 Pseudo-Terrestrial/planning/history.py | MacIver-Lab/gridworld-decisionmaking | 603747dfcb9652d994b8f7fc8986383a11f12965 | [
"MIT"
] | 2 | 2020-06-12T04:57:47.000Z | 2021-04-07T02:40:56.000Z | class History:
def __init__(self):
self.HistoryVector = []
def Add(self, action, observation=-1, state=None):
self.HistoryVector.append(ENTRY(action, observation, state))
def GetVisitedStates(self):
states = []
if self.HistoryVector:
for history in self.HistoryVector:
if history.State:
states.append(history.State)
return states
def Pop(self):
self.HistoryVector = self.HistoryVector[:-1]
def Truncate(self, t):
self.HistoryVector = self.HistoryVector[:t]
def Clear(self):
self.HistoryVector[:] = []
def Forget(self, t):
self.HistoryVector = self.HistoryVector[t:]
def Size(self):
return len(self.HistoryVector)
def Back(self):
assert(self.Size() > 0)
return self.HistoryVector[-1]
def __eq__(self, other):
if(other.Size() != self.Size()):
return False
for i,history in enumerate(other):
if (history.Action != self.HistoryVector[i].Action) or \
(history.Observation != self.HistoryVector[i].Observation):
return False
return True
def __getitem__(self, t):
assert(t>=0 and t<self.Size())
return self.HistoryVector[t]
class ENTRY:
def __init__(self, action, observation, state):
self.Action = action
self.Observation = observation
self.State = state
def __str__(self):
return "(" + str(self.Action) + " , " + str(self.Observation) + ")"
if __name__ == "__main__":
entry = ENTRY(1, 1, None)
history = History()
history.Add(1, 1)
assert(history.Size() == 1)
history.Add(2, 2)
print(history)
assert(History().Add(1, 1) == History().Add(1, 1)) | 28.393939 | 80 | 0.561366 | 202 | 1,874 | 5.069307 | 0.217822 | 0.265625 | 0.061523 | 0.099609 | 0.083984 | 0.083984 | 0.083984 | 0.083984 | 0 | 0 | 0 | 0.012432 | 0.313234 | 1,874 | 66 | 81 | 28.393939 | 0.783217 | 0 | 0 | 0.039216 | 0 | 0 | 0.007182 | 0 | 0 | 0 | 0 | 0 | 0.078431 | 1 | 0.254902 | false | 0 | 0 | 0.039216 | 0.45098 | 0.019608 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0e2fdf5cea113209cecec8f2632a4c8945b1ad3 | 1,831 | py | Python | webapp/tests/test_answer_process.py | Guitaryuga/LearnPython-E-learn-project | cbafb8c93f0931d0d1d411473eaf41a11193e756 | [
"MIT"
] | 1 | 2021-04-17T15:25:42.000Z | 2021-04-17T15:25:42.000Z | webapp/tests/test_answer_process.py | Guitaryuga/LearnPython-E-learn-project | cbafb8c93f0931d0d1d411473eaf41a11193e756 | [
"MIT"
] | null | null | null | webapp/tests/test_answer_process.py | Guitaryuga/LearnPython-E-learn-project | cbafb8c93f0931d0d1d411473eaf41a11193e756 | [
"MIT"
] | null | null | null | def test_answer(test_client, login, confirmation, answerchecking):
"""
Тест процесса проверки выбора правильного варианта ответа в тестовом
вопросе закрытого типа, категория алерта - success, возможность
повторно ответить блокируется
"""
response = test_client.get('/course/1/lesson/1', follow_redirects=True)
assert response.status_code == 200
assert b'success' in response.data
assert b'list-group-item disabled' in response.data
def test_wrong_answer(test_client, login, confirmation, wrong_answerchecking):
"""
Тест процесса проверки выбора неправильного варианта ответа в тестовом
вопросе закрытого типа, категория алерта - danger, возможность повторно
овтетить остается
"""
response = test_client.get('/course/1/lesson/1', follow_redirects=True)
assert response.status_code == 200
assert b'danger' in response.data
assert b'list-group-item' in response.data
def test_handwriteanswer(test_client, login, confirmation, handwritechecking):
"""
Тест процесса проверки написанного ПРАВИЛЬНОГО варианта ответа в вопросе
открытого типа, категория алерта - success, форма овтета блокируется
"""
response = test_client.get('/course/1/lesson/2', follow_redirects=True)
assert response.status_code == 200
assert b'success' in response.data
assert b'form-control' in response.data
def test_wrong_handwriteanswer(test_client, login, confirmation,
wrong_handwritechecking):
"""
Тест процесса проверки написанного НЕПРАВИЛЬНОГО варианта ответа в вопросе
открытого типа, категория алерта - danger, форма ответа не блокируется
"""
response = test_client.get('/course/1/lesson/2', follow_redirects=True)
assert response.status_code == 200
assert b'danger' in response.data
| 39.804348 | 78 | 0.736756 | 221 | 1,831 | 5.995475 | 0.271493 | 0.060377 | 0.073962 | 0.081509 | 0.859623 | 0.590189 | 0.550943 | 0.550943 | 0.442264 | 0.354717 | 0 | 0.013378 | 0.183506 | 1,831 | 45 | 79 | 40.688889 | 0.87291 | 0.333697 | 0 | 0.6 | 0 | 0 | 0.131393 | 0 | 0 | 0 | 0 | 0 | 0.55 | 1 | 0.2 | false | 0 | 0 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0e565b945adc896225049963c9d26bd9d8250a4 | 376 | py | Python | experiments/cpu_gpu.py | seyfullah/stockprediction | aab0547cc1316a116ad032137722b73a36e67a51 | [
"Apache-2.0"
] | null | null | null | experiments/cpu_gpu.py | seyfullah/stockprediction | aab0547cc1316a116ad032137722b73a36e67a51 | [
"Apache-2.0"
] | null | null | null | experiments/cpu_gpu.py | seyfullah/stockprediction | aab0547cc1316a116ad032137722b73a36e67a51 | [
"Apache-2.0"
] | null | null | null | import torch
import time
n = 40000
loop = 1000
###CPU
start_time = time.time()
a = torch.ones(n,n)
for _ in range(loop):
a += a
elapsed_time = time.time() - start_time
print('CPU time = ',elapsed_time)
###GPU
start_time = time.time()
b = torch.ones(n,n).cuda()
for _ in range(loop):
b += b
elapsed_time = time.time() - start_time
print('GPU time = ',elapsed_time) | 16.347826 | 39 | 0.656915 | 63 | 376 | 3.761905 | 0.301587 | 0.270042 | 0.202532 | 0.14346 | 0.278481 | 0.278481 | 0.278481 | 0 | 0 | 0 | 0 | 0.029221 | 0.180851 | 376 | 23 | 40 | 16.347826 | 0.74026 | 0.015957 | 0 | 0.375 | 0 | 0 | 0.060274 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0.125 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e0fe77171642b9a38b2560c968ee0d7557e043b2 | 896 | py | Python | containercafe/lxc/composites.py | rcbops-qe/containercafe | c217af0fb84b0cf9e2e537626a46aa19c8e2e286 | [
"Apache-2.0"
] | null | null | null | containercafe/lxc/composites.py | rcbops-qe/containercafe | c217af0fb84b0cf9e2e537626a46aa19c8e2e286 | [
"Apache-2.0"
] | null | null | null | containercafe/lxc/composites.py | rcbops-qe/containercafe | c217af0fb84b0cf9e2e537626a46aa19c8e2e286 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2014 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .client import LxcClient
from ..common.config import ContainersSetupConfig
class BaseLxcComposite(object):
""" This is the base composite class for an lxc cafe object """
def __init__(self, name, connection):
self.config = ContainersSetupConfig()
self.client = LxcClient(name=name, connection=connection)
| 33.185185 | 72 | 0.764509 | 127 | 896 | 5.362205 | 0.637795 | 0.088106 | 0.038179 | 0.04699 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010753 | 0.169643 | 896 | 26 | 73 | 34.461538 | 0.90457 | 0.676339 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
460163abe7c3092399847b7c71e2e229c609c8ac | 7,607 | py | Python | Pyext/test/py_u_test_get_attr.py | ecmwf/ecflow | 2498d0401d3d1133613d600d5c0e0a8a30b7b8eb | [
"Apache-2.0"
] | 11 | 2020-08-07T14:42:45.000Z | 2021-10-21T01:59:59.000Z | Pyext/test/py_u_test_get_attr.py | CoollRock/ecflow | db61dddc84d3d2c7dd6af95fd799d717c6bc2a6d | [
"Apache-2.0"
] | 10 | 2020-08-07T14:36:27.000Z | 2022-02-22T06:51:24.000Z | Pyext/test/py_u_test_get_attr.py | CoollRock/ecflow | db61dddc84d3d2c7dd6af95fd799d717c6bc2a6d | [
"Apache-2.0"
] | 6 | 2020-08-07T14:34:38.000Z | 2022-01-10T12:06:27.000Z | #////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
# Name :
# Author : Avi
# Revision : $Revision: #10 $
#
# Copyright 2009-2020 ECMWF.
# This software is licensed under the terms of the Apache Licence version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
# SCRATCH test for ecflow python api
from ecflow import Alias, AttrType, Autocancel, CheckPt, ChildCmdType, Client, Clock, Cron, DState, Date, Day, Days, \
Defs, Ecf, Event, Expression, Family, FamilyVec, File, Flag, FlagType, FlagTypeVec, InLimit, \
JobCreationCtrl, Label, Late, Limit, Meter, Node, NodeContainer, NodeVec, PartExpression, PrintStyle, \
Repeat, RepeatDate,RepeatDateList, RepeatDay, RepeatEnumerated, RepeatInteger, RepeatString, SState, State, Style, \
Submittable, Suite, SuiteVec, Task, TaskVec, Time, TimeSeries, TimeSlot, Today, UrlCmd, Variable, \
VariableList, Verify, WhyCmd, ZombieAttr, ZombieType, ZombieUserActionType, Trigger, Complete, Edit, Defstatus
import unittest
import sys
import os
class Test_get_attr(unittest.TestCase):
    """Exercises attribute-style (__getattr__) access on Defs/Suite/Family/Task."""

    def test_get_attr(self):
        """Child nodes and user variables are reachable as attributes."""
        defs = Defs() + (Suite('s') + Family('f').add(Task('t') + Edit(var="1")))
        defs += Edit(var="1")
        self.assertIsInstance(defs.s, Suite, "Expected suite")
        self.assertIsInstance(defs.s.f, Family, "Expected family")
        self.assertIsInstance(defs.s.f.t, Task, "Expected Task but found " + str(type(defs.s.f.t)))
        self.assertIsInstance(defs.var, Variable, "Expected Variable but found " + str(type(defs.var)))
        self.assertIsInstance(defs.s.f.t.var, Variable, "Expected Variable but found " + str(type(defs.s.f.t.var)))

    def test_get_attr_generated_variables(self):
        """Generated variables on defs, suite, family and tasks are accessible."""
        defs = Defs() + (Suite('s') + Family('f').add(
            (Task('t') + Edit(var="1") + RepeatDate("YMD", 20100111, 20100115, 2)),
            (Task('t2') + Edit(var="1") + RepeatDateList("YMD", [20100111, 20100115]))))
        defs.s.f.t += Meter("meter", 0, 100)
        defs.s.f.t += Event("event")
        defs.s.f.t += Limit("limitx", 10)
        #PrintStyle.set_style(Style.STATE)
        #print(defs)
        # server-level generated variables on the Defs node
        for name in ('ECF_MICRO', 'ECF_HOME', 'ECF_JOB_CMD', 'ECF_KILL_CMD',
                     'ECF_STATUS_CMD', 'ECF_URL_CMD', 'ECF_LOG', 'ECF_INTERVAL',
                     'ECF_LISTS', 'ECF_CHECK', 'ECF_CHECKOLD', 'ECF_CHECKINTERVAL',
                     'ECF_CHECKMODE', 'ECF_TRIES', 'ECF_VERSION', 'ECF_PORT',
                     'ECF_HOST'):
            self.assertTrue(getattr(defs, name), "expected generated variable")
        # suite-level generated variables
        self.assertTrue(defs.s.SUITE, "expected generated variable")
        self.assertEqual(defs.s.SUITE.value(), 's', "expected suite name of 's' but found")
        for name in ('ECF_DATE', 'YYYY', 'DOW', 'DOY', 'DATE', 'DAY', 'DD',
                     'MM', 'MONTH', 'ECF_CLOCK', 'ECF_TIME', 'TIME'):
            self.assertTrue(getattr(defs.s, name), "expected generated variable")
        # family-level generated variables
        self.assertTrue(defs.s.f.FAMILY, "expected generated variable")
        self.assertTrue(defs.s.f.FAMILY1, "expected generated variable")
        # task-level generated variables
        task = defs.s.f.t
        self.assertTrue(task.TASK, "expected generated variable")
        self.assertEqual(task.TASK.value(), 't', "expected task name of 's'")
        for name in ('ECF_JOB', 'ECF_SCRIPT', 'ECF_JOBOUT', 'ECF_TRYNO'):
            self.assertTrue(getattr(task, name), "expected generated variable")
        self.assertEqual(task.ECF_TRYNO.value(), '0', "expected task try no of '0'")
        self.assertTrue(task.ECF_RID, "expected generated variable")
        self.assertTrue(task.ECF_NAME, "expected generated variable")
        self.assertEqual(task.ECF_NAME.value(), '/s/f/t', "expected task ECF_NAME of '/s/f/t'")
        self.assertTrue(task.ECF_PASS, "expected generated variable")
        # repeat-generated variables: same values for RepeatDate and RepeatDateList
        for node in (defs.s.f.t, defs.s.f.t2):
            self.assertEqual(node.YMD.value(), '20100111', "expected generated YMD of value")
            self.assertEqual(node.YMD_YYYY.value(), '2010', "expected generated YMD of value")
            self.assertEqual(node.YMD_MM.value(), '1', "expected generated YMD of value")
            self.assertEqual(node.YMD_DD.value(), '11', "expected generated YMD of value")
            self.assertEqual(node.YMD_DOW.value(), '1', "expected generated YMD of value")
            self.assertEqual(node.YMD_JULIAN.value(), '2455208', "expected generated YMD of value")
        # meter/event/limit attributes report their initial values
        self.assertEqual(task.event.value(), 0, "expected generated event of value 0 but found " + str(task.event.value()))
        self.assertEqual(task.meter.value(), 0, "expected generated meter of value 0 but found " + str(task.meter.value()))
        self.assertEqual(task.limitx.value(), 0, "expected generated limit of value 0 but found " + str(task.limitx.value()))
if __name__ == "__main__":
    unittest.main()
    # NOTE(review): unittest.main() calls sys.exit(), so this line is
    # unreachable as written.
    print("All Tests pass")
| 66.147826 | 139 | 0.654397 | 990 | 7,607 | 4.966667 | 0.207071 | 0.190157 | 0.146431 | 0.235916 | 0.670531 | 0.650397 | 0.622941 | 0.41326 | 0.291845 | 0.231849 | 0 | 0.022099 | 0.196924 | 7,607 | 114 | 140 | 66.72807 | 0.78278 | 0.087157 | 0 | 0 | 0 | 0 | 0.278909 | 0 | 0 | 0 | 0 | 0 | 0.744186 | 1 | 0.023256 | false | 0.023256 | 0.046512 | 0 | 0.081395 | 0.011628 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
461962b0ba932eec2de34b03536ee4c5fe8613b6 | 117 | py | Python | ad2/aula_09/leitura_com_close.py | renzon/fundamentos-de-programacao | 7d1f5df0e32646e71132f04fc780b1908b8cf818 | [
"MIT"
] | 6 | 2019-08-08T00:48:14.000Z | 2021-04-16T19:55:08.000Z | ad2/aula_09/leitura_com_close.py | renzon/fundamentos-de-programacao | 7d1f5df0e32646e71132f04fc780b1908b8cf818 | [
"MIT"
] | null | null | null | ad2/aula_09/leitura_com_close.py | renzon/fundamentos-de-programacao | 7d1f5df0e32646e71132f04fc780b1908b8cf818 | [
"MIT"
# Read 'exemplo.txt' line by line and print each line stripped of
# surrounding whitespace.
# Fix: the original opened the file and called close() manually, which
# leaks the file handle if printing raises; a 'with' block guarantees
# the file is closed in all cases.
with open('exemplo.txt', 'r', encoding='utf8') as arquivo:
    for linha in arquivo:
        print(linha.strip())
| 16.714286 | 51 | 0.666667 | 16 | 117 | 4.875 | 0.8125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01 | 0.145299 | 117 | 6 | 52 | 19.5 | 0.77 | 0 | 0 | 0 | 0 | 0 | 0.136752 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
461c1b1b8f76ff692fffb19a1b71c970c2cd126d | 145 | py | Python | algorithms/bonAppetit.py | marismarcosta/hackerrank | 3580b4fe0094e2a13f9a7efeeb0e072810be9ebf | [
"MIT"
] | null | null | null | algorithms/bonAppetit.py | marismarcosta/hackerrank | 3580b4fe0094e2a13f9a7efeeb0e072810be9ebf | [
"MIT"
] | 3 | 2020-09-27T22:57:05.000Z | 2020-09-29T23:07:44.000Z | algorithms/bonAppetit.py | marismarcosta/hackerrank-challenges | 3580b4fe0094e2a13f9a7efeeb0e072810be9ebf | [
"MIT"
def bonAppetit(bill, k, b):
    """Decide whether Anna was overcharged for a shared bill.

    bill -- list of item prices
    k    -- index of the item Anna did not eat
    b    -- amount Anna was actually charged

    Prints the refund Anna is owed, or 'Bon Appetit' when the charge
    was exactly fair.
    """
    anna_share = int((sum(bill) - bill[k]) / 2)
    refund = b - anna_share
    if refund == 0:
        print('Bon Appetit')
    else:
        print(refund)
    return
1ca3afeed1e74cb448eaeee2dca3f44594ac54ae | 922 | py | Python | village/consts.py | vasusingla/adversarial_poisons | a1d58d8e31bcb01da494f39ad2b0a707c09acbf4 | [
"MIT"
] | 21 | 2021-06-18T20:53:35.000Z | 2022-03-28T14:33:01.000Z | village/consts.py | vasusingla/adversarial_poisons | a1d58d8e31bcb01da494f39ad2b0a707c09acbf4 | [
"MIT"
] | 2 | 2021-07-29T09:38:00.000Z | 2022-03-24T23:07:26.000Z | village/consts.py | vasusingla/adversarial_poisons | a1d58d8e31bcb01da494f39ad2b0a707c09acbf4 | [
"MIT"
"""Setup constants, ymmv."""
# --- data-loading / device-transfer settings ---
PIN_MEMORY = True        # presumably DataLoader pin_memory for faster GPU copies -- TODO confirm
NON_BLOCKING = True      # presumably passed as non_blocking= to tensor transfers -- TODO confirm
BENCHMARK = True         # NOTE(review): likely cudnn benchmark mode; confirm at use site
MAX_THREADING = 40       # upper bound on worker threads
SHARING_STRATEGY = 'file_descriptor' # file_system or file_descriptor
DEBUG_TRAINING = False
DISTRIBUTED_BACKEND = 'gloo' # nccl would be faster, but require gpu-transfers for indexing and stuff
# --- per-channel normalization statistics (values in [0, 1]) per dataset ---
cifar10_mean = [0.4914672374725342, 0.4822617471218109, 0.4467701315879822]
cifar10_std = [0.24703224003314972, 0.24348513782024384, 0.26158785820007324]
cifar100_mean = [0.5071598291397095, 0.4866936206817627, 0.44120192527770996]
cifar100_std = [0.2673342823982239, 0.2564384639263153, 0.2761504650115967]
mnist_mean = (0.13066373765468597,)   # single channel, hence 1-tuples
mnist_std = (0.30810782313346863,)
imagenet_mean = [0.485, 0.456, 0.406]
imagenet_std = [0.229, 0.224, 0.225]
tiny_imagenet_mean = [0.4789886474609375, 0.4457630515098572, 0.3944724500179291]
tiny_imagenet_std = [0.27698642015457153, 0.2690644860267639, 0.2820819020271301]
| 40.086957 | 102 | 0.795011 | 115 | 922 | 6.191304 | 0.608696 | 0.035112 | 0.036517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.461446 | 0.099783 | 922 | 22 | 103 | 41.909091 | 0.396386 | 0.135575 | 0 | 0 | 0 | 0 | 0.024051 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1ca43873fbd9cafeac3ed2ac3cbba4679b684d25 | 6,946 | py | Python | assignments/assignment2/layers.py | tz3/dlcourse_ai | e1405bc45de9c084672ac3206215177c21f2e4b7 | [
"MIT"
] | null | null | null | assignments/assignment2/layers.py | tz3/dlcourse_ai | e1405bc45de9c084672ac3206215177c21f2e4b7 | [
"MIT"
] | null | null | null | assignments/assignment2/layers.py | tz3/dlcourse_ai | e1405bc45de9c084672ac3206215177c21f2e4b7 | [
"MIT"
] | null | null | null | import numpy as np
def l2_regularization(W, reg_strength):
    """Return the L2 regularization loss on W and its gradient.

    Arguments:
        W, np array - weights
        reg_strength - float value

    Returns:
        loss, single value - l2 regularization loss
        gradient, np.array same shape as W - gradient of weight by l2 loss
    """
    # L2(W) = reg_strength * sum_ij W[i,j]^2
    penalty = reg_strength * np.sum(np.square(W))
    # dL2(W)/dW = 2 * reg_strength * W
    return penalty, 2 * reg_strength * W
def softmax(_predictions):
    """Convert raw classifier scores into probabilities.

    Arguments:
        _predictions, np array, shape is either (N) or (batch_size, N) -
            classifier output

    Returns:
        probs, np array of the same shape as predictions -
            probability for every class, 0..1
    """
    # work on a copy; subtract the max for numerical stability
    scores = _predictions.copy()
    if scores.ndim == 1:
        scores -= scores.max()
        exps = np.exp(scores)
        return exps / exps.sum()
    scores -= scores.max(axis=1, keepdims=True)
    exps = np.exp(scores)
    return exps / exps.sum(axis=1, keepdims=True)
def cross_entropy_loss(probs, target_index):
    '''
    Computes cross-entropy loss

    Arguments:
      probs, np array, shape is either (N) or (batch_size, N) -
        probabilities for every class
      target_index: np array of int, shape is (1) or (batch_size) -
        index of the true class for given sample(s)

    Returns:
      loss: single value
    '''
    # Fix: the previous version unconditionally called
    # cross_entropy_loss_old() (a debugging leftover with an assert),
    # which raised AttributeError/IndexError whenever the single-sample
    # (1-D probs / int target_index) path was taken.
    if isinstance(target_index, int) or len(probs.shape) == 1:
        # single sample: negative log-probability of the true class
        return -np.log(probs[target_index])
    # batch: mean negative log-probability of each sample's true class
    target_probs = probs[np.arange(len(target_index)), target_index.flatten()]
    return np.mean(-np.log(target_probs))
def cross_entropy_loss_old(probs, target_index):
    '''
    Computes cross-entropy loss (batch-only, original implementation)

    Arguments:
      probs, np array, shape is either (N) or (batch_size, N) - probabilities for every class
      target_index: np array of int, shape is (1) or (batch_size) - index of the true class for given sample(s)

    Returns:
      loss: single value
    '''
    # pick each sample's probability for its true class, then average -log
    sample_ids = np.arange(target_index.shape[0])
    picked = probs[sample_ids, target_index]
    return np.mean(-np.log(picked))  # L
def softmax_with_cross_entropy(predictions, target_index):
    '''
    Computes softmax and cross-entropy loss for model predictions,
    including the gradient

    Arguments:
      predictions, np array, shape is either (N) or (batch_size, N) -
        classifier output
      target_index: np array of int, shape is (1) or (batch_size) -
        index of the true class for given sample(s)

    Returns:
      loss, single value - cross-entropy loss
      dprediction, np array same shape as predictions - gradient of predictions by loss value
    '''
    probs = softmax(predictions)
    loss = cross_entropy_loss(probs, target_index)
    dprediction = probs.copy()
    if len(predictions.shape) == 1:
        # single sample: dL/dZ = S - 1(y); no batch averaging needed.
        # Fix: the previous version divided by target_index.shape[0]
        # outside this branch, which raised AttributeError when
        # target_index was a plain int.
        dprediction[target_index] -= 1
    else:
        dprediction[np.arange(len(dprediction)), target_index.flatten()] -= 1
        # average the gradient over the batch
        dprediction = dprediction / target_index.shape[0]
    return loss, dprediction
class Param:
    """Trainable model parameter.

    Captures both the parameter value and the gradient accumulated
    during backpropagation.
    """

    def __init__(self, value):
        # gradient starts at zero, matching the value's shape and dtype
        self.value = value
        self.grad = np.zeros_like(value)

    def reset_grad(self):
        """Zero out the accumulated gradient."""
        self.grad = np.zeros_like(self.value)

    def __str__(self) -> str:
        return f'value: {self.value}, gradient: {self.grad}'
def ReLU(X):
    """Elementwise rectified linear unit: negatives become 0.0 (result is float)."""
    return np.maximum(X, 0.0)
class ReLULayer:
    """ReLU activation layer: forward zeroes negative inputs, backward
    passes gradient through only where the input was positive."""

    def __init__(self):
        # Fix: removed the unused self.positive attribute and the stray
        # 'pass'; initialize mask so the attribute always exists.
        self.x = None     # input saved by the last forward pass
        self.mask = None  # boolean mask of strictly-positive inputs

    def forward(self, X):
        """Apply ReLU elementwise and remember which entries were positive."""
        self.x = X
        self.mask = (X > 0)
        return X * self.mask

    def backward(self, d_out):
        """
        Backward pass

        Arguments:
        d_out, np array (batch_size, num_features) - gradient
           of loss function with respect to output

        Returns:
        d_result: np array (batch_size, num_features) - gradient
          with respect to input
        """
        # gradient flows only through entries that were positive in forward
        return d_out * self.mask

    def params(self):
        # ReLU has no trainable parameters
        return {}
class FullyConnectedLayer:
    """Affine (dense) layer: Y = X @ W + B with trainable W and B."""

    def __init__(self, n_input, n_output):
        # small random initialization for the weights and bias
        self.W = Param(0.001 * np.random.randn(n_input, n_output))
        self.B = Param(0.001 * np.random.randn(1, n_output))
        self.X = None  # input saved by forward for use in backward

    def forward(self, X):
        """Compute X @ W + B, remembering X for the backward pass."""
        self.X = X.copy()
        return np.dot(X, self.W.value) + self.B.value

    def backward(self, d_out):
        """
        Backward pass
        Computes gradient with respect to input and
        accumulates gradients within self.W and self.B

        Arguments:
        d_out, np array (batch_size, n_output) - gradient
           of loss function with respect to output

        Returns:
        d_result: np array (batch_size, n_input) - gradient
          with respect to input
        """
        # dL/dW = X^T @ d_out
        self.W.grad += np.dot(self.X.T, d_out)
        # dL/dB = column sums of d_out (via a 1 x batch_size row of ones)
        ones_row = np.ones(shape=(1, self.X.shape[0]))
        self.B.grad += np.dot(ones_row, d_out)
        # dL/dX = d_out @ W^T
        return np.dot(d_out, self.W.value.T)

    def params(self):
        return {'W': self.W, 'B': self.B}
| 29.308017 | 111 | 0.623092 | 948 | 6,946 | 4.451477 | 0.187764 | 0.044313 | 0.030332 | 0.042654 | 0.453081 | 0.427014 | 0.379147 | 0.332701 | 0.274408 | 0.22891 | 0 | 0.00823 | 0.282753 | 6,946 | 236 | 112 | 29.432203 | 0.838619 | 0.472646 | 0 | 0.178571 | 0 | 0 | 0.013564 | 0 | 0 | 0 | 0 | 0.029661 | 0.011905 | 1 | 0.202381 | false | 0.011905 | 0.011905 | 0.035714 | 0.416667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1cb027d9b2da3593fc7b1e1fad3d9e1fc5acbd27 | 38,762 | py | Python | Utilities/chartables.py | mathstuf/vtk-dicom | 5b1c74514feb90be6823d0cfbcfb7b5af6c0ec05 | [
"BSD-3-Clause"
] | 1 | 2019-10-31T06:15:33.000Z | 2019-10-31T06:15:33.000Z | Utilities/chartables.py | mathstuf/vtk-dicom | 5b1c74514feb90be6823d0cfbcfb7b5af6c0ec05 | [
"BSD-3-Clause"
] | null | null | null | Utilities/chartables.py | mathstuf/vtk-dicom | 5b1c74514feb90be6823d0cfbcfb7b5af6c0ec05 | [
"BSD-3-Clause"
] | 1 | 2021-11-16T06:41:26.000Z | 2021-11-16T06:41:26.000Z | """
Generate C++ character set conversion tables.
This script takes the conversion tables from https://encoding.spec.whatwg.org
as its input, performs some tweaks, and generates forward and reverse
compressed conversion tables.
For conversions in both directions, the code 0xFFFD will indicate a failed
character conversion. See http://www.unicode.org/faq/unsup_char.html
As of yet, this code doesn't provide generic replacements when converting
from unicode (e.g. whitespace of any kind could becomes "space" in the
target encoding) or generic removals (soft hyphen, word joiners, zero-width
space, variation selectors). These could be handled in the following manner:
If the table produces the replacement character, then the input unicode could
be checked to see if it fits into one of the above categories, and the
appropriate action could be taken.
"""
import bisect
import sys
import string
# replacement character U+FFFD, also used as the "no mapping" marker
RCHAR = 0xFFFD
# convenient constants for forward vs. reverse encoding
Forward = False
Reverse = True
# interesting unicode ranges (inclusive [first, last] pairs)
cjk_punct = [0x3000,0x301F] # common cjk punctuation
cjk_kana = [0x3041,0x30FF] # japanese kana blocks
cjk_punct_kana = [0x3000,0x30FF] # punct and kana as one big block
cjk_unified = [0x4E00,0x9FFF] # cjk unified ideographs
kr_hangul = [0xAC00,0xD7A3] # korean hangul block
def readtable(fname):
    """Read a text file that contains a mapping table.

    The table has two or more whitespace-separated columns, with '#'
    for comments.  Column 0 (any base, e.g. 0x..) is the table index,
    column 1 is the mapped value.  Indexes that never receive a value
    are filled with RCHAR; single-column lines still extend the table.

    Fixes: the file is now closed via 'with' even if parsing raises,
    and each line is parsed once instead of twice.
    """
    pairs = []
    maxindex = 0
    with open(fname, 'r') as f:
        for l in f:
            l = l.strip()
            if not l or l[0] == '#':
                continue
            columns = l.split()
            index = int(columns[0], base=0)
            maxindex = max(maxindex, index)
            if len(columns) > 1:
                pairs.append((index, int(columns[1], base=0)))
    table = [RCHAR]*(maxindex+1)
    for index, value in pairs:
        table[index] = value
    return table
def readlinear(fname):
    """Read a linear table: each non-comment line contributes two
    consecutive elements (both columns parsed with int(..., base=0)).
    Comments begin with '#'.

    Fix: the file is now closed via 'with' even if parsing raises.
    """
    table = []
    with open(fname, 'r') as f:
        for l in f:
            l = l.strip()
            if not l or l[0] == '#':
                continue
            columns = l.split()
            table.append(int(columns[0], base=0))
            table.append(int(columns[1], base=0))
    return table
def readdict(fname):
    """Read a mapping table into a dict.

    The table has two or more whitespace-separated columns, with '#'
    for comments; lines with fewer than two columns are ignored.
    Both columns are parsed with int(..., base=0).

    Fix: the file is now closed via 'with' even if parsing raises.
    """
    d = {}
    with open(fname, 'r') as f:
        for l in f:
            l = l.strip()
            if not l or l[0] == '#':
                continue
            columns = l.split()
            if len(columns) > 1:
                d[int(columns[0], base=0)] = int(columns[1], base=0)
    return d
def makedict(table, reverse, *special):
    """Turn a table into a dict, since a dict is better for sparse data.

    If "reverse" is set, the dict provides the reverse mapping
    (value -> index); otherwise it maps index -> value.  The extra
    "special" arguments may be [lo, hi] ranges of values to exclude,
    or dicts of additional mappings to merge in (also range-filtered).
    Entries equal to 0xFFFD on either side are dropped; for duplicates,
    the first mapping wins.

    Fix: dict.has_key() (removed in Python 3) replaced with the
    'in' operator, which works in both Python 2 and 3.
    """
    ranges = []
    dicts = []
    for s in special:
        if type(s) == dict:
            dicts.append(s)
        else:
            ranges.append(s)
    d = {}
    for j in range(len(table)):
        if reverse:
            i = j
            c = table[j]
        else:
            c = j
            i = table[j]
        # exclude values that fall inside any special range
        for r in ranges:
            if c >= r[0] and c <= r[1]:
                c = 0xFFFD
                break
        if i == 0xFFFD or c == 0xFFFD:
            continue
        if c not in d:
            d[c] = i
    for s in dicts:
        for c,i in s.items():
            for r in ranges:
                if c >= r[0] and c <= r[1]:
                    c = 0xFFFD
                    break
            if c == 0xFFFD:
                continue
            d[c] = i
    return d
def maketable(d, maxrun=8, maxin=0xFFFF):
    """Given a dict created with "makedict", create a compressed table.

    The result is a flat list laid out as
        [len(utable)] + utable + vtable + wtable + dtable
    where utable holds each segment's start code, vtable holds the first
    value of a linear run (or RCHAR), wtable holds an offset into dtable
    for explicitly stored segments (or RCHAR), and dtable holds explicit
    values.  Runs of at least "maxrun" consecutive mappings are stored
    as linear segments; shorter stretches are stored explicitly, with
    gaps smaller than "maxrun" padded using RCHAR.  Trailing segments
    that start beyond "maxin" are trimmed.
    """
    keys = list(d.keys())
    keys.sort()
    utable = [0]
    vtable = [RCHAR]
    wtable = [RCHAR]
    dtable = []
    i = 0
    while i < len(keys):
        key = keys[i]
        val = d[key]
        assert val != RCHAR
        j = i+1
        jj = 0
        k = RCHAR
        # check for a character run
        while (j < len(keys) and keys[j] == key+(j-i) and
               d[keys[j]] == val+(j-i)):
            j += 1
        if j-i < maxrun:
            # run too short: store explicitly in dtable, absorbing nearby
            # keys (gaps < maxrun are padded with RCHAR)
            k = len(dtable)
            assert k != RCHAR
            j = i+1
            dtable.append(val)
            val = RCHAR
            runlen = 0
            while j < len(keys) and keys[j] - keys[j-1] < maxrun:
                key1 = keys[j-1]
                key2 = keys[j]
                if key2 == key1+1 and d[key2] == d[key1]+1:
                    runlen += 1
                    if runlen == maxrun:
                        # a long run started inside the explicit data:
                        # back it out so it can become its own segment
                        dtable = dtable[0:-runlen]
                        j -= runlen
                        break
                else:
                    runlen = 0
                for n in range(key1+1,key2):
                    dtable.append(RCHAR)
                    jj += 1
                dtable.append(d[key2])
                j += 1
            if len(dtable) > k:
                # if everything just stored forms one consecutive run,
                # convert it back into a compressed (linear) segment
                runlen = 1
                for kk in range(k+1,len(dtable)):
                    if dtable[kk] == dtable[kk-1]+1:
                        runlen += 1
                if len(dtable) == k + runlen:
                    val = dtable[k]
                    dtable = dtable[0:k]
                    k = RCHAR
        if utable[-1] == key:
            vtable[-1] = val
            wtable[-1] = k
        else:
            utable.append(key)
            vtable.append(val)
            wtable.append(k)
        # terminating entry (maps to RCHAR) closes off this segment
        utable.append(key+(j+jj-i))
        vtable.append(RCHAR)
        wtable.append(RCHAR)
        i = j
    # trim trailing segments beyond maxin
    while utable[-1] > maxin:
        utable = utable[0:-1]
        vtable = vtable[0:-1]
        wtable = wtable[0:-1]
    return [len(utable)] + utable + vtable + wtable + dtable
"""
# simpler code that does not include 'dtable'
i = 0
while i < len(keys):
key = keys[i]
val = d[key]
# look for a run
j = i+1
while j < len(keys) and keys[j] == key+(j-i) and d[keys[j]] == val+(j-i):
j += 1
if utable[-1] == key:
vtable[-1] = val
else:
utable.append(key)
vtable.append(val)
utable.append(key+(j-i))
vtable.append(RCHAR)
i = j
"""
def maketable2(table, reverse, *special, **kw):
    """Given a dense table, create a compressed table.

    If "reverse" is set, then the compressed table reverses the input table.
    The "special" arguments are [lo, hi] ranges that are stored as dedicated
    uncompressed blocks at the front of the data section, plus optional
    dicts of extra mappings.  The result is prefixed with a header: the
    number of ranges followed by, for each range, the index of its
    ("hot") segment.
    """
    d = makedict(table, reverse, *special)
    t = maketable(d, **kw)
    try:
        maxin = kw['maxin']
    except KeyError:
        maxin = 0xFFFF
    header = []
    ranges = []
    dicts = []
    for s in special:
        if type(s) == dict:
            dicts.append(s)
        else:
            ranges.append(s)
    header.append(len(ranges))
    if ranges:
        newdata = [ [] ]*len(ranges)  # NOTE(review): appears unused
        shift = 0
        for r in ranges:
            shift += r[1] - r[0] + 1
        # reserve room at the front of the data block for the ranges,
        # shifting existing wtable offsets accordingly
        n = t[0]
        p = 3*n + 1
        t[p:p] = [RCHAR]*shift
        for i in range(2*n+1,3*n+1):
            if t[i] != RCHAR:
                t[i] += shift
                assert t[i] != RCHAR
        # fill the reserved blocks from the dense table ...
        for j in range(len(table)):
            if reverse:
                i = j
                v = table[j]
            else:
                i = table[j]
                v = j
            p = 3*n + 1
            for r in ranges:
                if v >= r[0] and v <= r[1]:
                    t[v - r[0] + p] = i
                p += r[1] - r[0] + 1
        # ... and from the extra mapping dicts
        for s in dicts:
            for v,i in s.items():
                p = 3*n + 1
                for r in ranges:
                    if v >= r[0] and v <= r[1]:
                        t[v - r[0] + p] = i
                    p += r[1] - r[0] + 1
        # splice a segment entry for each range into the segment tables
        p = 3*n + 1
        for r in ranges:
            j = bisect.bisect(t[1:n+1], r[0])
            if t[j] == r[0]:
                t[j+2*n] = p - 3*n - 1
                assert t[j+2*n] != RCHAR
                t[j+n] = RCHAR
            else:
                j += 1
                t.insert(j+2*n, p - 3*n - 1)
                assert t[j+2*n] != RCHAR
                t.insert(j+n, RCHAR)
                t.insert(j, r[0])
                assert t[0] != RCHAR
                n += 1
                p += 3
            header.append(j-1)
            if j+1 >= n or t[j+1] != r[1] + 1:
                # make sure a following segment closes off the range
                t.insert(j+1+2*n, RCHAR)
                t.insert(j+1+n, RCHAR)
                t.insert(j+1, r[1] + 1)
                assert t[j+1] != RCHAR
                n += 1
                p += 3
            p += r[1] - r[0] + 1
        t[0] = n
        # trim trailing segments beyond maxin
        while t[n] > maxin:
            del t[3*n]
            del t[2*n]
            del t[1*n]
            n -= 1
            t[0] = n
    return header + t
def printrows(table, fmt, n):
    """Write the table values n-per-row using printf-style format fmt.

    RCHAR entries are printed as the literal 'RCHAR', right-aligned to
    the same column width as a formatted value.
    """
    # compute the total width of one formatted value: the length of the
    # literal prefix before '%' plus the numeric field width (if any),
    # e.g. '0x%04X' -> 2 + 4 = 6
    h = fmt.find('%')
    l = h+1
    while fmt[l] in string.digits:
        l += 1
    w = str(h)
    if l > h+1:
        w = str(h + int(fmt[h+1:l]))
    fmt1 = '%s' + '%' + w + 's,'  # format for the 'RCHAR' placeholder
    fmt2 = '%s' + fmt + ','       # format for a real value
    for k in range(len(table)):
        u = table[k]
        s = ' '
        if k % n == 0:
            s = '\n '
        if u == RCHAR:
            sys.stdout.write(fmt1 % (s,'RCHAR'))
        else:
            sys.stdout.write(fmt2 % (s,u))
def printtable(name, table, reverse, dtype='unsigned short', maxin=0xFFFF):
    """Write a compressed table as a C++ array definition to stdout.

    The layout matches maketable2(): hot-segment header, segment count,
    segment start table, compressed (linear) segments, uncompressed
    segment offsets, then the explicit data, annotated with comments.
    """
    # unpack the flat table produced by maketable2()
    n = table[0]
    htable = table[1:n+1]
    l = table[n+1]
    utable = table[n+2:n+l+2]
    vtable = table[n+l+2:n+2*l+2]
    wtable = table[n+2*l+2:n+3*l+2]
    dtable = table[n+3*l+2:]
    # formats: hex for unicode values, decimal for code values
    ufmt = '0x%04X'
    ucnt = 8
    cfmt = '%6d'
    ccnt = 8
    ufmt2 = '0x%04X'
    cfmt2 = '%6d'
    if reverse:
        (ffmt2,ffmt) = (ufmt2,ufmt)
    else:
        (ffmt2,ffmt) = (cfmt2,cfmt)
    if not reverse:
        (tfmt2,tfmt,tcnt) = (ufmt2,ufmt,ucnt)
    else:
        (tfmt2,tfmt,tcnt) = (cfmt2,cfmt,ccnt)
    sys.stdout.write('const %s %s[%d] = {' % (dtype, name, len(table)))
    sys.stdout.write('\n // hot segments (indexes into segment table)')
    printrows([n] + htable, '%d', 8)
    sys.stdout.write('\n // number of segments')
    printrows([l], '%d', 8)
    sys.stdout.write('\n // segment table')
    printrows(utable, ffmt2, 8)
    sys.stdout.write('\n // compressed segments')
    printrows(vtable, tfmt2, 8)
    sys.stdout.write('\n // uncompressed segments')
    printrows(wtable, tfmt2, 8)
    # list segment comments: hot segments first, then the rest
    itable = list(htable)
    for i in range(l):
        if i not in htable:
            if htable and wtable[i] == RCHAR:
                itable.insert(bisect.bisect_left(itable, i), i)
            else:
                itable.append(i)
    for i in itable:
        nextval = maxin+1
        if i+1 < l:
            nextval = utable[i+1]
        s = nextval - utable[i]
        v = vtable[i]
        w = wtable[i]
        if v != RCHAR or w != RCHAR:
            sys.stdout.write(('\n // ['+ffmt+','+ffmt+']') %
                             (utable[i], nextval-1))
        if v != RCHAR:
            # linear segment: show the mapped range
            sys.stdout.write((' -> ['+tfmt+','+tfmt+'] # seg %d') % (v,v+s-1,i))
        if w != RCHAR:
            # explicit segment: dump its slice of the data table
            sys.stdout.write(' -v # seg %d at pos %d' % (i,w))
            printrows(dtable[w:w+s], tfmt, tcnt)
    sys.stdout.write('\n};\n')
def searchtable(table, x):
    """Look up code x in a compressed table built by maketable/maketable2.

    Returns the mapped value, or RCHAR if x has no mapping.
    """
    n = table[0]
    l = table[n+1]
    utable = table[n+2:n+l+2]
    vtable = table[n+l+2:n+2*l+2]
    wtable = table[n+2*l+2:n+3*l+2]
    dtable = table[n+3*l+2:]
    # Try the listed "hot" segments first.
    # Fix: the original condition was "x >= utable[j] and x < utable[j]",
    # which is always false, so the fast path was dead code (the bisect
    # fallback still produced correct results).  The corrected test gives
    # the same segment index that bisect would find.
    for j in table[1:n+1]:
        if j + 1 < l and x >= utable[j] and x < utable[j+1]:
            i = j
            break
    else:
        # binary search for the segment whose start is <= x
        i = bisect.bisect(utable, x) - 1
    u = utable[i]
    v = vtable[i]
    if v == RCHAR:
        # not a linear run: look in the explicit data table
        v = wtable[i]
        if v == RCHAR:
            return RCHAR
        return dtable[v + (x - u)]
    # linear run: offset within the segment
    # (the original had a dead "elif v == RCHAR" branch here, removed)
    return v + (x - u)
def checktable(table, reverse, orig, *dicts):
    """Sanity-check a compressed table against the dense original.

    Every code in "orig" (and in any extra compatibility dicts) must map
    through searchtable() to the expected value, and 0xFFFD inputs must
    map to RCHAR.  Failures are reported on stdout as 'zerofail' or
    'matchfail'.  NOTE: this file uses Python 2 print statements.
    """
    for j in range(len(orig)):
        if reverse:
            i = j
            k = orig[j]
        else:
            i = orig[j]
            k = j
        c = searchtable(table, k)
        if k == 0xFFFD:
            if c != RCHAR:
                print "zerofail", i,c,hex(k)
        elif i != c:
            print "matchfail", i,c,hex(k)
    # also verify the extra compatibility mappings
    for d in dicts:
        for k,i in d.items():
            c = searchtable(table, k)
            if k == 0xFFFD:
                if c != RCHAR:
                    print "zerofail", i,c,hex(k)
            elif i != c:
                print "matchfail", i,c,hex(k)
header = \
"""/*=========================================================================
This is an automatically generated file. Include errata for any changes.
=========================================================================*/"""
sys.stdout.write(header)
sys.stdout.write('\n\n')
sys.stdout.write('#include "vtkDICOMCharacterSetTables.h"\n')
sys.stdout.write('\n')
# ----
# ASCII
# ----
# ASCII is the identity mapping over 0-127.
ascii = list(range(0,128))
# the [1] prefix supplies a one-entry hot-segment header (segment 0)
table = [1] + maketable2(ascii, Forward, maxin=255)
checktable(table, Forward, ascii)
CodePageASCII = table
table = [1] + maketable2(ascii, Reverse)
checktable(table, Reverse, ascii)
CodePageASCII_R = table
sys.stdout.write("// %s\n" % ("ASCII",))
printtable("CodePageASCII", CodePageASCII, Forward, maxin=255)
sys.stdout.write('\n')
sys.stdout.write('// Reverse\n')
printtable("CodePageASCII_R", CodePageASCII_R, Reverse)
sys.stdout.write('\n')
# ----
# JIS X 0201
# ----
# ASCII-like below 0xA1, halfwidth katakana at 0xA1-0xDF; backslash and
# tilde are replaced by yen sign and macron in this code page.
j0201 = list(range(0,161)) + list(range(0xFF61,0xFFA0)) + [RCHAR]*32
j0201[ord('\\')] = 0xA5
j0201[ord('~')] = 0x203E
j0201_compat = {
    # allow conversion of backslash, tilde to yen, macron
    ord('\\') : ord('\\'), ord('~') : ord('~'),
}
# allow fullwidth -> halfwidth conversion
for x,u in readdict('whatwg/index-iso-2022-jp-katakana.txt').items():
    j0201_compat[u] = 161 + x
for x in range(94):
    j0201_compat[0xFF01 + x] = 33 + x
j0201_compat[0xFFE3] = 126 # FULLWIDTH MACRON
j0201_compat[0xFFE5] = 92 # FULLWIDTH YEN
j0201_compat[0x3000] = 32 # IDEOGRAPHIC SPACE
table = maketable2(j0201, Forward, maxrun=1, maxin=255)
checktable(table, Forward, j0201)
CodePageJISX0201 = table
table = maketable2(j0201, Reverse, j0201_compat, maxrun=16)
checktable(table, Reverse, j0201)
CodePageJISX0201_R = table
sys.stdout.write("// %s\n" % ("JIS X 0201",))
printtable("CodePageJISX0201", CodePageJISX0201, Forward, maxin=255)
sys.stdout.write('\n')
sys.stdout.write('// Reverse\n')
printtable("CodePageJISX0201_R", CodePageJISX0201_R, Reverse)
sys.stdout.write('\n')
# ----
# ISO 8859 Code Pages
# ----
# Each page is ASCII (or Latin1 control range) plus a whatwg index table.
# 8859-9 and 8859-11 are derived from the windows-1254/874 supersets.
ISO8859 = {}
CodePageISO8859 = {}
CodePageISO8859_R = {}
comment = {
    1 : "Latin1, Western Europe",
    2 : "Latin2, Central Europe",
    3 : "Latin3, Maltese",
    4 : "Latin4, Northern Europe",
    5 : "Cyrillic",
    6 : "Arabic",
    7 : "Greek",
    8 : "Hebrew",
    9 : "Latin5, Turkish",
    10: "Latin6, Nordic Europe",
    11: "Thai, from TIS-620",
    13: "Latin7, Baltic Rim",
    14: "Latin8, Celtic",
    15: "Latin9, Euro 1",
    16: "Latin10, Euro 2",
}
for i in [1,2,3,4,5,6,7,8,9,10,11,13,14,15,16]:
    if i == 1:
        ISO8859[i] = list(range(0,256))
    elif i == 9:
        ISO8859[i] = (list(range(0,160)) +
                      readtable('whatwg/index-windows-1254.txt')[32:])
    elif i == 11:
        ISO8859[i] = (list(range(0,160)) +
                      readtable('whatwg/index-windows-874.txt')[32:])
    else:
        ISO8859[i] = (list(range(0,128)) +
                      readtable('whatwg/index-iso-8859-%d.txt' % (i,)))
    # pages with sparse upper halves store 0xA0-0xFF as an explicit range
    if i in [2, 3, 4, 10, 13, 14, 16]:
        table = maketable2(ISO8859[i], Forward, [160,255], maxin=255)
    else:
        table = maketable2(ISO8859[i], Forward, maxrun=1, maxin=255)
        # add a hot-segment header entry for segment 0
        table[0] += 1
        table.insert(1, 0)
    checktable(table, Forward, ISO8859[i])
    CodePageISO8859[i] = table
    if i in [2, 3, 4, 10, 13, 14, 16]:
        table = maketable2(ISO8859[i], Reverse, maxrun=16) #[0xA7,0x17E])
    else:
        table = maketable2(ISO8859[i], Reverse, maxrun=1)
        table[0] += 1
        table.insert(1, 0)
    checktable(table, Reverse, ISO8859[i])
    CodePageISO8859_R[i] = table
    sys.stdout.write("// %s\n" % (comment[i],))
    printtable("CodePageISO8859_%d"%(i,), CodePageISO8859[i], Forward, maxin=255)
    sys.stdout.write('\n')
    sys.stdout.write('// Reverse\n')
    printtable("CodePageISO8859_%d_R"%(i,), CodePageISO8859_R[i], Reverse)
    sys.stdout.write('\n')
# ----
# Encodings of Korean
# ----
# Build KS X 1001 from the whatwg euc-kr index: the first 8836 entries
# (94x94) are the standard KS X 1001 codes, the rest are the extra
# CP949 hangul codes.
euckr = readdict('whatwg/index-euc-kr.txt')
KSX1001 = [RCHAR]*(8836 + 8822)
for x in range(0x81,0xFF):
    for y in range(0x41,0xFF):
        # i = destination index in KSX1001, j = index in the euc-kr table
        # NOTE(review): i can remain None for y in 0x5B-0x60/0x7B-0x80;
        # that relies on euckr[j] raising KeyError first -- confirm.
        i = None
        if x >= 0xA1 and y >= 0xA1:
            i = (x-0xA1)*94 + (y-0xA1)
        elif (y >= 0x41 and y <= 0x5A) or (y >= 0x61 and y <= 0x7A) or y >= 0x81:
            a = x - 0x81
            b = y - 0x41
            if b >= 26:
                b -= 6
            if b >= 52:
                b -= 6
            if a < 32:
                i = 8836 + a*178 + b
            else:
                i = 8836 + 3008 + a*84 + b
        j = (x-0x81)*190 + (y-0x41)
        try:
            KSX1001[i] = euckr[j]
        except KeyError:
            pass
table = maketable2(KSX1001, Forward, [0,163], [1410,3759])
# [0,163], [1410,3759], [3854,8741], [8836,17657])
checktable(table, Forward, KSX1001)
CodePageKSX1001 = table
table = maketable2(KSX1001, Reverse, cjk_punct, kr_hangul)
checktable(table, Reverse, KSX1001)
CodePageKSX1001_R = table
sys.stdout.write("// Korean KS X 1001:1998")
sys.stdout.write("(codes over 8836 are for CP949 compatibility)\n")
printtable("CodePageKSX1001", CodePageKSX1001, Forward)
sys.stdout.write('\n')
sys.stdout.write('// Reverse\n')
printtable("CodePageKSX1001_R", CodePageKSX1001_R, Reverse)
sys.stdout.write('\n')
# ----
# Encodings of Simplified Chinese
# ----
# For reference: C++ code to decode GBK so GB2312 comes first
"""
if (a < 0xA1)
{
if (b > 0x7F) { b--; }
a = (a - 0x81)*190 + (b - 0x40) + 8836;
}
else if (b < 0xA1)
{
if (b > 0x7F) { b--; }
a = (a - 0xA1)*96 + (b - 0x40) + 8836 + 6080;
}
else
{
// GB2312
a = (a - 0xA1)*94 + (b - 0xA1);
}
"""
# Also use this table for GBK and GB2312
GB18030 = readtable('whatwg/index-gb18030.txt')
# Fix difference between whatwg table and official table
GB18030[6555] = 0xE5E5 # 0x3000, ideographic space (duplicate)
# Change GB18030-2005 to GB18030-2000 (DICOM uses GB18030-2000)
GB18030[7533] = 0xE7C7 # 0x1E3F
# Reorganize the 23940 codes so that GB2312 codes come first, this allows
# us to use the first 8836 entries as a GB2312 table.
GB2312 = []
block2 = list(GB18030[0:6080])
block3 = []
for i in range(6080,6080+190*94,190):
    block3 += GB18030[i:i+96]
    GB2312 += GB18030[i+96:i+190]
GB18030 = GB2312 + block2 + block3
# Add all linear mappings within the BMP to our GB18030 table
LinearGB18030 = readlinear('whatwg/index-gb18030-ranges.txt')
for i in range(0,len(LinearGB18030)-2,2):
    x1 = LinearGB18030[i]
    y = LinearGB18030[i+1]
    x2 = min(LinearGB18030[i+2], 39420)
    GB18030 += list(range(y,y+(x2-x1)))
# For Unicode to GBK mapping, ensure compatibility with the GBK mappings
# that pre-date the GB18030-2000 standard, as described in this table:
gbk_compat = {
# Compatibility mapping - m with acute
0x1E3F : 7533,
# PUA mappings (2) in GB2312
0xE7C7 : 7533, 0xE7C8 : 7536,
# PUA mappings (13) in GBK - ideographic description characters
0xE7E7 : 7672, 0xE7E8 : 7673, 0xE7E9 : 7674, 0xE7EA : 7675, 0xE7EB : 7676,
0xE7EC : 7677, 0xE7ED : 7678, 0xE7EE : 7679, 0xE7EF : 7680, 0xE7F0 : 7681,
0xE7F1 : 7682, 0xE7F2 : 7683, 0xE7F3 : 7684,
# PUA mappings (80) in GBK - various radicals and ideographs
0xE815 :23766, 0xE816 :23767, 0xE817 :23768, 0xE818 :23769, 0xE819 :23770,
0xE81A :23771, 0xE81B :23772, 0xE81C :23773, 0xE81D :23774, 0xE81E :23775,
0xE81F :23776, 0xE820 :23777, 0xE821 :23778, 0xE822 :23779, 0xE823 :23780,
0xE824 :23781, 0xE825 :23782, 0xE826 :23783, 0xE827 :23784, 0xE828 :23785,
0xE829 :23786, 0xE82A :23787, 0xE82B :23788, 0xE82C :23789, 0xE82D :23790,
0xE82E :23791, 0xE82F :23792, 0xE830 :23793, 0xE831 :23794, 0xE832 :23795,
0xE833 :23796, 0xE834 :23797, 0xE835 :23798, 0xE836 :23799, 0xE837 :23800,
0xE838 :23801, 0xE839 :23802, 0xE83A :23803, 0xE83B :23804, 0xE83C :23805,
0xE83D :23806, 0xE83E :23807, 0xE83F :23808, 0xE840 :23809, 0xE841 :23810,
0xE842 :23811, 0xE843 :23812, 0xE844 :23813, 0xE845 :23814, 0xE846 :23815,
0xE847 :23816, 0xE848 :23817, 0xE849 :23818, 0xE84A :23819, 0xE84B :23820,
0xE84C :23821, 0xE84D :23822, 0xE84E :23823, 0xE84F :23824, 0xE850 :23825,
0xE851 :23826, 0xE852 :23827, 0xE853 :23828, 0xE854 :23829, 0xE855 :23830,
0xE856 :23831, 0xE857 :23832, 0xE858 :23833, 0xE859 :23834, 0xE85A :23835,
0xE85B :23836, 0xE85C :23837, 0xE85D :23838, 0xE85E :23839, 0xE85F :23840,
0xE860 :23841, 0xE861 :23842, 0xE862 :23843, 0xE863 :23844, 0xE864 :23845,
# Compatibility mappings (8) not present in GB18030
0x9FB4 :23775, 0x9FB5 :23783, 0x9FB6 :23788, 0x9FB7 :23789, 0x9FB8 :23795,
0x9FB9 :23812, 0x9FBA :23829, 0x9FBB :23845,
# Vertical punctuation (10) within GB2312 range
0xFE10 : 7182, 0xFE11 : 7183, 0xFE12 : 7184, 0xFE13 : 7185, 0xFE14 : 7186,
0xFE15 : 7187, 0xFE16 : 7188, 0xFE17 : 7189, 0xFE18 : 7190, 0xFE19 : 7191,
# Points beyond the BMP (6)
#0x20087 : 23767, 0x20089 : 23768, 0x200CC : 23769, 0x215D7 : 23794,
#0x2298F : 23804, 0x241FE : 23830,
}
# re-number GBK codes in the above so that GB2312 comes first
for k in gbk_compat.keys():
v = gbk_compat[k]
(a,b) = (v/190,v%190)
if a < 32:
v += 8836 # make room for GB2312
elif b < 96:
v = 8836 + 6080 + (a-32)*96 + b
else:
v = (a-32)*94 + (b-96)
gbk_compat[k] = v
GBKCOMPAT=[RCHAR]*(1 + max(gbk_compat.keys()))
table = maketable2(GBKCOMPAT, Reverse, gbk_compat)
GBKCompatTable = table
def removesegs(table, x, y=0):
    """Remove the final segments from a table, to allow mapping of
    replacement code 0xFFFD as if it were a normal code.

    The segment count is stored at index table[0]+1; it is reduced by x,
    and entries are cut out of each of the three equal-length runs that
    follow it.  The y offset shifts which trailing entries are dropped.
    Mutates ``table`` in place.
    """
    pos = table[0] + 1
    count = table[pos]
    kept = count - x
    table[pos] = kept
    pos += 1
    # Each deletion's bounds already account for the elements removed by
    # the previous one, so these must stay in this order.
    del table[pos + kept - y : pos + count - y]
    del table[pos + 2*kept - y : pos + kept + count - y]
    del table[pos + 3*kept - y : pos + 2*kept + count - y]
# Build and print the forward and reverse GB18030 tables.
# NOTE(review): checktable is passed literal 0/1 here where other calls
# in this file pass Forward/Reverse -- presumably the same values.
table = maketable2(GB18030, Forward, [0,93], [1410,8177])
checktable(table, 0, GB18030)
removesegs(table, 2, 1)
CodePageGB18030 = table
# for compatibility with GBK 1.0, be sure to map old GBK PUA codes
table = maketable2(GB18030, Reverse, cjk_punct, cjk_unified)
checktable(table, 1, GB18030)
removesegs(table, 2)
CodePageGB18030_R = table
sys.stdout.write("// Simplified Chinese GB18030-2000\n")
printtable("CodePageGB18030", CodePageGB18030, Forward)
sys.stdout.write('\n')
sys.stdout.write('// Reverse\n')
printtable("CodePageGB18030_R", CodePageGB18030_R, Reverse)
sys.stdout.write('\n')
sys.stdout.write('// Compatibility overlay for GBK and GB2312\n')
printtable("CodePageGBK_R", GBKCompatTable, Reverse)
sys.stdout.write('\n')
# ----
# Encodings of Japanese
# ----
JISX0208 = readtable('whatwg/index-jis0208.txt')
# Fill rows beyond the 94x94 plane with EUDC private-use codes, then
# override selected entries where the whatwg (CP932-flavored) mapping
# differs from the code point named in each trailing comment.
JISX0208[8836:10716] = range(0xE000,0xE000+(10716-8836)) # EUDC
JISX0208[16] = 0xFFE3 # FULLWIDTH MACRON
JISX0208[28] = 0x2014 # EM DASH
JISX0208[31] = 0xFF3C # FULLWIDTH REVERSE SOLIDUS
JISX0208[32] = 0x301C # WAVE DASH
JISX0208[33] = 0x2016 # DOUBLE VERTICAL LINE
JISX0208[60] = 0x2212 # MINUS SIGN
JISX0208[78] = 0xFFE5 # FULLWIDTH YEN SIGN
JISX0208[80] = 0x00A2 # CENT SIGN
JISX0208[81] = 0x00A3 # POUND SIGN
JISX0208[137] = 0x00AC # NOT SIGN
JISX0212 = readtable('whatwg/index-jis0212.txt')
JISX0212[116] = 0xFF5E # FULLWIDTH TILDE
JISX0212[128] = 0x00A6 # BROKEN BAR
# Combined table: JIS X 0208 first, JIS X 0212 from offset 8836 on.
JISX02XX = JISX0208[0:8836] + JISX0212
# remove NEC extensions
JISX02XX[12*94:13*94] = [RCHAR]*94
JISX02XX[88*94:92*94] = [RCHAR]*(94*4)
# The FULLWIDTH TILDE is a problem, because it can map either to
# JIS X 0208 or JIS X 0212, and the former is preferred for CP932.
# Also note that for CP932, PUA 0xE000 to 0xE757 (1880 points) should
# map to the space between the JIS X 0208 table and the CP932 extensions.
# ( also see http://www.unicode.org/L2/L2014/14198-wave-dash.pdf )
j_compat = {
0xFFE3 : 16, # FULLWIDTH MACRON
0x203E : 16, # OVERLINE (jisx0213)
0x2014 : 28, # EM DASH
0x2015 : 28, # HORIZONTAL BAR
0xFF3C : 31, # FULLWIDTH REVERSE SOLIDUS
0x005C : 31, # REVERSE SOLIDUS (jisx0201 compatibility)
0x301C : 32, # WAVE DASH
# 0xFF5E : 32, # FULLWIDTH TILDE (cp932)
0x2016 : 33, # DOUBLE VERTICAL LINE
0x2225 : 33, # PARALLEL TO (cp932)
0x2212 : 60, # MINUS SIGN
0xFF0D : 60, # FULLWIDTH HYPHEN-MINUS (cp932)
0xFFE5 : 78, # FULLWIDTH YEN SIGN
0x00A5 : 78, # YEN SIGN (jisx0213)
0x00A2 : 80, # CENT SIGN
0xFFE0 : 80, # FULLWIDTH CENT SIGN (cp932)
0x00A3 : 81, # POUND SIGN
0xFFE1 : 81, # FULLWIDTH POUND SIGN (cp932)
0x00AC : 137, # NOT SIGN
0xFFE2 : 137, # FULLWIDTH NOT SIGN (cp932)
0x007E : 8952, # TILDE (jisx0201 compatibility)
0xFFE4 : 8964, # FULLWIDTH BROKEN BAR
0x525D : 3648, # U+5265 (jouyou kanji compatibility)
# 0x20B9F : 2561, # U+53F1 (jouyou kanji compatibility)
}
# make a reversible table for CP932
# (Python 2 only: list + range concatenation)
JISX0208_R = ([RCHAR]*(94*12) + JISX0208[12*94:13*94] +
[RCHAR]*(94*75) + JISX0208[88*94:94*94] +
range(0xE000,0xE758))
JISX0208_R[32] = 0xFF5E # FULLWIDTH TILDE (in JIS0212)
# remove duplicate mappings
JISX0208_R[1207] = RCHAR # 159 0x2252
JISX0208_R[1208] = RCHAR # 158 0x2261
JISX0208_R[1209] = RCHAR # 166 0x222b
JISX0208_R[1212] = RCHAR # 162 0x221a
JISX0208_R[1213] = RCHAR # 154 0x22a5
JISX0208_R[1214] = RCHAR # 153 0x2220
JISX0208_R[1217] = RCHAR # 165 0x2235
JISX0208_R[1218] = RCHAR # 126 0x2229
JISX0208_R[1219] = RCHAR # 125 0x222a
# the following jouyou kanji have compatibility mappings for JISX0208
JISX0208_R[3405] = 0x5861 # U+586B (in JIS0212)
JISX0208_R[3990] = 0x9830 # U+982C (in JIS0212)
# map halfwidth kana to fullwidth
# (each branch handles one contiguous run of fullwidth code points)
for x,u in readdict('whatwg/index-iso-2022-jp-katakana.txt').items():
if u <= 0x3002:
JISX0208_R[u - 0x3000] = 0xFF61 + x
elif u <= 0x300D:
JISX0208_R[u - 0x300C + 53] = 0xFF61 + x
elif u <= 0x309C:
JISX0208_R[u - 0x309B + 10] = 0xFF61 + x
elif u == 0x30FB:
JISX0208_R[u - 0x30FB + 5] = 0xFF61 + x
elif u == 0x30FC:
JISX0208_R[u - 0x30FC + 27] = 0xFF61 + x
else:
JISX0208_R[u - 0x30A1 + 376] = 0xFF61 + x
table = maketable2(JISX0208_R, Reverse, maxrun=2)
checktable(table, Reverse, JISX0208_R)
CodePageJISX0208_R = table
# more finicky details: half-width katakana to full-width if JISX0201 is
# not available, full-width katakana to half-width if only JISX0201
table = maketable2(JISX0208, Forward, [0,689], [1410,7807])
checktable(table, Forward, JISX0208)
CodePageJISX0208 = table
table = maketable2(JISX0212, Forward, [1410, 7210])
checktable(table, Forward, JISX0212)
CodePageJISX0212 = table
table = maketable2(JISX02XX, Reverse, cjk_punct_kana, cjk_unified, j_compat)
checktable(table, Reverse, JISX02XX, j_compat)
CodePageJISX_R = table
sys.stdout.write("// Japanese JIS X 0208, plus CP932 compatibility\n")
printtable("CodePageJISX0208", CodePageJISX0208, Forward)
sys.stdout.write('\n')
sys.stdout.write("// Japanese JIS X 0212\n")
printtable("CodePageJISX0212", CodePageJISX0212, Forward)
sys.stdout.write('\n')
sys.stdout.write('// Reverse (values >= 8836 are JIS X 0212)\n')
printtable("CodePageJISX_R", CodePageJISX_R, Reverse)
sys.stdout.write('\n')
sys.stdout.write('// Reverse table overlay with CP932 extensions\n')
printtable("CodePageJISX0208_R", CodePageJISX0208_R, Reverse)
sys.stdout.write('\n')
# ----
# Encodings of Traditional Chinese
# ----
# Read Big5, keep ETEN but ignore HKSCS
# (ETEN adds 408 chars including japanese, cyrillic)
BIG5_HKSCS = readtable('whatwg/index-big5.txt')
BIG5_PRIV1 = list(range(0xEEB8,0xF6B1)) # CP950 compatibility
BIG5_PRIV2 = list(range(0xE311,0xEEB8)) # CP950 compatibility
BIG5_PRIV4 = list(range(0xE000,0xE311)) # CP950 compatibility
# Assemble private-use runs around the non-HKSCS slice of the whatwg table.
BIG5 = BIG5_PRIV1 + BIG5_PRIV2 + BIG5_HKSCS[5024:18997] + BIG5_PRIV4
# These codes in ETEN block map outside BMP, replace with private codes
BIG5[11205] = 0xF7E6 # 0x200CC
BIG5[11207] = 0xF7E8 # 0x2008A
BIG5[11213] = 0xF7EE # 0x27607
# For CP950 compatibility, fill unused slots with EUDC
for i in range(10896,11304):
if BIG5[i] == 0xFFFD:
BIG5[i] = i - 10896 + 0xF6B1
# Duplicates must be removed from table before inverting it
BIG5_R = list(BIG5)
# Replace with hangzhou digits from Unicode 3.0
BIG5_R[5287] = 0x3038 # 5512 0x5341
BIG5_R[5288] = 0x3039 # None 0x5344
BIG5_R[5289] = 0x303A # 5599 0x5345
# Remove duplicate cjk characters in ETEN
BIG5_R[10957] = 0xFFFD # 5104 0x3003 (DITTO MARK)
BIG5_R[10958] = 0xFFFD # 11345 0x4edd (ideographic ditto)
# Remove duplicate box-drawing characters
BIG5_R[18975] = 0xFFFD # 5248 0x255e
BIG5_R[18976] = 0xFFFD # 5249 0x256a
BIG5_R[18977] = 0xFFFD # 5250 0x2561
BIG5_R[18991] = 0xFFFD # 5247 0x2550
BIG5_R[18992] = 0xFFFD # 5243 0x256d
BIG5_R[18993] = 0xFFFD # 5244 0x256e
BIG5_R[18994] = 0xFFFD # 5245 0x2570
BIG5_R[18995] = 0xFFFD # 5246 0x256f
# Preferred unicode -> big5 slots for code points removed above.
big5_compat = {
0x3038 : 5287, # HANGZHOU NUMERAL 10
0x3039 : 5288, # HANGZHOU NUMERAL 20
0x5344 : 5288, # cjk unified ideograph
0x303A : 5289, # HANGZHOU NUMERAL 30
}
# compatibility for CP950 private codes
for i in range(10896,11304):
big5_compat[i - 10896 + 0xF6B1] = i
# NOTE(review): literal 0/1 used here where other calls pass
# Forward/Reverse -- presumably the same values.
table = maketable2(BIG5, 0)
checktable(table, 0, BIG5)
CodePageBig5 = table
table = maketable2(BIG5_R, 1, cjk_punct, cjk_unified, big5_compat)
checktable(table, 1, BIG5_R, big5_compat)
CodePageBig5_R = table
sys.stdout.write("// Traditional Chinese Big5 with ETEN extensions\n")
sys.stdout.write("// Needs special handling for ETEN outside BMP:\n")
sys.stdout.write("// 11205 -> 0x200CC, 11207 -> 0x2008A, 11213 -> 0x27607\n")
printtable("CodePageBig5", CodePageBig5, Forward)
sys.stdout.write('\n')
sys.stdout.write('// Reverse\n')
printtable("CodePageBig5_R", CodePageBig5_R, Reverse)
sys.stdout.write('\n')
# ----
# Windows Code Pages
# ----
# Per-code-page accumulators: raw 256-entry input tables, plus the
# generated forward (byte -> unicode) and reverse (unicode -> byte)
# tables, keyed by the Windows code page number.
CP = {}
CodePageWindows = {}
CodePageWindows_R = {}
# Human-readable descriptions, emitted as C comments above each table.
# (Fixed typo: "extents" -> "extends" for cp1252 and cp1254, matching
# the wording used for cp874.)
comment = {
    874 : "Windows Thai, extends iso-8859-11",
    1250: "Windows Central Europe",
    1251: "Windows Cyrillic",
    1252: "Windows Latin1, extends iso-8859-1",
    1253: "Windows Greek",
    1254: "Windows Turkish, extends iso-8859-9",
    1255: "Windows Hebrew",
    1256: "Windows Arabic",
    1257: "Windows Baltic Rim",
}
# Build and print forward/reverse tables for each Windows code page.
for i in [874,1250,1251,1252,1253,1254,1255,1256,1257]:
# Each code page is plain ASCII for 0-127 plus the whatwg high half.
CP[i] = (list(range(0,128)) +
readtable('whatwg/index-windows-%d.txt' % (i,)))
# cp1254 and cp1255 get an explicit range and a hand-edited header;
# the other pages take the generic maxrun=16 path.
if i in [1254]:
table = maketable2(CP[i], Forward, [128,159], maxrun=1, maxin=255)
table = [0] + table[2:]
elif i in [1255]:
table = maketable2(CP[i], Forward, [128,191], maxrun=1, maxin=255)
table = [0] + table[2:]
else:
table = maketable2(CP[i], Forward, maxrun=16, maxin=255)
table[0] += 1
table.insert(1, 0)
checktable(table, Forward, CP[i])
CodePageWindows[i] = table
table = maketable2(CP[i], Reverse, maxrun=16)
table[0] += 1
table.insert(1, 0)
checktable(table, Reverse, CP[i])
CodePageWindows_R[i] = table
sys.stdout.write("// %s\n" % (comment[i],))
printtable("CodePageWindows%d"%(i,), CodePageWindows[i], Forward, maxin=255)
sys.stdout.write('\n')
sys.stdout.write('// Reverse\n')
printtable("CodePageWindows%d_R"%(i,), CodePageWindows_R[i], Reverse)
sys.stdout.write('\n')
# ----
# KOI8 Code Pages
# ----
KOI8 = (list(range(0,128)) +
readtable('whatwg/index-koi8-u.txt'))
# remove all non-alphabetic characters except nbsp, interpunct, copyright
for i in range(0x80,0xC0):
if KOI8[i] < 0x400 or KOI8[i] >= 0x500:
if KOI8[i] not in [0xA0,0xA9,0xB7]:
KOI8[i] = RCHAR
ftable = maketable2(KOI8, Forward, [160,255], maxin=255)
ftable[0] += 1
ftable.insert(1, 0)
checktable(ftable, Forward, KOI8)
rtable = maketable2(KOI8, Reverse, [0x0400,0x0491])
rtable[0] += 1
rtable.insert(1, 0)
checktable(rtable, Reverse, KOI8)
sys.stdout.write("// koi8 with extra cyrillic letters from koi8-ru\n")
printtable("CodePageKOI8", ftable, Forward, maxin=255)
sys.stdout.write('\n')
sys.stdout.write('// Reverse\n')
printtable("CodePageKOI8_R", rtable, Reverse)
sys.stdout.write('\n')
# this must be consistent with the enum in vtkDICOMCharacterSet.h
# Values below 32 are single code pages; adding ISO_2022 (32) selects
# the ISO 2022 escape-sequence variant of the same character set.
ISO_2022 = 32
ISO_IR_6 = 0 # US_ASCII
ISO_IR_13 = 1 # JIS X 0201, japanese romaji + katakana
ISO_IR_100 = 8 # ISO-8859-1, latin1, western europe
ISO_IR_101 = 9 # ISO-8859-2, latin2, central europe
ISO_IR_109 = 10 # ISO-8859-3, latin3, maltese
ISO_IR_110 = 11 # ISO-8859-4, latin4, baltic
ISO_IR_144 = 12 # ISO-8859-5, cyrillic
ISO_IR_127 = 13 # ISO-8859-6, arabic
ISO_IR_126 = 14 # ISO-8859-7, greek
ISO_IR_138 = 15 # ISO-8859-8, hebrew
ISO_IR_148 = 16 # ISO-8859-9, latin5, turkish
X_LATIN6 = 17 # ISO-8859-10, latin6, nordic
ISO_IR_166 = 18 # ISO-8859-11, thai
X_LATIN7 = 19 # ISO-8859-13, latin7, baltic rim
X_LATIN8 = 20 # ISO-8859-14, latin8, celtic
X_LATIN9 = 21 # ISO-8859-15, latin9, western europe
X_LATIN10 = 22 # ISO-8859-16, latin10, southeastern europe
X_EUCKR = 24 # euc-kr, ISO_IR_149 without escape codes
X_GB2312 = 25 # gb2312, ISO_IR_58 without escape codes
ISO_2022_IR_6 = 32 # US_ASCII
ISO_2022_IR_13 = 33 # JIS X 0201, japanese katakana
ISO_2022_IR_87 = 34 # JIS X 0208, japanese 94x94 primary
ISO_2022_IR_159 = 36 # JIS X 0212, japanese 94x94 secondary
ISO_2022_IR_100 = 40 # ISO-8859-1, latin1, western europe
ISO_2022_IR_101 = 41 # ISO-8859-2, latin2, central europe
ISO_2022_IR_109 = 42 # ISO-8859-3, latin3, maltese
ISO_2022_IR_110 = 43 # ISO-8859-4, latin4, baltic
ISO_2022_IR_144 = 44 # ISO-8859-5, cyrillic
ISO_2022_IR_127 = 45 # ISO-8859-6, arabic
ISO_2022_IR_126 = 46 # ISO-8859-7, greek
ISO_2022_IR_138 = 47 # ISO-8859-8, hebrew
ISO_2022_IR_148 = 48 # ISO-8859-9, latin5, turkish
ISO_2022_IR_166 = 50 # ISO-8859-11, thai
ISO_2022_IR_149 = 56 # the KS X 1001 part of ISO-2022-KR
ISO_2022_IR_58 = 57 # the GB2312 part of ISO-2022-CN
ISO_IR_192 = 64 # UTF-8, unicode
# NOTE: this rebinds the name GB18030 (previously the table list built
# above); the list is no longer needed once its tables were printed.
GB18030 = 65 # gb18030, chinese with full unicode mapping
GBK = 66 # gbk, chinese
X_BIG5 = 67 # big5 + ETEN, traditional chinese
X_EUCJP = 69 # euc-jp, unix encoding for japanese
X_SJIS = 70 # windows-31j, aka shift-jis, code page 932
X_CP874 = 76 # cp1162, thai (windows-874)
X_CP1250 = 80 # cp1250, central europe
X_CP1251 = 81 # cp1251, cyrillic
X_CP1252 = 82 # cp1252, western europe
X_CP1253 = 83 # cp1253, greek
X_CP1254 = 84 # cp1254, turkish
X_CP1255 = 85 # cp1255, hebrew
X_CP1256 = 86 # cp1256, arabic
X_CP1257 = 87 # cp1257, baltic rim
X_KOI8 = 90 # koi, cyrillic
# Map each character-set enum value to the identifiers of its generated
# (forward, reverse) C table; several sets deliberately share tables.
pages = {
ISO_IR_6 : ('CodePageASCII', 'CodePageASCII_R'),
ISO_IR_13 : ('CodePageJISX0201', 'CodePageJISX0201_R'),
ISO_IR_100 : ('CodePageISO8859_1', 'CodePageISO8859_1_R'),
ISO_IR_101 : ('CodePageISO8859_2', 'CodePageISO8859_2_R'),
ISO_IR_109 : ('CodePageISO8859_3', 'CodePageISO8859_3_R'),
ISO_IR_110 : ('CodePageISO8859_4', 'CodePageISO8859_4_R'),
ISO_IR_144 : ('CodePageISO8859_5', 'CodePageISO8859_5_R'),
ISO_IR_127 : ('CodePageISO8859_6', 'CodePageISO8859_6_R'),
ISO_IR_126 : ('CodePageISO8859_7', 'CodePageISO8859_7_R'),
ISO_IR_138 : ('CodePageISO8859_8', 'CodePageISO8859_8_R'),
ISO_IR_148 : ('CodePageISO8859_9', 'CodePageISO8859_9_R'),
X_LATIN6 : ('CodePageISO8859_10', 'CodePageISO8859_10_R'),
ISO_IR_166 : ('CodePageISO8859_11', 'CodePageISO8859_11_R'),
X_LATIN7 : ('CodePageISO8859_13', 'CodePageISO8859_13_R'),
X_LATIN8 : ('CodePageISO8859_14', 'CodePageISO8859_14_R'),
X_LATIN9 : ('CodePageISO8859_15', 'CodePageISO8859_15_R'),
X_LATIN10 : ('CodePageISO8859_16', 'CodePageISO8859_16_R'),
X_EUCKR : ('CodePageKSX1001', 'CodePageKSX1001_R'),
X_GB2312 : ('CodePageGB18030', 'CodePageGBK_R'),
ISO_2022_IR_6 : ('CodePageASCII', 'CodePageASCII_R'),
ISO_2022_IR_13 : ('CodePageJISX0201', 'CodePageJISX0201_R'),
ISO_2022_IR_87 : ('CodePageJISX0208', 'CodePageJISX_R'),
ISO_2022_IR_159 : ('CodePageJISX0212', 'CodePageJISX_R'),
ISO_2022_IR_100 : ('CodePageISO8859_1', 'CodePageISO8859_1_R'),
ISO_2022_IR_101 : ('CodePageISO8859_2', 'CodePageISO8859_2_R'),
ISO_2022_IR_109 : ('CodePageISO8859_3', 'CodePageISO8859_3_R'),
ISO_2022_IR_110 : ('CodePageISO8859_4', 'CodePageISO8859_4_R'),
ISO_2022_IR_144 : ('CodePageISO8859_5', 'CodePageISO8859_5_R'),
ISO_2022_IR_127 : ('CodePageISO8859_6', 'CodePageISO8859_6_R'),
ISO_2022_IR_126 : ('CodePageISO8859_7', 'CodePageISO8859_7_R'),
ISO_2022_IR_138 : ('CodePageISO8859_8', 'CodePageISO8859_8_R'),
ISO_2022_IR_148 : ('CodePageISO8859_9', 'CodePageISO8859_9_R'),
ISO_2022+X_LATIN6 : ('CodePageISO8859_10', 'CodePageISO8859_10_R'),
ISO_2022_IR_166 : ('CodePageISO8859_11', 'CodePageISO8859_11_R'),
ISO_2022+X_LATIN7 : ('CodePageISO8859_13', 'CodePageISO8859_13_R'),
ISO_2022+X_LATIN8 : ('CodePageISO8859_14', 'CodePageISO8859_14_R'),
ISO_2022+X_LATIN9 : ('CodePageISO8859_15', 'CodePageISO8859_15_R'),
ISO_2022+X_LATIN10 : ('CodePageISO8859_16', 'CodePageISO8859_16_R'),
ISO_2022_IR_149 : ('CodePageKSX1001', 'CodePageKSX1001_R'),
ISO_2022_IR_58 : ('CodePageGB18030', 'CodePageGBK_R'),
GB18030 : ('CodePageGB18030', 'CodePageGB18030_R'),
GBK : ('CodePageGB18030', 'CodePageGBK_R'),
X_BIG5 : ('CodePageBig5', 'CodePageBig5_R'),
X_EUCJP : ('CodePageJISX0208', 'CodePageJISX_R'),
X_SJIS : ('CodePageJISX0208', 'CodePageJISX0208_R'),
X_CP874 : ('CodePageWindows874', 'CodePageWindows874_R'),
X_CP1250 : ('CodePageWindows1250', 'CodePageWindows1250_R'),
X_CP1251 : ('CodePageWindows1251', 'CodePageWindows1251_R'),
X_CP1252 : ('CodePageWindows1252', 'CodePageWindows1252_R'),
X_CP1253 : ('CodePageWindows1253', 'CodePageWindows1253_R'),
X_CP1254 : ('CodePageWindows1254', 'CodePageWindows1254_R'),
X_CP1255 : ('CodePageWindows1255', 'CodePageWindows1255_R'),
X_CP1256 : ('CodePageWindows1256', 'CodePageWindows1256_R'),
X_CP1257 : ('CodePageWindows1257', 'CodePageWindows1257_R'),
X_KOI8 : ('CodePageKOI8', 'CodePageKOI8_R'),
}
# Emit the 256-entry dispatch arrays; slots with no code page produce a
# literal 0 (null pointer) in the generated C++.
table = [('0','0')]*256
for x,y in pages.items():
table[x] = y
sys.stdout.write(
'const unsigned short *vtkDICOMCharacterSet::Table[256] = {\n')
for l in table:
sys.stdout.write(' %s,\n' % (l[0],))
sys.stdout.write('};\n\n')
sys.stdout.write(
'const unsigned short *vtkDICOMCharacterSet::Reverse[256] = {\n')
for l in table:
sys.stdout.write(' %s,\n' % (l[1],))
sys.stdout.write('};\n')
| 33.794246 | 81 | 0.604587 | 5,387 | 38,762 | 4.269723 | 0.202525 | 0.026216 | 0.040781 | 0.020869 | 0.270988 | 0.234033 | 0.203426 | 0.158558 | 0.110778 | 0.102909 | 0 | 0.174267 | 0.252103 | 38,762 | 1,146 | 82 | 33.823735 | 0.619145 | 0.148728 | 0 | 0.248315 | 1 | 0 | 0.141924 | 0.020701 | 0.001124 | 0 | 0.050459 | 0 | 0.007865 | 0 | null | null | 0.001124 | 0.003371 | null | null | 0.037079 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1cb1da58e3c1793b4b5a063a1d1b0abc61db9ff8 | 377 | py | Python | test/test_riak.py | claytonbrown/simhash-db-py | f9feca5b09cf094c65548c53ad596ab07cf41639 | [
"MIT"
] | 51 | 2015-01-12T02:10:29.000Z | 2021-09-25T08:51:37.000Z | test/test_riak.py | claytonbrown/simhash-db-py | f9feca5b09cf094c65548c53ad596ab07cf41639 | [
"MIT"
] | 5 | 2015-01-15T15:42:37.000Z | 2018-02-18T06:46:51.000Z | test/test_riak.py | claytonbrown/simhash-db-py | f9feca5b09cf094c65548c53ad596ab07cf41639 | [
"MIT"
] | 21 | 2015-05-01T15:06:39.000Z | 2021-07-29T01:12:37.000Z | #! /usr/bin/env python
'''Make sure the Riak client is sane'''
import unittest
from test import BaseTest
from simhash_db import Client
class RiakTest(BaseTest, unittest.TestCase):
    """Run the shared BaseTest suite against the Riak backend."""

    def make_client(self, name, num_blocks, num_bits):
        """BaseTest hook: build a Riak-backed simhash_db client."""
        backend = 'riak'
        return Client(backend, name, num_blocks, num_bits)
# Allow running this test module directly (python test_riak.py).
if __name__ == '__main__':
unittest.main()
| 20.944444 | 57 | 0.70557 | 53 | 377 | 4.754717 | 0.566038 | 0.055556 | 0.103175 | 0.126984 | 0.15873 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.180371 | 377 | 17 | 58 | 22.176471 | 0.815534 | 0.201592 | 0 | 0 | 0 | 0 | 0.041379 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.375 | 0.125 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 2 |
1cb2398734496b92fe717f59c962c7f511771a93 | 5,736 | py | Python | ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/spnego_kerberos_auth.py | zyclove/ambari | 1032f0f54cb7b312b9a3b37570cd840f4e1e89d4 | [
"Apache-2.0"
] | null | null | null | ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/spnego_kerberos_auth.py | zyclove/ambari | 1032f0f54cb7b312b9a3b37570cd840f4e1e89d4 | [
"Apache-2.0"
] | null | null | null | ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/spnego_kerberos_auth.py | zyclove/ambari | 1032f0f54cb7b312b9a3b37570cd840f4e1e89d4 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python2
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import logging
import httplib
import os
logger = logging.getLogger()

# Prefer the real python-kerberos bindings; fall back to the bundled
# ``krberr`` stub (imported under the same name) when they are missing.
try:
    import kerberos
except ImportError as e:
    import krberr as kerberos
    # Log the actual import failure; previously this logged
    # str(ImportError), i.e. the exception *class*, not the error.
    logger.warn('import kerberos exception: %s' % str(e))
class SPNEGOKerberosAuth:
# Performs a SPNEGO/Kerberos ("Negotiate") HTTP handshake with the
# python-kerberos GSSAPI bindings: kinit, resend the request with an
# Authorization header, then verify the server's reply token.
def __init__(self):
# GSSAPI client context handle, set up during the handshake
self.krb_context = None
def authenticate_handshake (self, connection, method, service_url, body, headers, kinit_cmd, klist_cmd):
# Full handshake: returns the authenticated HTTP response, or None on
# failure; always releases the GSSAPI context afterwards.
# kinit to ensure ticket valid
self.execute_kinit(kinit_cmd, klist_cmd)
try:
# Authenticate the client request
response = self.authenticate_client(connection, method, service_url, body, headers)
# Authenticate the response from the server
if response:
self.authenticate_server(response)
return response
finally:
# Clean the client context after the handshake
self.clean_client_context()
pass
def execute_kinit(self, kinit_cmd, klist_cmd):
# Shell out to kinit (klist only for debug logging); returns the
# kinit exit status as reported by os.system.
exit_status = os.system(kinit_cmd)
logger.debug("kinit exit_status: {0}".format(exit_status))
logger.debug(os.system(klist_cmd))
return exit_status
def authenticate_client(self, connection, method, service_url, body, headers):
# SPNEGO service principal is HTTP@<host>
service = "HTTP@%s" % connection.host.lower()
logger.debug("connection: %s", connection)
logger.debug("service: %s", service)
auth_header = self.get_authorization_header(service)
logger.debug("Authorization: %s" % auth_header)
# Send 2nd HTTP request with authorization header
headers['Authorization'] = auth_header
try:
connection.request(method, service_url, body, headers)
response = connection.getresponse()
except Exception, e:
logger.warn('2nd HTTP request exception from server: %s' % str(e))
return None
pass
if response:
logger.debug("2nd HTTP response from server: retcode = {0}, reason = {1}"
.format(response.status, response.reason))
# NOTE(review): response.read() here consumes the body, so callers
# only get status/headers afterwards -- confirm this is intended.
logger.debug(str(response.read()))
logger.debug("response headers: {0}".format(response.getheaders()))
return response
def get_authorization_header(self, service):
# Builds the "Negotiate <token>" header for the given service
# principal, or returns None on any GSSAPI failure.
# Initialize the context object for client-side authentication with a service principal
try:
result, self.krb_context = kerberos.authGSSClientInit(service)
if result == -1:
logger.warn('authGSSClientInit result: {0}'.format(result))
return None
except kerberos.GSSError, e:
logger.warn('authGSSClientInit exception: %s' % str(e))
return None
pass
# Process the first client-side step with the context
try:
result = kerberos.authGSSClientStep(self.krb_context, "")
if result == -1:
logger.warn('authGSSClientStep result for authenticate client: {0}'.format(result))
return None
except kerberos.GSSError, e:
logger.warn('authGSSClientStep exception for authenticate client: %s' % str(e))
return None
pass
# Get the client response from the first client-side step
try:
negotiate_value = kerberos.authGSSClientResponse(self.krb_context)
logger.debug("authGSSClientResponse response:{0}".format(negotiate_value))
except kerberos.GSSError, e:
logger.warn('authGSSClientResponse exception: %s' % str(e))
return None
pass
# Build the authorization header
return "Negotiate %s" % negotiate_value
def authenticate_server(self, response):
# Verifies the server's www-authenticate token (mutual auth);
# returns the kerberos step result, -1 on failure.
auth_header = response.getheader('www-authenticate', None)
negotiate_value = self.get_negotiate_value(auth_header)
if negotiate_value == None:
logger.warn('www-authenticate header not found')
# Process the client-side step with the context and the negotiate value from 2nd HTTP response
try:
result = kerberos.authGSSClientStep(self.krb_context, negotiate_value)
if result == -1:
logger.warn('authGSSClientStep result for authenticate server: {0}'.format(result))
except kerberos.GSSError, e:
logger.warn('authGSSClientStep exception for authenticate server: %s' % str(e))
result = -1
pass
return result
def clean_client_context(self):
# Destroy the context for client-side authentication
try:
result = kerberos.authGSSClientClean(self.krb_context)
logger.debug("authGSSClientClean result:{0}".format(result))
except kerberos.GSSError, e:
logger.warn('authGSSClientClean exception: %s' % str(e))
result = -1
pass
return result
def get_hadoop_auth_cookie(self, set_cookie_header):
# Extract the "hadoop.auth=..." field from a Set-Cookie header value,
# or return None when absent.
if set_cookie_header:
for field in set_cookie_header.split(";"):
if field.startswith('hadoop.auth='):
return field
else:
return None
return None
def get_negotiate_value(self, auth_header):
# Extract the token following "Negotiate" from a www-authenticate
# header value, or return None when absent.
if auth_header:
for field in auth_header.split(","):
key, __, value = field.strip().partition(" ")
if key.lower() == "negotiate":
return value.strip()
else:
return None
return None
| 34.763636 | 106 | 0.70537 | 714 | 5,736 | 5.568627 | 0.242297 | 0.027666 | 0.021127 | 0.028924 | 0.26006 | 0.216298 | 0.17505 | 0.116197 | 0.116197 | 0.057093 | 0 | 0.005045 | 0.205195 | 5,736 | 164 | 107 | 34.97561 | 0.867076 | 0.101464 | 0 | 0.382609 | 0 | 0 | 0.165563 | 0.009591 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.069565 | 0.06087 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
1cc47da867a0c62e2fccbf156f238bef90f00700 | 4,323 | py | Python | leetcode/H0032_Longest_Valid_Parentheses.py | jjmoo/daily | fb8cf0e64606a2a76a6141bb0e9ccd143c30f07c | [
"MIT"
] | 1 | 2020-03-27T16:42:02.000Z | 2020-03-27T16:42:02.000Z | leetcode/H0032_Longest_Valid_Parentheses.py | jjmoo/daily | fb8cf0e64606a2a76a6141bb0e9ccd143c30f07c | [
"MIT"
] | null | null | null | leetcode/H0032_Longest_Valid_Parentheses.py | jjmoo/daily | fb8cf0e64606a2a76a6141bb0e9ccd143c30f07c | [
"MIT"
] | null | null | null | from utils import Benchmark
class Solution:
def longestValidParenthesesTimeout(self, s: str) -> int:
if not s: return 0
def valid(start, end):
if start >= end: return True
cnt = 0
for i in range(start, end):
if '(' == s[i]: cnt += 1
elif 0 == cnt: return False
else: cnt -= 1
return 0 == cnt
n, max_l = len(s), 0
for i in range(n):
for j in range(n, i, -1):
if valid(i, j):
max_l = max(max_l, j - i)
break
return max_l
def longestValidParenthesesMy(self, s: str) -> int:
if not s: return 0
stack, l, max_l = [], 0, 0
for ch in s:
if '(' == ch:
stack.append(l)
l = 0
elif stack:
l += 2 + stack.pop()
max_l = max(max_l, l)
else:
l = 0
return max_l
def longestValidParenthesesDp(self, s: str) -> int:
if not s: return 0
n = len(s)
dp = [0] * n
for i in range(1, n):
if '(' == s[i]: continue
if '(' == s[i - 1]:
dp[i] = 2 + (0 if i < 2 else dp[i - 2])
else:
pair = i - 1 - dp[i - 1]
if pair >= 0 and '(' == s[pair]:
dp[i] = 2 + dp[i - 1] + (dp[pair - 1] if pair > 0 else 0)
return max(dp)
def longestValidParenthesesStack(self, s: str) -> int:
if not s: return 0
stack, max_l = [-1], 0
for i, ch in enumerate(s):
if '(' == ch:
stack.append(i)
else:
stack.pop()
if not stack: stack.append(i)
else: max_l = max(max_l, i - stack[-1])
return max_l
def longestValidParenthesesTwo(self, s: str) -> int:
if not s: return 0
max_l = 0
left, right = 0, 0
for ch in s:
if '(' == ch: left += 1
else: right += 1
if left == right: max_l = max(max_l, left + right)
elif left < right: left, right = 0, 0
left, right = 0, 0
for ch in reversed(s):
if '(' == ch: left += 1
else: right += 1
if left == right: max_l = max(max_l, left + right)
elif left > right: left, right = 0, 0
return max_l
with Benchmark('test'):
test = Solution().longestValidParenthesesTwo
print(2, test('()'))
print(4, test(')()())'))
print(2, test('()(()'))
print(6, test('))())(()))()(((()())(('))
print(8, test('(((()))()'))
print(490, test('))())(()))()(((()())(()(((()))))((()(())()((((()))())))())))()(()(()))))())(((())(()()))((())()())((()))(()(())(())((())((((()())()))((()(())()))()(()))))))()))(()))))()())()())()()()()()()()))()(((()()((()(())((()())))(()())))))))(()()(())())(()))))))()()())((((()()()())))))((())(())()()(()((()()))()()())(()())()))()(()(()())))))())()(())(()))(())()(())()((())()((((()()))())(((((())))())())(()((())((()()((((((())))(((())))))))(()()((((((()(((())()(()))(()())((()(((()((()(())())()())(((()))()(((()))))(())))(())()())()(((()))))((())())))())()()))((((()))(())()())()(((())(())(()()((())()())()()())())))((()())(()((()()()(()())(()))(()())((((()(()(((()(((())()((()(()))())()())))))))))))()())()(()(((())()))(((()))((((()())())(()())((()())(()()((()((((()())))()(())(())()))))(())())))))(((((((())(((((()))()))(()()()()))))))(()(()(()(()()(((()()))((()))())((())())()())()))()()(((())))()(())()()(())))(((()))))))))(())((()((()((()))))()()()((())((((((((((()(())))(())((()(()())())(((((((()()()()))())(((()())()(()()))))(()()))))(((()()((()()()(((()))))(()()())()()()(()))))()(())))))))()((((((((()((())))))))(()))()((()())())('))
# Given a string containing just the characters '(' and ')', find the length of the longest valid (well-formed) parentheses substring.
# Example 1:
# Input: "(()"
# Output: 2
# Explanation: The longest valid parentheses substring is "()"
# Example 2:
# Input: ")()())"
# Output: 4
# Explanation: The longest valid parentheses substring is "()()"
# 来源:力扣(LeetCode)
# 链接:https://leetcode-cn.com/problems/longest-valid-parentheses
# 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
| 40.783019 | 1,154 | 0.341892 | 413 | 4,323 | 3.535109 | 0.217918 | 0.049315 | 0.027397 | 0.037671 | 0.35 | 0.292466 | 0.292466 | 0.226712 | 0.191781 | 0.142466 | 0 | 0.019582 | 0.291233 | 4,323 | 105 | 1,155 | 41.171429 | 0.456919 | 0.101781 | 0 | 0.325 | 0 | 0 | 0.30646 | 0.297674 | 0 | 0 | 0 | 0 | 0 | 1 | 0.075 | false | 0 | 0.0125 | 0 | 0.175 | 0.075 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1cc9d0b33d1f1efcc70106837eb2caf8b3b8bb97 | 3,490 | py | Python | django/slides/models.py | jvacheesuthum/LiveLe-Webapp | 9a4b9281316c5beeb829c909f3aa5ccd738ecd71 | [
"Apache-2.0"
] | null | null | null | django/slides/models.py | jvacheesuthum/LiveLe-Webapp | 9a4b9281316c5beeb829c909f3aa5ccd738ecd71 | [
"Apache-2.0"
] | null | null | null | django/slides/models.py | jvacheesuthum/LiveLe-Webapp | 9a4b9281316c5beeb829c909f3aa5ccd738ecd71 | [
"Apache-2.0"
] | null | null | null | import json
from django.db import models
from django.contrib.auth.models import User, Group
from channels import Group as Channel_Group
from django.shortcuts import get_object_or_404
import os
from django.forms import ModelForm
from django import forms
import binascii
# Create your models here.
class Token(models.Model):
    """Per-user API token; the 40-char hex value is generated on first save."""
    # NOTE(review): on_delete added for consistency with every other FK in this
    # file; CASCADE was already the implicit default behaviour in old Django.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    token = models.CharField(max_length=40, primary_key=True)
    created = models.DateTimeField(auto_now_add=True)

    def save(self, *args, **kwargs):
        # Lazily generate the token so callers never need to supply one.
        if not self.token:
            self.token = self.generate_token()
        return super(Token, self).save(*args, **kwargs)

    def generate_token(self):
        # 20 random bytes -> 40 hex characters, matching max_length above.
        return binascii.hexlify(os.urandom(20)).decode()

    def __unicode__(self):
        return self.token
###############
def rename(instance, filename):
    """``upload_to`` callable for ``FileField``: keep the original file name.

    ``instance`` is the model instance being saved (unused); ``filename`` is
    the client-supplied name of the uploaded file.
    """
    # '/'.join([filename]) was a single-element join, i.e. a no-op.
    return filename
class PDF(models.Model):
    """An uploaded lecture PDF, owned by a lecturer and attached to a course."""
    filename = models.CharField(max_length=200)
    course = models.ForeignKey(Group, on_delete=models.CASCADE, default=1)
    lecturer = models.ForeignKey(User, on_delete=models.CASCADE, default=1)
    current_page = models.IntegerField(default=1)
    pdffile = models.FileField(upload_to=rename)

    def __str__(self):
        return self.filename

    def save(self, *args, **kwargs):
        """Delete the previously stored file when it is being replaced."""
        # BUG FIX: the original bare `except: pass` swallowed *every* error
        # (database failures included); only "no previous row" is expected here.
        try:
            this = PDF.objects.get(id=self.id)
            if this.pdffile != self.pdffile:
                this.pdffile.delete(save=False)
        except PDF.DoesNotExist:
            # First save: there is no old row/file to clean up.
            pass
        super(PDF, self).save(*args, **kwargs)
class PDFForm(ModelForm):
    # Upload form: only the file itself and its display name are user-editable.
    class Meta:
        model = PDF
        fields = ['pdffile', 'filename']
class Current(models.Model):
    """Tracks which page of which PDF an owner currently has open."""
    owner = models.ForeignKey(User, on_delete=models.CASCADE, default=1)
    pdf = models.ForeignKey(PDF, on_delete=models.CASCADE, default=1)
    page = models.IntegerField()
    active = models.IntegerField(default=0)

    def __str__(self):
        state = ' active' if self.active == 1 else ' inactive'
        return '{} {} page{}'.format(self.owner, self.pdf, self.page) + state
class Votes(models.Model):
    """A per-page happiness vote cast by a user on a PDF."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    pdf = models.ForeignKey(PDF, on_delete=models.CASCADE)
    page = models.IntegerField()
    value = models.IntegerField(default=0)

    def __str__(self):
        mood = ' happy' if self.value == 0 else ' unhappy'
        return '{} {} page{}'.format(self.user, self.pdf, self.page) + mood
class Speed(models.Model):
    """A user's pace feedback (slow/fast) for a PDF presentation."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    pdf = models.ForeignKey(PDF, on_delete=models.CASCADE)
    value = models.IntegerField(default=0)

    def __str__(self):
        pace = ' slow' if self.value == 0 else ' fast'
        return '{} {}'.format(self.user, self.pdf) + pace
class Question(models.Model):
    """A free-text question asked about a specific page of a PDF."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    pdf = models.ForeignKey(PDF, on_delete=models.CASCADE)
    page = models.IntegerField()
    text = models.TextField(verbose_name='Question')

    def __str__(self):
        return '{} {} page{} {}'.format(self.user, self.pdf, self.page, self.text)
class Question_Vote(models.Model):
    """Records that a user upvoted a question."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    question = models.ForeignKey(Question, on_delete=models.CASCADE)

    def __str__(self):
        return '{} likes {}'.format(self.user, self.question)
class QuestionForm(ModelForm):
    # Form for submitting a new question; only the text is user-editable.
    class Meta:
        model = Question
        fields = ['text']
| 32.924528 | 129 | 0.668195 | 444 | 3,490 | 5.128378 | 0.256757 | 0.05841 | 0.073781 | 0.110672 | 0.433904 | 0.401405 | 0.369785 | 0.346948 | 0.328063 | 0.285024 | 0 | 0.007538 | 0.201719 | 3,490 | 105 | 130 | 33.238095 | 0.809763 | 0.021777 | 0 | 0.282051 | 0 | 0 | 0.027974 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.141026 | false | 0.012821 | 0.115385 | 0.115385 | 0.846154 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
1ccfda4b387ee94472722aefdeaa2ccd8b70b6e0 | 612 | py | Python | stubs/micropython-esp8266-1_11/usocket.py | RonaldHiemstra/micropython-stubs | d97f879b01f6687baaebef1c7e26a80909c3cff3 | [
"MIT"
] | 38 | 2020-10-18T21:59:44.000Z | 2022-03-17T03:03:28.000Z | stubs/micropython-esp8266-1_11/usocket.py | RonaldHiemstra/micropython-stubs | d97f879b01f6687baaebef1c7e26a80909c3cff3 | [
"MIT"
] | 176 | 2020-10-18T14:31:03.000Z | 2022-03-30T23:22:39.000Z | stubs/micropython-esp8266-1_11/usocket.py | RonaldHiemstra/micropython-stubs | d97f879b01f6687baaebef1c7e26a80909c3cff3 | [
"MIT"
] | 6 | 2020-12-28T21:11:12.000Z | 2022-02-06T04:07:50.000Z | """
Module: 'usocket' on esp8266 v1.11
"""
# MCU: (sysname='esp8266', nodename='esp8266', release='2.2.0-dev(9422289)', version='v1.11-8-g48dcbbe60 on 2019-05-29', machine='ESP module with ESP8266')
# Stubber: 1.1.0
# Socket constants mirrored from the on-device firmware (values as reported
# by the stubber on esp8266 v1.11).
AF_INET = 2
AF_INET6 = 10
IPPROTO_IP = 0
IP_ADD_MEMBERSHIP = 1024
SOCK_DGRAM = 2
SOCK_RAW = 3
SOCK_STREAM = 1
SOL_SOCKET = 1
SO_REUSEADDR = 4
# Auto-generated stub functions: signatures only, no behaviour is reproduced.
def callback():
    pass

def getaddrinfo():
    pass

def print_pcbs():
    pass

def reset():
    pass
class socket:
    ''
    # Auto-generated stub of the firmware socket type; all methods are
    # placeholders with no arguments or behaviour.

    def accept():
        pass

    def bind():
        pass

    def close():
        pass

    def connect():
        pass
| 14.571429 | 155 | 0.614379 | 90 | 612 | 4.055556 | 0.622222 | 0.115068 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.135076 | 0.25 | 612 | 41 | 156 | 14.926829 | 0.660131 | 0.333333 | 0 | 0.296296 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.296296 | false | 0.296296 | 0 | 0 | 0.333333 | 0.037037 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
1cd20317717a28af427b8416d01317283c36d7a0 | 186 | py | Python | python/Exercicios/ex027.py | Robert-Marchinhaki/primeiros-passos-Python | 515c2c418bfb941bd9af14cf598eca7fe2985592 | [
"MIT"
] | null | null | null | python/Exercicios/ex027.py | Robert-Marchinhaki/primeiros-passos-Python | 515c2c418bfb941bd9af14cf598eca7fe2985592 | [
"MIT"
] | null | null | null | python/Exercicios/ex027.py | Robert-Marchinhaki/primeiros-passos-Python | 515c2c418bfb941bd9af14cf598eca7fe2985592 | [
"MIT"
] | null | null | null | # PEGANDO O PRIMEIRO E O ÚLTIMO NOME DE UMA PESSOA
# Read a full name and print only its first and last parts.
nome = input('Digite seu nome completo: ')
partes = nome.split()
primeiro = partes[0]
ultimo = partes[-1]
print(primeiro)
print(ultimo)
1cd3e4849d8142e040fa6b1290a6ff78c3af55b6 | 1,191 | py | Python | seqauto/migrations/0013_auto_20210324_1809.py | SACGF/variantgrid | 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | [
"RSA-MD"
] | 5 | 2021-01-14T03:34:42.000Z | 2022-03-07T15:34:18.000Z | seqauto/migrations/0013_auto_20210324_1809.py | SACGF/variantgrid | 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | [
"RSA-MD"
] | 551 | 2020-10-19T00:02:38.000Z | 2022-03-30T02:18:22.000Z | seqauto/migrations/0013_auto_20210324_1809.py | SACGF/variantgrid | 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | [
"RSA-MD"
] | null | null | null | # Generated by Django 3.1.3 on 2021-03-24 07:39
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (see header comment); adds code/open fields to
    # SeqAutoMessage and relaxes message/record/seqauto_run definitions.
    # Do not edit by hand.

    dependencies = [
        ('seqauto', '0012_auto_20210324_1702'),
    ]

    operations = [
        migrations.AddField(
            model_name='seqautomessage',
            name='code',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='seqautomessage',
            name='open',
            field=models.BooleanField(default=True),
        ),
        migrations.AlterField(
            model_name='seqautomessage',
            name='message',
            field=models.TextField(),
        ),
        migrations.AlterField(
            model_name='seqautomessage',
            name='record',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='seqauto.seqautorecord'),
        ),
        migrations.AlterField(
            model_name='seqautomessage',
            name='seqauto_run',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='seqauto.seqautorun'),
        ),
    ]
| 29.775 | 120 | 0.592779 | 113 | 1,191 | 6.150442 | 0.424779 | 0.064748 | 0.165468 | 0.194245 | 0.548201 | 0.548201 | 0.215827 | 0.215827 | 0.215827 | 0.215827 | 0 | 0.036643 | 0.289673 | 1,191 | 39 | 121 | 30.538462 | 0.78487 | 0.037783 | 0 | 0.454545 | 1 | 0 | 0.149476 | 0.038462 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.060606 | 0 | 0.151515 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1ce5d1e5f0453defb7dfcdbf4981e39e4f55b35a | 281 | py | Python | python/pytest_fixtures/fizzbuzz.py | zkan/100DaysOfCode | 3c713ead94a9928e2d0f8d794e49ec202dc64ba3 | [
"MIT"
] | 2 | 2019-05-01T00:32:30.000Z | 2019-11-20T05:23:05.000Z | python/pytest_fixtures/fizzbuzz.py | zkan/100DaysOfCode | 3c713ead94a9928e2d0f8d794e49ec202dc64ba3 | [
"MIT"
] | 15 | 2020-09-05T18:35:04.000Z | 2022-03-11T23:44:47.000Z | python/pytest_fixtures/fizzbuzz.py | zkan/100DaysOfCode | 3c713ead94a9928e2d0f8d794e49ec202dc64ba3 | [
"MIT"
] | null | null | null | class FizzBuzz(object):
def say(self, number):
if number % 3 == 0 and number % 5 == 0:
return 'FizzBuzz'
elif number % 3 == 0:
return 'Fizz'
elif number % 5 == 0:
return 'Buzz'
else:
return number
| 25.545455 | 47 | 0.462633 | 32 | 281 | 4.0625 | 0.53125 | 0.161538 | 0.123077 | 0.215385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.050314 | 0.434164 | 281 | 10 | 48 | 28.1 | 0.767296 | 0 | 0 | 0 | 0 | 0 | 0.05694 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
1ce92dadef299f2261823526afda6860635d8f56 | 228 | py | Python | amicleaner/__init__.py | indyaah/aws-amicleaner | 718dbd889a2576b291dc211b82ffd784372869ba | [
"MIT"
] | null | null | null | amicleaner/__init__.py | indyaah/aws-amicleaner | 718dbd889a2576b291dc211b82ffd784372869ba | [
"MIT"
] | null | null | null | amicleaner/__init__.py | indyaah/aws-amicleaner | 718dbd889a2576b291dc211b82ffd784372869ba | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Package metadata for amicleaner.
__title__ = 'amicleaner'
__version__ = '0.2.0'
# Short version keeps only the first two components, e.g. '0.2.0' -> '0.2'.
__short_version__ = '.'.join(__version__.split('.')[:2])
__author__ = 'Guy Rodrigue Koffi'
__author_email__ = 'koffirodrigue@gmail.com'
__license__ = 'MIT'
| 25.333333 | 56 | 0.692982 | 26 | 228 | 4.923077 | 0.807692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024752 | 0.114035 | 228 | 8 | 57 | 28.5 | 0.608911 | 0.092105 | 0 | 0 | 0 | 0 | 0.297561 | 0.112195 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1cead215f994c6682817b7b8139a2cd54b64423f | 823 | py | Python | dfvfs/resolver_helpers/apfs_container_resolver_helper.py | dfrc-korea/dfvfs | 7be70af72f56f4feadd50206e33b0f5024907473 | [
"Apache-2.0"
] | 1 | 2021-02-15T03:41:46.000Z | 2021-02-15T03:41:46.000Z | dfvfs/resolver_helpers/apfs_container_resolver_helper.py | dfrc-korea/dfvfs | 7be70af72f56f4feadd50206e33b0f5024907473 | [
"Apache-2.0"
] | null | null | null | dfvfs/resolver_helpers/apfs_container_resolver_helper.py | dfrc-korea/dfvfs | 7be70af72f56f4feadd50206e33b0f5024907473 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""The APFS container path specification resolver helper implementation."""
from dfvfs.lib import definitions
from dfvfs.resolver_helpers import manager
from dfvfs.resolver_helpers import resolver_helper
from dfvfs.vfs import apfs_container_file_system
class APFSContainerResolverHelper(resolver_helper.ResolverHelper):
  """Resolver helper for APFS container path specifications."""

  TYPE_INDICATOR = definitions.TYPE_INDICATOR_APFS_CONTAINER

  def NewFileSystem(self, resolver_context):
    """Creates a new file system object.

    Args:
      resolver_context (Context): resolver context.

    Returns:
      APFSContainerFileSystem: file system.
    """
    file_system = apfs_container_file_system.APFSContainerFileSystem(
        resolver_context)
    return file_system
# Register the helper so the resolver manager can resolve APFS container specs.
manager.ResolverHelperManager.RegisterHelper(APFSContainerResolverHelper())
| 29.392857 | 79 | 0.795869 | 86 | 823 | 7.418605 | 0.465116 | 0.101881 | 0.053292 | 0.075235 | 0.094044 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001397 | 0.130012 | 823 | 27 | 80 | 30.481481 | 0.889665 | 0.319563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.444444 | 0 | 0.888889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
1cf306e380e03da293f6ec81adde485bcdb99798 | 411 | py | Python | 17/00/Test_GC.py | pylangstudy/201709 | 53d868786d7327a83bfa7f4149549c6f9855a6c6 | [
"CC0-1.0"
] | null | null | null | 17/00/Test_GC.py | pylangstudy/201709 | 53d868786d7327a83bfa7f4149549c6f9855a6c6 | [
"CC0-1.0"
] | 32 | 2017-09-01T00:52:17.000Z | 2017-10-01T00:30:02.000Z | 17/00/Test_GC.py | pylangstudy/201709 | 53d868786d7327a83bfa7f4149549c6f9855a6c6 | [
"CC0-1.0"
] | null | null | null | import gc
from pprint import pprint
import weakref
from Test_CircularReference import Graph, demo, collect_and_show_garbage
# Demonstrate gc leak debugging: build a reference cycle, break it, clean up.
gc.set_debug(gc.DEBUG_LEAK)

print('Setting up the cycle')
print()
demo(Graph)
print()
print('Breaking the cycle and cleaning up garbage')
print()
# Break the cycle; raises IndexError if the collector found no garbage.
gc.garbage[0].set_next(None)
while gc.garbage:
    del gc.garbage[0]
# BUG FIX: the original bare `print` is a no-op name reference in Python 3;
# calling it emits the intended blank line.
print()
collect_and_show_garbage()
| 20.55 | 72 | 0.790754 | 66 | 411 | 4.772727 | 0.515152 | 0.085714 | 0.088889 | 0.133333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005525 | 0.119221 | 411 | 19 | 73 | 21.631579 | 0.864641 | 0.085158 | 0 | 0.1875 | 0 | 0 | 0.165333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.25 | 0 | 0.25 | 0.4375 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
1cfd1b2218b6b9b5a2c2c08e5df261d3fb630d53 | 32,306 | py | Python | versions/WoT_1.10.1.0/battle_results_shared.py | KIHestad/WoT-Battle_Parser | 8dbf9baa8e8f6da2967fb20528bcb87c9c52ff05 | [
"MIT"
] | null | null | null | versions/WoT_1.10.1.0/battle_results_shared.py | KIHestad/WoT-Battle_Parser | 8dbf9baa8e8f6da2967fb20528bcb87c9c52ff05 | [
"MIT"
] | null | null | null | versions/WoT_1.10.1.0/battle_results_shared.py | KIHestad/WoT-Battle_Parser | 8dbf9baa8e8f6da2967fb20528bcb87c9c52ff05 | [
"MIT"
] | 2 | 2021-11-10T19:12:18.000Z | 2022-03-13T10:05:31.000Z | # uncompyle6 version 3.7.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.8 (default, Jun 30 2014, 16:08:48) [MSC v.1500 64 bit (AMD64)]
# Embedded file name: scripts/common/battle_results_shared.py
import struct
from itertools import izip
from battle_results_constants import BATTLE_RESULT_ENTRY_TYPE as ENTRY_TYPE
from battle_results_constants import VEHICLE_DEVICE_TYPE_NAMES, VEHICLE_TANKMAN_TYPE_NAMES, FLAG_ACTION
from dictpackers import *
from badges_common import BadgesCommon
def _buildMapsForExt(fields):
    """Build the (Meta, default-value pairs, name->index map) triple for one
    extension section; ``fields`` is a sequence of Meta field descriptors."""
    defaults = tuple((field[0], field[2]) for field in fields)
    indices = {field[0]: position for position, field in enumerate(fields)}
    return (Meta(fields), defaults, indices)
class PREMIUM_TYPE:
    """Bit flags for the premium-account kinds plus helpers over a flag mask."""
    NONE = 0
    BASIC = 1
    PLUS = 2
    VIP = 4
    # Ascending precedence: VIP outranks PLUS, which outranks BASIC.
    TYPES_SORTED = (BASIC, PLUS, VIP)
    ANY = BASIC | PLUS | VIP
    AFFECTING_TYPES = PLUS | VIP

    @classmethod
    def activePremium(cls, premMask):
        """Return the highest-precedence premium flag set in premMask, or NONE."""
        matches = (kind for kind in reversed(cls.TYPES_SORTED) if premMask & kind)
        return next(matches, cls.NONE)

    @classmethod
    def initialData(cls):
        """Fresh per-account premium counters: one per type plus the raw mask."""
        data = {kind: 0 for kind in cls.TYPES_SORTED}
        data['premMask'] = 0
        return data
class PREM_BONUS_TYPES:
    # Index constants for the kinds of income a premium account can affect
    # (keys into _PREM_TYPE_TO_FACTOR100_NAMES below).
    CREDITS = 0
    XP = 1
    TMEN_XP = 2
# Per-target interaction record layout:
# (name, one-char format code (joined into ..._LAYOUT), max value, initial value).
VEH_INTERACTION_DETAILS = (
    ('spotted', 'B', 1, 0),
    ('deathReason', 'b', 10, -1),
    ('directHits', 'H', 65535, 0),
    ('directEnemyHits', 'H', 65535, 0),
    ('explosionHits', 'H', 65535, 0),
    ('piercings', 'H', 65535, 0),
    ('piercingEnemyHits', 'H', 65535, 0),
    ('damageDealt', 'I', 4294967295L, 0),
    ('damageAssistedTrack', 'H', 65535, 0),
    ('damageAssistedRadio', 'H', 65535, 0),
    ('damageAssistedStun', 'H', 65535, 0),
    ('damageAssistedSmoke', 'H', 65535, 0),
    ('damageAssistedInspire', 'H', 65535, 0),
    ('crits', 'I', 4294967295L, 0),
    ('fire', 'H', 65535, 0),
    ('stunNum', 'H', 65535, 0),
    ('stunDuration', 'f', 65535.0, 0.0),
    ('damageBlockedByArmor', 'I', 4294967295L, 0),
    ('damageReceived', 'H', 65535, 0),
    ('rickochetsReceived', 'H', 65535, 0),
    ('noDamageDirectHitsReceived', 'H', 65535, 0),
    ('targetKills', 'B', 255, 0))
# Derived lookup helpers over the table above.
VEH_INTERACTION_DETAILS_NAMES = [ x[0] for x in VEH_INTERACTION_DETAILS ]
VEH_INTERACTION_DETAILS_MAX_VALUES = dict((x[0], x[2]) for x in VEH_INTERACTION_DETAILS)
VEH_INTERACTION_DETAILS_INIT_VALUES = [ x[3] for x in VEH_INTERACTION_DETAILS ]
VEH_INTERACTION_DETAILS_LAYOUT = ('').join([ x[1] for x in VEH_INTERACTION_DETAILS ])
VEH_INTERACTION_DETAILS_INDICES = dict((x[1][0], x[0]) for x in enumerate(VEH_INTERACTION_DETAILS))
VEH_INTERACTION_DETAILS_TYPES = dict((x[0], x[1]) for x in VEH_INTERACTION_DETAILS)
# Per-vehicle stats that can change interactively during the battle,
# plus a name -> index lookup.
VEH_INTERACTIVE_STATS = ('xp', 'damageDealt', 'capturePts', 'flagActions', 'winPoints',
                         'deathCount', 'resourceAbsorbed', 'stopRespawn', 'equipmentDamage',
                         'equipmentKills')
VEH_INTERACTIVE_STATS_INDICES = dict((x[1], x[0]) for x in enumerate(VEH_INTERACTIVE_STATS))
# Avatar-private stats and their name -> index lookup.
AVATAR_PRIVATE_STATS = ('ragePoints', )
AVATAR_PRIVATE_STATS_INDICES = dict((x[1], x[0]) for x in enumerate(AVATAR_PRIVATE_STATS))
# Maps a PREM_BONUS_TYPES kind to, per premium type, the name of the matching
# *Factor100 economics parameter. NOTE(review): the last value keeps the
# original's inconsistent spelling 'premiumVipXPTmenFactor100' — confirm
# against the server economics config before "fixing" it.
_PREM_TYPE_TO_FACTOR100_NAMES = {
    PREM_BONUS_TYPES.CREDITS: {
        PREMIUM_TYPE.BASIC: 'premiumCreditsFactor100',
        PREMIUM_TYPE.PLUS: 'premiumPlusCreditsFactor100',
        PREMIUM_TYPE.VIP: 'premiumVipCreditsFactor100'},
    PREM_BONUS_TYPES.XP: {
        PREMIUM_TYPE.BASIC: 'premiumXPFactor100',
        PREMIUM_TYPE.PLUS: 'premiumPlusXPFactor100',
        PREMIUM_TYPE.VIP: 'premiumVipXPFactor100'},
    PREM_BONUS_TYPES.TMEN_XP: {
        PREMIUM_TYPE.BASIC: 'premiumTmenXPFactor100',
        PREMIUM_TYPE.PLUS: 'premiumPlusTmenXPFactor100',
        PREMIUM_TYPE.VIP: 'premiumVipXPTmenFactor100'}}
# Meta field layout used throughout this module:
# (name, type, default, packer, join/aggregation rule, entry type).
_PRIVATE_EVENT_RESULTS = Meta([
    ('eventCredits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventFreeXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventTMenXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventGold', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventCrystal', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventEventCoin', int, 0, None, 'sum', ENTRY_TYPE.COMMON)])
_AVATAR_CELL_RESULTS_PRIVATE = Meta([
    ('avatarAmmo', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('avatarDamageEventList', set, set(), None, 'skip', ENTRY_TYPE.COMMON)])
_AVATAR_CELL_RESULTS_SERVER = Meta([
    ('avatarAmmoEquipped', set, set(), None, 'skip', ENTRY_TYPE.COMMON)])
_AVATAR_CELL_RESULTS_PUBLIC = Meta([
    ('avatarDamageDealt', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('avatarKills', int, 0, None, 'skip', ENTRY_TYPE.COMMON)])
_AVATAR_BASE_SERVER_RESULTS = Meta([
    ('cybersportRatingDeltas', tuple, (0.0, 0.0), None, 'skip', ENTRY_TYPE.COMMON),
    ('vehRankRaised', int, 0, None, 'skip', ENTRY_TYPE.COMMON)])
_AVATAR_BASE_PRIVATE_RESULTS = Meta([
    ('accountDBID', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('team', int, 1, None, 'skip', ENTRY_TYPE.COMMON),
    ('clanDBID', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('fortClanDBIDs', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('winnerIfDraw', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('isPrematureLeave', bool, False, None, 'skip', ENTRY_TYPE.COMMON),
    ('watchedBattleToTheEnd', bool, False, None, 'skip', ENTRY_TYPE.COMMON),
    ('squadBonusInfo', None, None, None, 'skip', ENTRY_TYPE.COMMON),
    ('progressiveReward', None, None, None, 'skip', ENTRY_TYPE.COMMON),
    ('rankChange', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('updatedRankChange', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('accRank', tuple, (0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('vehRank', tuple, (0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('prevAccRank', tuple, (0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('prevMaxRank', tuple, (0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('prevVehRank', tuple, (0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('shields', dict, {}, None, 'skip', ENTRY_TYPE.COMMON),
    ('prevShields', dict, {}, None, 'skip', ENTRY_TYPE.COMMON),
    ('rankedSeason', tuple, (0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('rankedSeasonNum', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('bonusBattleUsed', bool, False, None, 'skip', ENTRY_TYPE.COMMON),
    ('efficiencyBonusBattles', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('stepsBonusBattles', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('eligibleForCrystalRewards', bool, False, None, 'skip', ENTRY_TYPE.COMMON),
    ('activeRents', dict, {}, None, 'skip', ENTRY_TYPE.COMMON),
    ('recruitsIDs', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('recruiterID', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('fareTeamXPPosition', int, 0, None, 'skip', ENTRY_TYPE.COMMON)])
# Optional extension sections attached to the avatar's public results,
# keyed by game mode/feature name.
_AVATAR_BASE_PUBLIC_RESULTS_EXTS = {
    'playerRank': _buildMapsForExt([
        ('rank', int, 0, None, 'skip', ENTRY_TYPE.COMMON)]),
    'epicMetaGame': _buildMapsForExt([
        ('creditsAfterShellCosts', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
        ('unchargedShellCosts', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
        ('prevMetaLevel', tuple, (0, 1, 0), None, 'skip', ENTRY_TYPE.COMMON),
        ('metaLevel', tuple, (0, 1, 0), None, 'skip', ENTRY_TYPE.COMMON),
        ('flXP', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
        ('originalFlXP', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
        ('subtotalFlXP', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
        ('boosterFlXP', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
        ('boosterFlXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
        ('flXPReplay', str, '', ValueReplayPacker(), 'skip', ENTRY_TYPE.COMMON)]),
    'battlePass': _buildMapsForExt([
        ('basePointsDiff', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
        ('sumPoints', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
        ('hasBattlePass', bool, False, None, 'skip', ENTRY_TYPE.COMMON)])}
_AVATAR_BASE_PUBLIC_RESULTS = Meta([
    ('avatarDamaged', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('totalDamaged', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('fairplayViolations', tuple, (0, 0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('prevAccRank', tuple, (0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('badges', tuple, BadgesCommon.selectedBadgesEmpty(), None, 'skip', ENTRY_TYPE.COMMON),
    ('ext', dict, {}, BunchProxyPacker(_AVATAR_BASE_PUBLIC_RESULTS_EXTS), 'joinExts', ENTRY_TYPE.COMMON)])
_AVATAR_FULL_RESULTS_PRIVATE = Meta([
    ('questsProgress', dict, {}, None, 'skip', ENTRY_TYPE.COMMON),
    ('PM2Progress', dict, {}, None, 'skip', ENTRY_TYPE.COMMON)])
_AVATAR_DELETE_ME = Meta([
    ('credits', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('xp', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('freeXP', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('crystal', int, 0, None, 'skip', ENTRY_TYPE.COMMON)])
# Composite avatar result sets assembled from the sections above.
AVATAR_CELL_RESULTS = _AVATAR_CELL_RESULTS_PUBLIC + _AVATAR_CELL_RESULTS_PRIVATE + _AVATAR_CELL_RESULTS_SERVER
AVATAR_BASE_RESULTS = AVATAR_CELL_RESULTS + _AVATAR_BASE_PUBLIC_RESULTS + _AVATAR_BASE_SERVER_RESULTS + _AVATAR_BASE_PRIVATE_RESULTS
AVATAR_PUBLIC_RESULTS = _AVATAR_CELL_RESULTS_PUBLIC + _AVATAR_BASE_PUBLIC_RESULTS
AVATAR_FULL_RESULTS = _AVATAR_CELL_RESULTS_PUBLIC + _AVATAR_CELL_RESULTS_PRIVATE + _AVATAR_BASE_PUBLIC_RESULTS + _AVATAR_BASE_PRIVATE_RESULTS + _AVATAR_FULL_RESULTS_PRIVATE + _PRIVATE_EVENT_RESULTS + _AVATAR_DELETE_ME
# Per-player identity/prebattle info. 'team' appears twice deliberately: once
# for the ACCOUNT_SELF view and once for the PLAYER_INFO view.
PLAYER_INFO = [
    ('accountDBID', int, 0, None, 'any', ENTRY_TYPE.ACCOUNT_SELF),
    ('team', int, 1, None, 'skip', ENTRY_TYPE.ACCOUNT_SELF),
    ('clanDBID', int, 0, None, 'skip', ENTRY_TYPE.ACCOUNT_SELF),
    ('fortClanDBIDs', list, [], None, 'skip', ENTRY_TYPE.ACCOUNT_SELF),
    ('prebattleID', int, 0, None, 'skip', ENTRY_TYPE.PLAYER_INFO),
    ('team', int, 1, None, 'skip', ENTRY_TYPE.PLAYER_INFO),
    ('igrType', int, 0, None, 'skip', ENTRY_TYPE.PLAYER_INFO)]
PLAYER_INFO_META = Meta(PLAYER_INFO)
# Extension sections for the per-vehicle cell results, grouped by visibility
# (public / private / server-only).
VEH_CELL_RESULTS_EXTS = {'extPublic': {
    'recoveryMechanic': _buildMapsForExt([
        ('numRecovered', int, 0, None, 'sum', ENTRY_TYPE.COMMON)]),
    'sector': _buildMapsForExt([
        ('numCaptured', int, 0, None, 'sum', ENTRY_TYPE.COMMON)]),
    'destructibleEntity': _buildMapsForExt([
        ('numDestroyed', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
        ('damageDealt', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
        ('hits', int, 0, None, 'sum', ENTRY_TYPE.COMMON)]),
    'defenderBonus': _buildMapsForExt([
        ('numDefended', int, 0, None, 'sum', ENTRY_TYPE.COMMON)])},
    'extPrivate': {}, 'extServer': {
    'achievementsData': _buildMapsForExt([
        ('ironShieldDamage', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
        ('occupyingForceDestruction', bool, False, None, 'max', ENTRY_TYPE.COMMON),
        ('occupyingForceBasePoints', int, 0, None, 'sum', ENTRY_TYPE.COMMON)])}}
# Per-vehicle cell results visible to everyone.
_VEH_CELL_RESULTS_PUBLIC = Meta([
    ('health', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('maxHealth', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('credits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('xp', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('xp/attack', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('xp/assist', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('xp/other', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('xpPenalty', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('achievementCredits', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('achievementXP', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('achievementFreeXP', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('shots', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('directHits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('directTeamHits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('explosionHits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('piercings', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageDealt', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('sniperDamageDealt', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('equipmentDamageDealt', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageAssistedRadio', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageAssistedTrack', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageAssistedStun', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageAssistedSmoke', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageAssistedInspire', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('stunNum', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('stunDuration', float, 0.0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageReceived', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageReceivedFromInvisibles', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageBlockedByArmor', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('directHitsReceived', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('noDamageDirectHitsReceived', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('explosionHitsReceived', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('piercingsReceived', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('tdamageDealt', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('tdestroyedModules', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('tkills', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('isTeamKiller', bool, False, None, 'max', ENTRY_TYPE.COMMON),
    ('capturePoints', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('capturingBase', None, None, None, 'any', ENTRY_TYPE.COMMON),
    ('droppedCapturePoints', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('mileage', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('lifeTime', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('killerID', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('achievements', list, [], None, 'extend', ENTRY_TYPE.COMMON),
    ('potentialDamageReceived', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('rolloutsCount', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('deathCount', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('flagActions', list, [0] * len(FLAG_ACTION.RANGE), None, 'sumInEachPos', ENTRY_TYPE.COMMON),
    ('soloFlagCapture', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('flagCapture', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('winPoints', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('resourceAbsorbed', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('stopRespawn', bool, False, None, 'max', ENTRY_TYPE.COMMON),
    ('extPublic', dict, {}, BunchProxyPacker(VEH_CELL_RESULTS_EXTS['extPublic']), 'joinExts', ENTRY_TYPE.COMMON)])
# Per-vehicle cell results visible only to the owning player.
_VEH_CELL_RESULTS_PRIVATE = Meta([
    ('repair', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('freeXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('details', None, '', None, 'skip', ENTRY_TYPE.COMMON),
    ('extPrivate', dict, {}, BunchProxyPacker(VEH_CELL_RESULTS_EXTS['extPrivate']), 'joinExts', ENTRY_TYPE.COMMON)])
# Per-vehicle cell results kept server-side only.
_VEH_CELL_RESULTS_SERVER = Meta([
    ('canStun', bool, False, None, 'any', ENTRY_TYPE.COMMON),
    ('potentialDamageDealt', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('soloHitsAssisted', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('isEnemyBaseCaptured', bool, False, None, 'max', ENTRY_TYPE.COMMON),
    ('stucks', list, [], DeltaPacker(roundToInt), 'extend', ENTRY_TYPE.COMMON),
    ('autoAimedShots', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('presenceTime', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('spotList', list, [], None, 'extend', ENTRY_TYPE.COMMON),
    ('ammo', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('crewActivityFlags', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('series', dict, {}, None, 'skip', ENTRY_TYPE.COMMON),
    ('tkillRating', float, 0.0, None, 'sum', ENTRY_TYPE.COMMON),
    ('thitPenalties', dict, {}, None, 'joinTHitPenalties', ENTRY_TYPE.COMMON),
    # NOTE(review): 'sumByEackKey' is the join-rule name as spelled in the
    # original module; the typo is part of the protocol, do not "fix" it here.
    ('destroyedObjects', dict, {}, None, 'sumByEackKey', ENTRY_TYPE.COMMON),
    ('discloseShots', list, [], DeltaPacker(), 'extend', ENTRY_TYPE.COMMON),
    ('critsCount', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('aimerSeries', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('observedByEnemyTime', int, -1, None, 'any', ENTRY_TYPE.COMMON),
    ('critsByType', dict, {},
     DictPacker([
         ('destroyed', dict, {}, SimpleDictPacker(int, VEHICLE_DEVICE_TYPE_NAMES), 'skip', ENTRY_TYPE.COMMON),
         ('critical', dict, {}, SimpleDictPacker(int, VEHICLE_DEVICE_TYPE_NAMES), 'skip', ENTRY_TYPE.COMMON),
         ('tankman', dict, {}, SimpleDictPacker(int, VEHICLE_TANKMAN_TYPE_NAMES), 'skip', ENTRY_TYPE.COMMON)]),
     'joinCritsByType', ENTRY_TYPE.COMMON),
    ('innerModuleCritCount', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('innerModuleDestrCount', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('isAnyOurCrittedInnerModules', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('killsAssistedTrack', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('killsAssistedRadio', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('killsAssistedStun', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damagedVehicleCntAssistedTrack', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damagedVehicleCntAssistedRadio', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damagedVehicleCntAssistedStun', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('isNotSpotted', bool, True, None, 'max', ENTRY_TYPE.COMMON),
    ('isAnyHitReceivedWhileCapturing', bool, False, None, 'max', ENTRY_TYPE.COMMON),
    ('damageAssistedRadioWhileInvisible', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageAssistedTrackWhileInvisible', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageAssistedStunWhileInvisible', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageEventList', dict, {}, None, 'joinTargetEventLists', ENTRY_TYPE.COMMON),
    ('stunEventList', dict, {}, None, 'joinTargetEventLists', ENTRY_TYPE.COMMON),
    ('assistEventList', dict, {}, None, 'joinTargetEventLists', ENTRY_TYPE.COMMON),
    ('damageFromEnemiesEventList', dict, {}, None, 'joinTargetEventLists', ENTRY_TYPE.COMMON),
    ('multiDamageEvents', dict, {}, None, 'joinDicts', ENTRY_TYPE.COMMON),
    ('multiStunEvents', dict, {}, None, 'joinDicts', ENTRY_TYPE.COMMON),
    ('inBattleMaxSniperSeries', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('inBattleMaxKillingSeries', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('inBattleMaxPiercingSeries', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('firstDamageTime', int, 0, None, 'min', ENTRY_TYPE.COMMON),
    ('consumedAmmo', None, None, None, 'skip', ENTRY_TYPE.COMMON),
    ('extServer', dict, {}, BunchProxyPacker(VEH_CELL_RESULTS_EXTS['extServer']), 'joinExts', ENTRY_TYPE.COMMON),
    ('directEnemyHits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('explosionEnemyHits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('piercingEnemyHits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('indirectEnemyHits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('enemyHits', int, 0, None, 'sum', ENTRY_TYPE.COMMON)])
# Complete per-vehicle cell result set.
VEH_CELL_RESULTS = _VEH_CELL_RESULTS_PUBLIC + _VEH_CELL_RESULTS_PRIVATE + _VEH_CELL_RESULTS_SERVER
# Per-vehicle base-app results, again split by visibility.
_VEH_BASE_RESULTS_PUBLIC = Meta([
    ('accountDBID', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('typeCompDescr', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('index', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('deathReason', int, -1, None, 'skip', ENTRY_TYPE.COMMON),
    ('team', int, 1, None, 'skip', ENTRY_TYPE.COMMON),
    ('kills', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('spotted', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damaged', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('stunned', int, 0, None, 'sum', ENTRY_TYPE.COMMON)])
_VEH_BASE_RESULTS_PRIVATE = Meta([
    ('xpPenalty', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('creditsPenalty', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('creditsContributionIn', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('creditsContributionOut', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('originalCreditsToDraw', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('creditsToDraw', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageBeforeTeamWasDamaged', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('killsBeforeTeamWasDamaged', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('percentFromTotalTeamDamage', float, 0.0, None, 'sum', ENTRY_TYPE.COMMON),
    ('percentFromSecondBestDamage', float, 0.0, None, 'sum', ENTRY_TYPE.COMMON),
    ('killedAndDamagedByAllSquadmates', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('damagedWhileMoving', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damagedWhileEnemyMoving', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('committedSuicide', bool, False, None, 'max', ENTRY_TYPE.COMMON),
    ('crystal', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventCoin', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('piggyBank', int, 0, None, 'sum', ENTRY_TYPE.COMMON)])
_VEH_BASE_RESULTS_SERVER = Meta([
    ('spottedBeforeWeBecameSpotted', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('spottedAndDamagedSPG', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('damageList', list, [], None, 'extend', ENTRY_TYPE.COMMON),
    ('killList', list, [], None, 'extend', ENTRY_TYPE.COMMON),
    ('vehLockTimeFactor', float, 0.0, None, 'skip', ENTRY_TYPE.COMMON),
    ('misc', dict, {}, None, 'any', ENTRY_TYPE.COMMON),
    ('vehsByClass', dict, {}, None, 'any', ENTRY_TYPE.COMMON)])
# Economy/XP columns added to the full results update.  Same entry pattern as
# the tables above: (name, type, default, packer, merge-op, entry type).
# '...Replay' string columns carry a packed ValueReplay and are never merged
# ('skip'); 'Factor100'/'Factor10' columns appear to be fixed-point
# multipliers -- TODO confirm.
VEH_FULL_RESULTS_UPDATE = Meta([
    ('originalCredits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('creditsReplay', str, '', ValueReplayPacker(), 'skip', ENTRY_TYPE.COMMON),
    ('originalXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('xpReplay', str, '', ValueReplayPacker(), 'skip', ENTRY_TYPE.COMMON),
    ('originalFreeXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('freeXPReplay', str, '', ValueReplayPacker(), 'skip', ENTRY_TYPE.COMMON),
    ('originalTMenXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('tmenXPReplay', str, '', ValueReplayPacker(), 'skip', ENTRY_TYPE.COMMON),
    ('tmenXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('originalGold', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('goldReplay', str, '', ValueReplayPacker(), 'skip', ENTRY_TYPE.COMMON),
    ('gold', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('originalCrystal', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('crystalReplay', str, '', ValueReplayPacker(), 'skip', ENTRY_TYPE.COMMON),
    ('originalEventCoin', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventCoinReplay', str, '', ValueReplayPacker(), 'skip', ENTRY_TYPE.COMMON),
    ('factualXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('factualFreeXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('factualCredits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('subtotalCredits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('subtotalXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('subtotalFreeXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('subtotalTMenXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('subtotalGold', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('subtotalCrystal', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('subtotalEventCoin', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('eventCreditsList', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventXPList', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventFreeXPList', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventTMenXPList', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventGoldList', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventCrystalList', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventEventCoinList', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventCreditsFactor100List', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventXPFactor100List', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventFreeXPFactor100List', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventTMenXPFactor100List', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('eventGoldFactor100List', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('originalXPPenalty', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('originalCreditsPenalty', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('originalCreditsContributionIn', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('originalCreditsContributionOut', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('premiumVehicleXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('premiumVehicleXPFactor100', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('squadXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('squadXPFactor100', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('referral20XP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('referral20XPFactor100', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('referral20Credits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('referral20CreditsFactor100', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('premiumXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('premiumPlusXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('appliedPremiumXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('premiumTmenXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('premiumPlusTmenXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('appliedPremiumTmenXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('premiumCreditsFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('premiumPlusCreditsFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('appliedPremiumCreditsFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('premSquadCreditsFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('originalPremSquadCredits', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('premSquadCredits', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('dailyXPFactor10', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('additionalXPFactor10', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('igrXPFactor10', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('aogasFactor10', int, 0, None, 'max', ENTRY_TYPE.COMMON),
    ('refSystemXPFactor10', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('fairplayFactor10', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('orderCredits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('orderXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('orderFreeXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('orderTMenXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('orderCreditsFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('orderXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('orderFreeXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('orderTMenXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('boosterCredits', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('boosterXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('boosterFreeXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('boosterTMenXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('boosterCreditsFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('boosterXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('boosterFreeXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('boosterTMenXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('playerRankXP', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('playerRankXPFactor100', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('isPremium', bool, False, None, 'any', ENTRY_TYPE.COMMON),
    ('premMask', int, 0, None, 'any', ENTRY_TYPE.COMMON),
    ('xpByTmen', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('autoRepairCost', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('autoLoadCost', tuple, (0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('autoEquipCost', tuple, (0, 0, 0), None, 'skip', ENTRY_TYPE.COMMON),
    ('prevMarkOfMastery', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('markOfMastery', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('dossierPopUps', list, [], None, 'skip', ENTRY_TYPE.COMMON),
    ('vehTypeLockTime', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('serviceProviderID', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('marksOnGun', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('movingAvgDamage', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('damageRating', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('battleNum', int, 0, None, 'skip', ENTRY_TYPE.COMMON)]) + _PRIVATE_EVENT_RESULTS
# Extra private columns only present in the full (owner-visible) results.
_VEH_FULL_RESULTS_PRIVATE = Meta([
    ('questsProgress', dict, {}, None, 'joinDicts', ENTRY_TYPE.COMMON),
    ('c11nProgress', dict, {}, None, 'skip', ENTRY_TYPE.COMMON),
    ('originalCreditsToDrawSquad', int, 0, None, 'sum', ENTRY_TYPE.COMMON),
    ('originalCreditsPenaltySquad', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('originalCreditsContributionInSquad', int, 0, None, 'skip', ENTRY_TYPE.COMMON),
    ('originalCreditsContributionOutSquad', int, 0, None, 'sum', ENTRY_TYPE.COMMON)])
# Server-only column of the full results.
VEH_FULL_RESULTS_SERVER = Meta([
    ('eventGoldByEventID', dict, {}, None, 'skip', ENTRY_TYPE.COMMON)])
# Aggregate column layouts assembled from the public/private/server partials.
VEH_BASE_RESULTS = _VEH_CELL_RESULTS_PUBLIC + _VEH_BASE_RESULTS_PUBLIC + _VEH_CELL_RESULTS_PRIVATE + _VEH_BASE_RESULTS_PRIVATE + _VEH_CELL_RESULTS_SERVER + _VEH_BASE_RESULTS_SERVER
# Fix: VEH_PUBLIC_RESULTS was assigned twice with the identical value (once
# before and once after VEH_FULL_RESULTS); keep a single assignment.
VEH_PUBLIC_RESULTS = _VEH_CELL_RESULTS_PUBLIC + _VEH_BASE_RESULTS_PUBLIC
VEH_FULL_RESULTS = _VEH_CELL_RESULTS_PUBLIC + _VEH_BASE_RESULTS_PUBLIC + _VEH_CELL_RESULTS_PRIVATE + _VEH_BASE_RESULTS_PRIVATE + VEH_FULL_RESULTS_UPDATE + _VEH_FULL_RESULTS_PRIVATE
class UNIT_CLAN_MEMBERSHIP:
    """Enumeration of clan-membership modes (integer codes)."""
    NONE = 0
    ANY = 1
    SAME = 2
def dictToList(indices, d):
    """Flatten mapping *d* into a list ordered by the *indices* mapping.

    Args:
        indices: mapping of key name -> target position in the output list.
        d: mapping of key name -> value; must contain every key in *indices*.

    Returns:
        A list of ``len(indices)`` entries with each value placed at its
        mapped index (positions not named in *indices* stay ``None``).

    Raises:
        KeyError: if *d* is missing a key listed in *indices*.
    """
    # dict.items() works on both Python 2 and 3; the original iteritems()
    # is Python-2-only.
    result = [None] * len(indices)
    for name, index in indices.items():
        result[index] = d[name]
    return result
def listToDict(names, l):
    """Pair each entry of *names* with the value at the same position of *l*.

    Raises IndexError if *l* is shorter than *names*; extra values in *l*
    are ignored.
    """
    mapping = {}
    for position, name in enumerate(names):
        mapping[name] = l[position]
    return mapping
class _VehicleInteractionDetailsItem(object):
    """Dict-like view over one vehicle's slice of a flat details list.

    The backing list holds the packed field values for several vehicles back
    to back; *offset* selects where this vehicle's fields start.  Field
    positions come from the module-level VEH_INTERACTION_DETAILS_* tables.
    """

    @staticmethod
    def __fmt2py(format):
        # Map a struct format character to the Python type used for
        # coercion: 'f' -> float, everything else -> int.
        if format in ('f', ):
            return float
        return int

    def __init__(self, values, offset):
        # values: shared flat list, mutated in place by __setitem__
        # offset: index of this vehicle's first field inside `values`
        self.__values = values
        self.__offset = offset

    def __getitem__(self, key):
        return self.__values[(self.__offset + VEH_INTERACTION_DETAILS_INDICES[key])]

    def __setitem__(self, key, value):
        # Coerce to the field's declared type and clamp to its maximum.
        self.__values[self.__offset + VEH_INTERACTION_DETAILS_INDICES[key]] = min(self.__fmt2py(VEH_INTERACTION_DETAILS_TYPES[key])(value), VEH_INTERACTION_DETAILS_MAX_VALUES[key])

    def __str__(self):
        return str(dict(self))

    def __iter__(self):
        # Yields (field name, value) pairs; izip truncates to the number of
        # field names, so the trailing vehicles' data is not exposed.
        # NOTE: izip is the Python-2 itertools spelling.
        return izip(VEH_INTERACTION_DETAILS_NAMES, self.__values[self.__offset:])
class VehicleInteractionDetails(object):
    """Per-vehicle interaction details packed into one flat value list.

    Keys are (vehID, vehIdx) tuples; each maps to a fixed-size slice of the
    flat list, exposed through _VehicleInteractionDetailsItem.
    """

    def __init__(self, uniqueVehIDs, values):
        self.__uniqueVehIDs = uniqueVehIDs
        self.__values = values
        size = len(VEH_INTERACTION_DETAILS)
        # Precompute each vehicle's starting offset inside the flat list.
        self.__offsets = dict((x[1], x[0] * size) for x in enumerate(uniqueVehIDs))

    @staticmethod
    def fromPacked(packed):
        # Inverse of pack(): `count` little-endian (vehID, vehIdx) uint32
        # pairs, followed by `count` value records.
        # NOTE: `/` is integer division under Python 2 only.
        count = len(packed) / struct.calcsize(('').join(['<2I', VEH_INTERACTION_DETAILS_LAYOUT]))
        packedVehIDsLayout = '<%dI' % (2 * count,)
        packedVehIDsLen = struct.calcsize(packedVehIDsLayout)
        flatIDs = struct.unpack(packedVehIDsLayout, packed[:packedVehIDsLen])
        uniqueVehIDs = []
        for i in xrange(0, len(flatIDs), 2):
            uniqueVehIDs.append((flatIDs[i], flatIDs[(i + 1)]))
        values = struct.unpack('<' + VEH_INTERACTION_DETAILS_LAYOUT * count, packed[packedVehIDsLen:])
        return VehicleInteractionDetails(uniqueVehIDs, values)

    def __getitem__(self, uniqueVehID):
        # Auto-vivifying lookup: an unknown (vehID, vehIdx) key gets a fresh
        # slice of default values appended to the flat list.
        if not isinstance(uniqueVehID, tuple):
            raise UserWarning(('Argument uniqueVehID should be tuple: {}').format(uniqueVehID))
        offset = self.__offsets.get(uniqueVehID, None)
        if offset is None:
            self.__uniqueVehIDs.append(uniqueVehID)
            offset = len(self.__values)
            self.__values += VEH_INTERACTION_DETAILS_INIT_VALUES
            self.__offsets[uniqueVehID] = offset
        return _VehicleInteractionDetailsItem(self.__values, offset)

    def __contains__(self, uniqueVehID):
        if not isinstance(uniqueVehID, tuple):
            raise UserWarning(('Argument uniqueVehID should be tuple: {}').format(uniqueVehID))
        return uniqueVehID in self.__offsets

    def __str__(self):
        return str(self.toDict())

    def pack(self):
        # Serialize as: all (vehID, vehIdx) uint32 pairs, then all values.
        count = len(self.__uniqueVehIDs)
        flatIDs = []
        for uniqueID in self.__uniqueVehIDs:
            flatIDs.append(uniqueID[0])
            flatIDs.append(uniqueID[1])
        try:
            packed = struct.pack(('<%dI' % (2 * count)), *flatIDs) + struct.pack(('<' + VEH_INTERACTION_DETAILS_LAYOUT * count), *self.__values)
        except Exception as e:
            # Best-effort: packing errors are swallowed and an empty payload
            # is returned (debug logging intentionally left disabled).
            #from debug_utils import LOG_ERROR
            #LOG_ERROR('PACKING EXCEPTION', e, str(self))
            packed = ''
        return packed
def toDict(self):
return dict([ ((vehID, vehIdx), dict(_VehicleInteractionDetailsItem(self.__values, offset))) for (vehID, vehIdx), offset in self.__offsets.iteritems()
]) | 53.843333 | 217 | 0.671485 | 3,836 | 32,306 | 5.438478 | 0.142857 | 0.145815 | 0.236555 | 0.084124 | 0.544099 | 0.523248 | 0.428626 | 0.130429 | 0.104688 | 0.069073 | 0 | 0.023207 | 0.151706 | 32,306 | 600 | 218 | 53.843333 | 0.738041 | 0.008667 | 0 | 0.044776 | 0 | 0 | 0.223766 | 0.058869 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.003731 | 0.011194 | null | null | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e8017922857077673165e16e7b2111a57d751eea | 93 | py | Python | Chapter 05/Praktikum-2/1-n.py | icaksh/Python-Projects-Protek | dfd56ea5afc637a8850911a9296131652de383c5 | [
"MIT"
] | null | null | null | Chapter 05/Praktikum-2/1-n.py | icaksh/Python-Projects-Protek | dfd56ea5afc637a8850911a9296131652de383c5 | [
"MIT"
] | null | null | null | Chapter 05/Praktikum-2/1-n.py | icaksh/Python-Projects-Protek | dfd56ea5afc637a8850911a9296131652de383c5 | [
"MIT"
] | null | null | null | banyakPerulangan = 10
i = 0
while (i < banyakPerulangan):
print('Hello World')
i += 1 | 18.6 | 29 | 0.634409 | 12 | 93 | 4.916667 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056338 | 0.236559 | 93 | 5 | 30 | 18.6 | 0.774648 | 0 | 0 | 0 | 0 | 0 | 0.117021 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.2 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e802f8f01dfd7adb2ba84ce96a8a24fa874ed6aa | 915 | py | Python | gnocchi/tests/test_bin.py | Dmitry-Eremeev/gnocchi | 1bfef1c3436778cf816370a94aa5589a93892bc2 | [
"Apache-2.0"
] | 299 | 2017-05-18T17:48:09.000Z | 2022-03-31T09:22:27.000Z | gnocchi/tests/test_bin.py | Dmitry-Eremeev/gnocchi | 1bfef1c3436778cf816370a94aa5589a93892bc2 | [
"Apache-2.0"
] | 981 | 2017-05-18T16:36:49.000Z | 2022-03-31T20:29:12.000Z | gnocchi/tests/test_bin.py | Dmitry-Eremeev/gnocchi | 1bfef1c3436778cf816370a94aa5589a93892bc2 | [
"Apache-2.0"
] | 89 | 2017-05-19T16:20:09.000Z | 2022-03-30T20:58:52.000Z | # -*- encoding: utf-8 -*-
#
# Copyright © 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import subprocess
from gnocchi.tests import base
class BinTestCase(base.BaseTestCase):
    """Smoke test for the installed gnocchi command-line entry point."""

    def test_gnocchi_config_generator_run(self):
        # Discard the generated sample config; we only care that the
        # process exits successfully.
        with open(os.devnull, 'w') as devnull:
            process = subprocess.Popen(['gnocchi-config-generator'],
                                       stdout=devnull)
            self.assertEqual(0, process.wait())
| 33.888889 | 75 | 0.726776 | 135 | 915 | 4.903704 | 0.688889 | 0.090634 | 0.039275 | 0.048338 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013351 | 0.181421 | 915 | 26 | 76 | 35.192308 | 0.869159 | 0.630601 | 0 | 0 | 0 | 0 | 0.07764 | 0.074534 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.125 | false | 0 | 0.375 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
e8191bfdde88e97b7628d90043c31590174ded7b | 5,490 | py | Python | Notebook/malaria_detection_using_cnn.py | sayannath/Malaria-Detection-CNN | a3b4e592ebdd3112bc32f2ea5d68cbacc0d9e9b1 | [
"MIT"
] | 3 | 2020-09-23T13:58:58.000Z | 2021-03-05T16:15:00.000Z | Notebook/malaria_detection_using_cnn.py | sayannath/Malaria-Detection-CNN | a3b4e592ebdd3112bc32f2ea5d68cbacc0d9e9b1 | [
"MIT"
] | 1 | 2020-10-02T14:25:45.000Z | 2020-10-02T14:25:45.000Z | Notebook/malaria_detection_using_cnn.py | sayannath/Malaria-Detection-CNN | a3b4e592ebdd3112bc32f2ea5d68cbacc0d9e9b1 | [
"MIT"
] | 4 | 2020-10-01T02:31:04.000Z | 2021-05-22T20:59:18.000Z | # -*- coding: utf-8 -*-
"""Malaria Detection using CNN.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1S_oDkWVKPvQN3d_dBDD0Doytyqz7EB23
# Working with Custom Images for Malaria Detection
Acknowledgements
This Dataset is taken from the official NIH Website: https://ceb.nlm.nih.gov/repositories/malaria-datasets/
### **Importing the Dataset**
"""
# Mount Google Drive to access the zipped dataset (Colab-only).
from google.colab import drive
drive.mount('/content/gdrive')

# NOTE: IPython/Colab shell magic -- not valid plain Python; run in a notebook.
!unzip "/content/gdrive/My Drive/cell_images.zip"

"""### Importing the libraries"""

import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib.image as imread

"""Setting the directory of test and train images"""

test_path = 'cell_images/test/'
train_path = 'cell_images/train/'

import tensorflow
print(tensorflow.__version__)

from IPython.display import clear_output
from tensorflow.keras.optimizers import SGD, Adam, Adadelta
from tensorflow.keras.layers import Dense, Flatten, Conv2D, MaxPooling2D, BatchNormalization ,Dropout
from tensorflow.keras.models import Sequential
from tensorflow.keras.callbacks import Callback
from tensorflow.keras.preprocessing.image import ImageDataGenerator

image_size = (130, 130)  # Input resolution (height, width) fed to the network

# Augmentation + rescaling generator.
# NOTE(review): the same augmenting generator is reused for validation below;
# usually only rescaling is applied to validation data.
datagen = ImageDataGenerator(
    rotation_range=20,
    width_shift_range=0.10, # Shift the pic width by a max of 10%
    height_shift_range=0.10, # Shift the pic height by a max of 10%
    rescale=1/255, # Rescale the image by normalizing it to [0, 1]
    shear_range=0.1, # Shear means cutting away part of the image (max 10%)
    zoom_range=0.1, # Zoom in by 10% max
    horizontal_flip=True, # Allow horizontal flipping
    fill_mode='nearest' # Fill in missing pixels with the nearest filled value
)

# Training batches: resized RGB images with binary (0/1) labels.
train_gen = datagen.flow_from_directory(
    train_path,
    target_size=image_size,
    batch_size=16,
    color_mode='rgb',
    class_mode='binary'
)

# Validation batches; shuffle=False keeps predictions aligned with filenames.
validation_gen = datagen.flow_from_directory(
    test_path,
    target_size=image_size,
    batch_size=16,
    class_mode='binary',
    shuffle=False,
    color_mode='rgb'
)

train_gen.class_indices  # Mapping of class folder name -> integer label
class PlotLearning(Callback):
    """Keras callback that live-plots loss and accuracy after every epoch.

    Accumulates the train/validation loss and accuracy reported in `logs`
    and redraws a two-panel matplotlib figure (log-scale loss on the left,
    accuracy on the right) at the end of each epoch.
    """

    def on_train_begin(self, logs={}):
        # `logs` default kept mutable to match the original Keras-style
        # signature; it is never mutated here.
        self.i = 0            # epoch counter
        self.x = []           # x-axis values (epoch indices)
        self.losses = []
        self.val_losses = []
        self.acc = []
        self.val_acc = []
        self.fig = plt.figure()
        self.logs = []

    def on_epoch_end(self, epoch, logs={}):
        self.logs.append(logs)
        self.x.append(self.i)
        self.losses.append(logs.get('loss'))
        self.val_losses.append(logs.get('val_loss'))
        # Fix: recent TF/Keras versions report 'accuracy'/'val_accuracy'
        # instead of 'acc'/'val_acc'; fall back so the accuracy curves are
        # not silently filled with None.
        self.acc.append(logs.get('acc', logs.get('accuracy')))
        self.val_acc.append(logs.get('val_acc', logs.get('val_accuracy')))
        self.i += 1

        f, (ax1, ax2) = plt.subplots(1, 2, sharex=True)
        clear_output(wait=True)  # Replace the previous figure in the notebook
        ax1.set_yscale('Log')
        ax1.plot(self.x, self.losses, label="loss")
        ax1.plot(self.x, self.val_losses, label="val_loss")
        ax1.legend()
        ax2.plot(self.x, self.acc, label="acc")
        ax2.plot(self.x, self.val_acc, label="val_acc")
        ax2.legend()
        plt.show()
plot = PlotLearning()

"""# Creating the Model"""

# Three Conv/MaxPool stages followed by a dense head ending in a single
# sigmoid unit (binary classification).
model = Sequential()
model.add(Conv2D(filters=32, kernel_size=(3,3), input_shape=image_size+(3,), activation='relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Conv2D(filters=64, kernel_size=(3,3), input_shape=image_size+(3,), activation='relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Conv2D(filters=64, kernel_size=(3,3), input_shape=image_size+(3,), activation='relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))  # Regularization before the output layer
model.add(Dense(1, activation='sigmoid'))

model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()  # Print the layer/parameter summary

# Train with the live-plotting callback; validation data is evaluated each epoch.
model.fit_generator(train_gen, epochs=20, callbacks=[plot], validation_data=validation_gen)

model.save('malariaModel.h5')

model.evaluate_generator(validation_gen)
model.metrics_names  # Names of the metrics reported by evaluate (notebook display)
"""# Predicting the cell images"""
import numpy as np
from google.colab import files
from keras.preprocessing import image
uploaded = files.upload()
for fn in uploaded.keys():
# predicting images
path = fn
img = image.load_img(path, target_size=(130, 130))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
images = np.vstack([x])
classes = model.predict(images, batch_size=10)
print(fn)
print(classes)
import numpy as np
from google.colab import files
from keras.preprocessing import image

# Second upload round: classify each image and print a verdict.
uploaded=files.upload()

for fn in uploaded.keys():
    # Load the uploaded image at the network's input resolution and add a
    # batch dimension.
    path='/content/' + fn
    img=image.load_img(path, target_size=(130, 130))
    x=image.img_to_array(img)
    x=np.expand_dims(x, axis=0)
    images = np.vstack([x])
    classes = model.predict(images, batch_size=16)
    print(classes)
    # Fix: the sigmoid output lies in (0, 1), so the original `> 0` test was
    # (almost) always true and every image was reported as uninfected.
    # Threshold at 0.5 instead (assumes the alphabetical class mapping
    # parasitized=0 / uninfected=1 -- confirm via train_gen.class_indices).
    if classes[0] > 0.5:
        print(fn + " is a uninfected")
    else:
        print(fn + " is a parasitized")
"""# Exporting the .tflite file"""
print (train_gen.class_indices)
labels = '\n'.join(sorted(train_gen.class_indices.keys()))
with open('labels.txt', 'w') as f:
f.write(labels)
"""# Exporting the lables.txt file"""
model.save('saved_model_dir')
converter = tensorflow.lite.TFLiteConverter.from_saved_model('saved_model_dir')
tflite_model = converter.convert()
open("converted_model.tflite", "wb").write(tflite_model) | 25.534884 | 107 | 0.694536 | 777 | 5,490 | 4.774775 | 0.324324 | 0.021563 | 0.025606 | 0.014016 | 0.283558 | 0.245283 | 0.245283 | 0.232345 | 0.214016 | 0.214016 | 0 | 0.024277 | 0.174681 | 5,490 | 215 | 108 | 25.534884 | 0.794527 | 0.063934 | 0 | 0.234375 | 1 | 0 | 0.075388 | 0.009534 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.148438 | null | null | 0.054688 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e82850f7289142125a27d43520f853518d3d983e | 3,580 | py | Python | pyclient/zeroos/orchestrator/client/PortForward.py | 5l1v3r1/0-orchestrator | 9373a4acb1517ff001df526925c224a7a93b3274 | [
"Apache-2.0"
] | 3 | 2017-07-04T14:02:02.000Z | 2019-07-06T23:34:08.000Z | pyclient/zeroos/orchestrator/client/PortForward.py | 5l1v3r1/0-orchestrator | 9373a4acb1517ff001df526925c224a7a93b3274 | [
"Apache-2.0"
] | 497 | 2017-05-31T07:55:40.000Z | 2018-01-03T12:10:43.000Z | pyclient/zeroos/orchestrator/client/PortForward.py | zero-os/0-orchestrator | 9373a4acb1517ff001df526925c224a7a93b3274 | [
"Apache-2.0"
] | 8 | 2017-06-14T09:45:56.000Z | 2021-02-01T18:12:55.000Z | """
Auto-generated class for PortForward
"""
from .IPProtocol import IPProtocol
from . import client_support
class PortForward(object):
    """
    auto-generated. don't touch.
    """
    # NOTE(review): the five copy-pasted validation stanzas of the generated
    # __init__ were collapsed into one data-driven loop (identical behavior
    # and error messages).  Mirror this change in the generator template if
    # the class is ever regenerated.

    @staticmethod
    def create(dstip, dstport, protocols, srcip, srcport):
        """
        :type dstip: str
        :type dstport: int
        :type protocols: list[IPProtocol]
        :type srcip: str
        :type srcport: int
        :rtype: PortForward
        """
        return PortForward(
            dstip=dstip,
            dstport=dstport,
            protocols=protocols,
            srcip=srcip,
            srcport=srcport,
        )

    def __init__(self, json=None, **kwargs):
        """Build a PortForward from a JSON dict or from keyword arguments.

        :raises ValueError: if no data is given, a required property is
            missing, or a value cannot be coerced to its declared type.
        """
        if json is None and not kwargs:
            raise ValueError('No data or kwargs present')

        class_name = 'PortForward'
        create_error = '{cls}: unable to create {prop} from value: {val}: {err}'
        required_error = '{cls}: missing required property {prop}'

        data = json or kwargs

        # (property name, accepted datatypes, coercion factory)
        specs = [
            ('dstip', [str], client_support.val_factory),
            ('dstport', [int], client_support.val_factory),
            ('protocols', [IPProtocol], client_support.list_factory),
            ('srcip', [str], client_support.val_factory),
            ('srcport', [int], client_support.val_factory),
        ]
        for property_name, datatypes, factory in specs:
            val = data.get(property_name)
            if val is None:
                raise ValueError(required_error.format(cls=class_name, prop=property_name))
            try:
                setattr(self, property_name, factory(val, datatypes))
            except ValueError as err:
                raise ValueError(create_error.format(cls=class_name, prop=property_name, val=val, err=err))

    def __str__(self):
        return self.as_json(indent=4)

    def as_json(self, indent=0):
        """Serialize to a JSON string."""
        return client_support.to_json(self, indent=indent)

    def as_dict(self):
        """Serialize to a plain dict."""
        return client_support.to_dict(self)
| 33.773585 | 107 | 0.602235 | 412 | 3,580 | 5.065534 | 0.162621 | 0.114998 | 0.067082 | 0.09104 | 0.607092 | 0.607092 | 0.607092 | 0.607092 | 0.607092 | 0.607092 | 0 | 0.000806 | 0.307263 | 3,580 | 105 | 108 | 34.095238 | 0.840726 | 0.053352 | 0 | 0.52 | 1 | 0 | 0.049349 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.026667 | 0.04 | 0.16 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e82ee867b29e0bc5c4dbea0e239b19f525221a5a | 1,641 | py | Python | tests/test_budget_type.py | Albawsalatn/compass-budget-data-entry-test | 51d9ee543075fee513492994c854a918596599b0 | [
"MIT"
] | null | null | null | tests/test_budget_type.py | Albawsalatn/compass-budget-data-entry-test | 51d9ee543075fee513492994c854a918596599b0 | [
"MIT"
] | null | null | null | tests/test_budget_type.py | Albawsalatn/compass-budget-data-entry-test | 51d9ee543075fee513492994c854a918596599b0 | [
"MIT"
] | null | null | null | """Tests for the budget_type table"""
import logging
import numpy as np
import pandas as pd
import pytest
logger = logging.getLogger("compass-budget")
def test_budget_type_df_name_is_unique(budget_type_df):
    """Every non-null name in budget_type must be unique."""
    dupe_mask = budget_type_df.name.dropna().duplicated()
    logger.debug(dupe_mask.value_counts())
    assert not any(dupe_mask)
def test_budget_type_df_name_has_id(budget_type_df):
    """Every row with a non-null name must also carry a numeric id.

    Arguments:
        budget_type_df {pandas.DataFrame}
    """
    named_rows = budget_type_df.name.notnull()
    # Coerce ids to numbers; anything non-numeric becomes NaN and fails below.
    numeric_ids = pd.to_numeric(budget_type_df.id[named_rows], errors="coerce")
    has_id = numeric_ids.notnull()
    logger.debug(has_id)
    assert all(has_id)
def test_budget_type_df_parent_id_is_in_id(budget_type_df):
    """Every non-null parent_id must reference an existing id (or 0 = root).

    Arguments:
        budget_type_df {pandas.DataFrame}
    """
    valid_ids = list(budget_type_df.id.dropna())
    valid_ids.append(0)  # 0 marks a top-level entry with no real parent
    logger.debug(budget_type_df[budget_type_df.parent_id.isin(valid_ids)])
    # Fix: `== True` (flake8 E712) replaced by a plain truthiness assert --
    # numpy.bool_ is truthy, so no comparison is needed (and `is True` would
    # be wrong for numpy.bool_).
    assert budget_type_df.parent_id.dropna().isin(valid_ids).all()
def test_budget_type_df_parent_name_is_empty_if_parent_id_is_zero(budget_type_df):
    """Rows with parent_id == 0 (and a non-null name) must have no parent_name.

    Arguments:
        budget_type_df {pandas.DataFrame}
    """
    # TODO: also test the opposite direction (non-zero parent_id implies a
    # parent_name)
    top_level_rows = budget_type_df[(budget_type_df.parent_id == 0)
                                    & budget_type_df.name.notnull()]
    # TODO: log the offending rows on failure
    assert top_level_rows.parent_name.isnull().all()
| 29.836364 | 82 | 0.726996 | 261 | 1,641 | 4.233716 | 0.298851 | 0.190045 | 0.217195 | 0.097738 | 0.309502 | 0.159276 | 0.052489 | 0 | 0 | 0 | 0 | 0.002232 | 0.180987 | 1,641 | 54 | 83 | 30.388889 | 0.81994 | 0.250457 | 0 | 0 | 0 | 0 | 0.017007 | 0 | 0 | 0 | 0 | 0.018519 | 0.153846 | 1 | 0.153846 | false | 0.038462 | 0.153846 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e82efbb1257660693344e95b54861b8b31f9fd51 | 265 | py | Python | 2/2.1.py | Ingomancer/AoC2020 | 3c74da26769f188bacfc8e1bef56f9957cab4cb6 | [
"MIT"
] | null | null | null | 2/2.1.py | Ingomancer/AoC2020 | 3c74da26769f188bacfc8e1bef56f9957cab4cb6 | [
"MIT"
] | null | null | null | 2/2.1.py | Ingomancer/AoC2020 | 3c74da26769f188bacfc8e1bef56f9957cab4cb6 | [
"MIT"
] | null | null | null | matches = 0
for line in open('2/input.txt'):
acceptable_range, letter, password = line.split(" ")
low, high = acceptable_range.split("-")
count = password.count(letter[0])
if count in range(int(low), int(high)+1):
matches += 1
print(matches) | 33.125 | 56 | 0.641509 | 38 | 265 | 4.421053 | 0.552632 | 0.178571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023474 | 0.196226 | 265 | 8 | 57 | 33.125 | 0.765258 | 0 | 0 | 0 | 0 | 0 | 0.048872 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.25 | 0 | 0 | 0 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
e837657986e6463c0cab1d3c850968f6934b73c9 | 1,893 | py | Python | tests/test_magic.py | bbonf/rpyc | 2c66dd6936a0d9e6e36c1ba0cda1139676acf95c | [
"MIT"
] | null | null | null | tests/test_magic.py | bbonf/rpyc | 2c66dd6936a0d9e6e36c1ba0cda1139676acf95c | [
"MIT"
] | null | null | null | tests/test_magic.py | bbonf/rpyc | 2c66dd6936a0d9e6e36c1ba0cda1139676acf95c | [
"MIT"
] | null | null | null | import sys
import rpyc
import unittest
is_py3 = sys.version_info >= (3,)
class Meta(type):
def __hash__(self):
return 4321
Base = Meta('Base', (object,), {})
class Foo(Base):
def __hash__(self):
return 1234
class Bar(Foo):
pass
class Mux(Foo):
def __eq__(self, other):
return True
class TestContextManagers(unittest.TestCase):
def setUp(self):
self.conn = rpyc.classic.connect_thread()
def tearDown(self):
self.conn.close()
def test_hash_class(self):
hesh = self.conn.builtins.hash
mod = self.conn.modules.test_magic
self.assertEqual(hash(mod.Base), 4321)
self.assertEqual(hash(mod.Foo), 4321)
self.assertEqual(hash(mod.Bar), 4321)
self.assertEqual(hash(mod.Base().__class__), 4321)
self.assertEqual(hash(mod.Foo().__class__), 4321)
self.assertEqual(hash(mod.Bar().__class__), 4321)
basecl_ = mod.Foo().__class__.__mro__[1]
object_ = mod.Foo().__class__.__mro__[2]
self.assertEqual(hash(basecl_), hesh(basecl_))
self.assertEqual(hash(object_), hesh(object_))
self.assertEqual(hash(object_), hesh(self.conn.builtins.object))
def test_hash_obj(self):
hesh = self.conn.builtins.hash
mod = self.conn.modules.test_magic
obj = mod.Base()
self.assertNotEqual(hash(obj), 1234)
self.assertNotEqual(hash(obj), 4321)
self.assertEqual(hash(obj), hesh(obj))
self.assertEqual(hash(mod.Foo()), 1234)
self.assertEqual(hash(mod.Bar()), 1234)
if is_py3:
# py3 implicitly adds '__hash__=None' during class construction
# if '__eq__ is defined:
self.assertRaises(TypeError, lambda: hash(mod.Mux()))
else:
self.assertEqual(hash(mod.Mux()), 1234)
if __name__ == "__main__":
unittest.main()
| 27.042857 | 75 | 0.626519 | 233 | 1,893 | 4.776824 | 0.266094 | 0.175202 | 0.221923 | 0.177898 | 0.359389 | 0.21204 | 0.098832 | 0.098832 | 0.098832 | 0.098832 | 0 | 0.04025 | 0.238774 | 1,893 | 69 | 76 | 27.434783 | 0.73213 | 0.044374 | 0 | 0.12 | 0 | 0 | 0.006645 | 0 | 0 | 0 | 0 | 0 | 0.32 | 1 | 0.14 | false | 0.02 | 0.06 | 0.06 | 0.36 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
08fb3adf2bc06dbc4f055dfbdfb8476d2b28eaf3 | 564 | py | Python | pandas_ml/skaccessors/pipeline.py | matsavage/pandas-ml | 794cddc8dc5d0a49fbc9734d826d9465078f376e | [
"BSD-3-Clause"
] | 305 | 2016-02-21T06:35:25.000Z | 2022-03-30T11:53:31.000Z | pandas_ml/skaccessors/pipeline.py | matsavage/pandas-ml | 794cddc8dc5d0a49fbc9734d826d9465078f376e | [
"BSD-3-Clause"
] | 69 | 2016-02-16T08:10:46.000Z | 2022-03-04T14:36:12.000Z | pandas_ml/skaccessors/pipeline.py | matsavage/pandas-ml | 794cddc8dc5d0a49fbc9734d826d9465078f376e | [
"BSD-3-Clause"
] | 73 | 2016-02-16T08:27:28.000Z | 2022-03-10T06:57:51.000Z | #!/usr/bin/env python
from pandas_ml.core.accessor import _AccessorMethods
class PipelineMethods(_AccessorMethods):
    """
    Accessor that exposes helpers from ``sklearn.pipeline``.
    """

    _module_name = 'sklearn.pipeline'

    @property
    def make_pipeline(self):
        """Proxy for ``sklearn.pipeline.make_pipeline`` (not listed in the module's ``__all__``)."""
        return getattr(self._module, 'make_pipeline')

    @property
    def make_union(self):
        """Proxy for ``sklearn.pipeline.make_union`` (not listed in the module's ``__all__``)."""
        return getattr(self._module, 'make_union')
| 23.5 | 53 | 0.625887 | 58 | 564 | 5.724138 | 0.482759 | 0.180723 | 0.114458 | 0.138554 | 0.216867 | 0.216867 | 0.216867 | 0.216867 | 0 | 0 | 0 | 0 | 0.260638 | 564 | 23 | 54 | 24.521739 | 0.796163 | 0.299645 | 0 | 0.222222 | 0 | 0 | 0.046512 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
08fedbfd0bfd5034344b78da9766f4f645c32edb | 571 | py | Python | monty_hall_problem.py | luxorv/statistics | f9b9938b1f6aec475cd942871128e16f6ebccc1a | [
"MIT"
] | null | null | null | monty_hall_problem.py | luxorv/statistics | f9b9938b1f6aec475cd942871128e16f6ebccc1a | [
"MIT"
] | null | null | null | monty_hall_problem.py | luxorv/statistics | f9b9938b1f6aec475cd942871128e16f6ebccc1a | [
"MIT"
] | null | null | null | from random import randint
N = 1000  # number of Monty Hall games to simulate per estimate
def simulate(N):
    """Estimate by Monte Carlo the win probability of always switching doors.

    Plays N Monty Hall games: the player picks a door, the host opens a goat
    door, and the player switches to the remaining door. Returns the fraction
    of games in which the switched pick holds the car.
    """
    switch_wins = 0
    car_door = randint(1, 3)
    for _ in range(N):
        first_pick = randint(1, 3)
        if first_pick != car_door:
            # Only one goat door remains for the host to open
            # (doors are 1+2+3 = 6, so the remaining door is 6 - a - b).
            monte_door = 6 - car_door - first_pick
        else:
            # Player picked the car: the host opens either goat door at random.
            monte_door = randint(1, 3)
            while monte_door == car_door:
                monte_door = randint(1, 3)
        switched_pick = 6 - first_pick - monte_door
        if switched_pick == car_door:
            switch_wins += 1
    return float(switch_wins) / float(N)
# Print the estimated win probability of the switching strategy (~2/3).
print(simulate(N))
| 19.689655 | 56 | 0.544658 | 76 | 571 | 3.881579 | 0.368421 | 0.152542 | 0.189831 | 0.20339 | 0.237288 | 0.237288 | 0.237288 | 0.237288 | 0 | 0 | 0 | 0.044199 | 0.366025 | 571 | 28 | 57 | 20.392857 | 0.770718 | 0 | 0 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.055556 | 0 | 0.166667 | 0.055556 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c126f15776b35e4006c6d9793f534f98154696b | 1,902 | py | Python | costar_task_plan/python/costar_task_plan/tools/evaluate_mcts.py | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 66 | 2018-10-31T04:58:53.000Z | 2022-03-17T02:32:25.000Z | costar_task_plan/python/costar_task_plan/tools/evaluate_mcts.py | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 8 | 2018-10-23T21:19:25.000Z | 2018-12-03T02:08:41.000Z | costar_task_plan/python/costar_task_plan/tools/evaluate_mcts.py | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 25 | 2018-10-19T00:54:17.000Z | 2021-10-10T08:28:15.000Z | import os
import numpy as np
# TODO(cpaxton): remove pygame from this
#import pygame as pg
from costar_task_plan.mcts import Node
'''
loop over all MCTS scenarios
- generate the scenarios you need to collect the data
- create
'''
def mctsLoop(env, policies, seed, save, animate, **kwargs):
    """Run one MCTS plan/execute episode in ``env`` with the given policies.

    :param env: environment exposing ``reset()`` and a ``_world`` attribute
    :param policies: MCTS policy bundle with ``explore``/``extract`` methods
        and ``_rollout``, ``_dfs``, ``_sample`` configuration attributes
    :param seed: RNG seed; when None a random world id is drawn instead
    :param save: if True, create an output directory named after the config
    :param animate: if True, fetch the world's screen (for display)
    :param kwargs: must contain ``'iter'``, the number of MCTS exploration
        iterations per planning step

    NOTE(review): the execute section below is scaffold (``pass`` bodies);
    nothing advances ``current_root.state.t``, so the inner ``while`` would
    spin forever if entered -- confirm before relying on this function.
    """
    # Derive a reproducible world id from the seed, or draw one at random.
    if seed is not None:
        world_id = int(seed)
    else:
        world_id = np.random.randint(10000)
    np.random.seed(world_id)

    env.reset()
    world = env._world

    # Root of the MCTS search tree over the freshly reset world.
    current_root = Node(world=world)
    done = current_root.terminal

    # Encode the policy configuration into a directory name for outputs.
    if policies._rollout is None:
        rollout = "norollout"
    else:
        rollout = "rollout"
    if policies._dfs:
        dfs = "_dfs"
    else:
        dfs = ""
    if policies._sample is not None:
        sample = policies._sample.getName()
    else:
        sample = "none"

    dirname = "world%d_%s_%s%s" % (world_id, sample, rollout, dfs)
    if save or animate:
        window = world._getScreen()
        os.mkdir(dirname)

    while not done:
        # planning loop: determine the set of policies
        # NOTE(review): xrange is Python 2 only; use range under Python 3.
        for i in xrange(kwargs['iter']):
            # do whatever you want here
            policies.explore(current_root)
        # Extracted plan from the tree; currently unused below (scaffold).
        path = policies.extract(current_root)

        # execute loop: follow these policies for however long we are supposed
        # to follow them according to their conditions
        while current_root.state.t < 1.0:
            # compute the next action according to the current policy
            # if a policy is finished, pop it off of the stack
            pass

        done = current_root.terminal

        if animate:
            # show the current window
            pass

        # if save:
        #     # Save pygame image to disk
        #     pg.image.save(window, "%s/iter%d.png"%(dirname,iter))

        if done:
            break

    # update current root
| 26.054795 | 78 | 0.592534 | 242 | 1,902 | 4.570248 | 0.475207 | 0.06962 | 0.016275 | 0.041591 | 0.045208 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005464 | 0.326498 | 1,902 | 72 | 79 | 26.416667 | 0.857924 | 0.256046 | 0 | 0.2 | 1 | 0 | 0.033077 | 0 | 0 | 0 | 0 | 0.013889 | 0 | 1 | 0.025 | false | 0.05 | 0.075 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c1e21a388112144e14afd772a8e3cfb6b7935b0 | 202 | py | Python | Easy/count_negative_numbers_in_a_sorted_matrix.py | BrynjarGeir/LeetCode | dbd57e645c5398dec538b6466215b61491c8d1d9 | [
"MIT"
] | null | null | null | Easy/count_negative_numbers_in_a_sorted_matrix.py | BrynjarGeir/LeetCode | dbd57e645c5398dec538b6466215b61491c8d1d9 | [
"MIT"
] | null | null | null | Easy/count_negative_numbers_in_a_sorted_matrix.py | BrynjarGeir/LeetCode | dbd57e645c5398dec538b6466215b61491c8d1d9 | [
"MIT"
] | null | null | null | class Solution:
def countNegatives(self, grid: List[List[int]]) -> int:
count = 0
for row in grid:
for c in row:
if c < 0: count += 1
return count | 28.857143 | 59 | 0.49505 | 27 | 202 | 3.703704 | 0.62963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02521 | 0.410891 | 202 | 7 | 60 | 28.857143 | 0.815126 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.428571 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c2502dc29c5891a4a7e6c911efa074518aff66f | 306 | py | Python | src/marketplace/authorization/common.py | dssg/marketplace | c1ad9d25499c5dbafef3b06ba3eb1924ff1a5fbb | [
"MIT"
] | 5 | 2018-08-28T19:31:51.000Z | 2021-04-15T22:14:34.000Z | src/marketplace/authorization/common.py | dssg/marketplace | c1ad9d25499c5dbafef3b06ba3eb1924ff1a5fbb | [
"MIT"
] | 49 | 2018-07-26T01:03:23.000Z | 2022-02-10T10:46:27.000Z | src/marketplace/authorization/common.py | dssg/marketplace | c1ad9d25499c5dbafef3b06ba3eb1924ff1a5fbb | [
"MIT"
] | 2 | 2020-08-04T11:01:59.000Z | 2020-08-13T19:04:16.000Z | import rules
from django.core.exceptions import PermissionDenied
def ensure_rule(rule, *args):
if not rules.test_rule(rule, *args):
raise PermissionDenied
def ensure_user_has_permission(user, target, permission):
if not user.has_perm(permission, target):
raise PermissionDenied
| 23.538462 | 57 | 0.751634 | 39 | 306 | 5.74359 | 0.512821 | 0.169643 | 0.223214 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173203 | 306 | 12 | 58 | 25.5 | 0.885375 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c2d2fe1bb015c0395aa7a11e44cbb7e502a54cc | 6,922 | py | Python | src/healthcareapis/azext_healthcareapis/custom.py | dijyotir/azure-cli-extensions | db626a9d53f7a3a683d9629cbd3d86fdcce98118 | [
"MIT"
] | 1 | 2021-09-16T09:13:38.000Z | 2021-09-16T09:13:38.000Z | src/healthcareapis/azext_healthcareapis/custom.py | dijyotir/azure-cli-extensions | db626a9d53f7a3a683d9629cbd3d86fdcce98118 | [
"MIT"
] | null | null | null | src/healthcareapis/azext_healthcareapis/custom.py | dijyotir/azure-cli-extensions | db626a9d53f7a3a683d9629cbd3d86fdcce98118 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
# pylint: disable=too-many-statements
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=unused-argument
def create_healthcareapis(cmd, client,
resource_group,
name,
kind,
location,
access_policies_object_id,
tags=None,
etag=None,
cosmos_db_offer_throughput=None,
authentication_authority=None,
authentication_audience=None,
authentication_smart_proxy_enabled=None,
cors_origins=None,
cors_headers=None,
cors_methods=None,
cors_max_age=None,
cors_allow_credentials=None):
service_description = {}
service_description['location'] = location
service_description['kind'] = kind
service_description['properties'] = {}
service_description['properties']['access_policies'] = []
for policy in access_policies_object_id.split(','):
service_description['properties']['access_policies'].append({'object_id': policy})
service_description['properties']['cors_configuration'] = {}
service_description['properties']['cors_configuration']['origins'] = None if cors_origins is None else cors_origins.split(',')
service_description['properties']['cors_configuration']['headers'] = None if cors_headers is None else cors_headers.split(',')
service_description['properties']['cors_configuration']['methods'] = None if cors_methods is None else cors_methods.split(',')
service_description['properties']['cors_configuration']['max_age'] = cors_max_age
service_description['properties']['cors_configuration']['allow_credentials'] = cors_allow_credentials
service_description['properties']['cosmos_db_configuration'] = {}
service_description['properties']['cosmos_db_configuration']['offer_throughput'] = cosmos_db_offer_throughput
service_description['authentication_configuration'] = {}
service_description['authentication_configuration']['authority'] = authentication_authority
service_description['authentication_configuration']['audience'] = authentication_audience
service_description['authentication_configuration']['smart_proxy_enabled'] = authentication_smart_proxy_enabled
return client.create_or_update(resource_group_name=resource_group, resource_name=name, service_description=service_description)
def update_healthcareapis(cmd, client,
resource_group,
name,
kind=None,
location=None,
access_policies_object_id=None,
tags=None,
etag=None,
cosmos_db_offer_throughput=None,
authentication_authority=None,
authentication_audience=None,
authentication_smart_proxy_enabled=None,
cors_origins=None,
cors_headers=None,
cors_methods=None,
cors_max_age=None,
cors_allow_credentials=None):
service_description = client.get(resource_group_name=resource_group, resource_name=name).as_dict()
if location is not None:
service_description['location'] = location
if kind is not None:
service_description['kind'] = kind
if access_policies_object_id is not None:
service_description['properties']['access_policies'] = []
for policy in access_policies_object_id.split(','):
service_description['properties']['access_policies'].append({'object_id': policy})
if service_description['properties'].get('cors_configuration') is None:
service_description['properties']['cors_configuration'] = {}
if cors_origins is not None:
service_description['properties']['cors_configuration']['origins'] = None if cors_origins is None else cors_origins.split(',')
if cors_headers is not None:
service_description['properties']['cors_configuration']['headers'] = None if cors_headers is None else cors_headers.split(',')
if cors_methods is not None:
service_description['properties']['cors_configuration']['methods'] = None if cors_methods is None else cors_methods.split(',')
if cors_max_age is not None:
service_description['properties']['cors_configuration']['max_age'] = cors_max_age
if cors_allow_credentials is not None:
service_description['properties']['cors_configuration']['allow_credentials'] = cors_allow_credentials
if service_description['properties'].get('cosmos_db_configuration') is None:
service_description['properties']['cosmos_db_configuration'] = {}
if cosmos_db_offer_throughput is not None:
service_description['properties']['cosmos_db_configuration']['offer_throughput'] = cosmos_db_offer_throughput
if service_description['properties'].get('authentication_configuration') is None:
service_description['authentication_configuration'] = {}
if authentication_authority is not None:
service_description['authentication_configuration']['authority'] = authentication_authority
if authentication_audience is not None:
service_description['authentication_configuration']['audience'] = authentication_audience
if authentication_smart_proxy_enabled is not None:
service_description['authentication_configuration']['smart_proxy_enabled'] = authentication_smart_proxy_enabled
return client.create_or_update(resource_group_name=resource_group, resource_name=name, service_description=service_description)
def list_healthcareapis(cmd, client,
resource_group=None):
if resource_group is not None:
return client.list_by_resource_group(resource_group_name=resource_group)
return client.list()
def show_healthcareapis(cmd, client,
resource_group,
name):
return client.get(resource_group_name=resource_group, resource_name=name)
def delete_healthcareapis(cmd, client,
resource_group,
name):
return client.delete(resource_group_name=resource_group, resource_name=name)
| 56.276423 | 134 | 0.651401 | 676 | 6,922 | 6.33284 | 0.127219 | 0.176594 | 0.156973 | 0.089699 | 0.800981 | 0.724363 | 0.691194 | 0.580005 | 0.535856 | 0.535856 | 0 | 0 | 0.234181 | 6,922 | 122 | 135 | 56.737705 | 0.807583 | 0.0718 | 0 | 0.653465 | 0 | 0 | 0.178488 | 0.05721 | 0 | 0 | 0 | 0 | 0 | 1 | 0.049505 | false | 0 | 0 | 0.019802 | 0.108911 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c2ff87a59fc0949bd18ae26bc290ba7f4c77632 | 62 | py | Python | neoepiscope/version.py | jxshi/neoepiscope | 4e9b7de2f355bf1de270e17eda22d176f0bff627 | [
"MIT"
] | 18 | 2018-09-14T23:38:10.000Z | 2022-01-25T22:32:26.000Z | neoepiscope/version.py | jxshi/neoepiscope | 4e9b7de2f355bf1de270e17eda22d176f0bff627 | [
"MIT"
] | 14 | 2018-10-09T17:03:52.000Z | 2021-05-07T07:26:27.000Z | neoepiscope/version.py | jxshi/neoepiscope | 4e9b7de2f355bf1de270e17eda22d176f0bff627 | [
"MIT"
] | 18 | 2018-09-13T21:00:21.000Z | 2022-02-11T07:39:36.000Z | #!/usr/bin/env python
# coding=utf-8
version_number = "0.5.0"
| 15.5 | 24 | 0.677419 | 12 | 62 | 3.416667 | 0.916667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 0.112903 | 62 | 3 | 25 | 20.666667 | 0.672727 | 0.532258 | 0 | 0 | 0 | 0 | 0.185185 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c3a850cb701d562f7d1a3c856e13af69a80f58d | 1,999 | py | Python | src/data_gen/ohem.py | kehuaWangfff/FashionAI_KeyPoint_Detection_Challenge_Keras | 02422f315403fae4dcd87abf90b08ae9183d75f0 | [
"MIT"
] | 169 | 2018-05-24T08:22:03.000Z | 2022-02-02T15:25:17.000Z | src/data_gen/ohem.py | Koeru/FashionAI_KeyPoint_Detection_Challenge_Keras | 0b3bd8cdee32e05619300e5466578644974279df | [
"MIT"
] | 12 | 2018-05-29T15:40:50.000Z | 2021-11-17T07:35:21.000Z | src/data_gen/ohem.py | Koeru/FashionAI_KeyPoint_Detection_Challenge_Keras | 0b3bd8cdee32e05619300e5466578644974279df | [
"MIT"
] | 54 | 2018-05-25T13:57:42.000Z | 2022-02-08T03:00:07.000Z |
import sys
sys.path.insert(0, "../unet/")
from keras.models import *
from keras.layers import *
from utils import np_euclidean_l2
from dataset import getKpNum
def generate_topk_mask_ohem(input_data, gthmap, keras_model, graph, topK, image_category, dynamicFlag=False):
    """Build an OHEM mask keeping only the hardest (highest-loss) keypoint channels.

    :param input_data: (image, mask) pair for a single sample
    :param gthmap: ground-truth heatmap, shape (h, w, channels)
    :param keras_model: model used for inference
    :param graph: TensorFlow graph entered around predict() (thread-safety workaround)
    :param topK: number of channels to keep in the mask
    :param image_category: category used to derive topK when dynamicFlag is set
    :param dynamicFlag: if True, override topK with half the category's keypoint count
    :return: (ohem_mask, ohem_gthmap) -- the binary channel mask and the
        ground-truth heatmap with non-selected channels zeroed out
    """
    # Run inference on the single sample (add a batch axis first).
    image, mask = input_data
    batched_img = image[np.newaxis, :, :, :]
    batched_mask = mask[np.newaxis, :, :, :]

    if len(keras_model.input_layers) == 3:
        # Three-input models take the original mask as the ohem_mask too.
        model_inputs = [batched_img, batched_mask, batched_mask]
    else:
        model_inputs = [batched_img, batched_mask]

    with graph.as_default():
        predictions = keras_model.predict(model_inputs)

    # Heatmap produced by the last stage of the network.
    final_heatmap = predictions[-1]

    # Per-channel L2 loss between prediction and ground truth.
    n_channels = gthmap.shape[-1]
    channel_losses = [
        np_euclidean_l2(final_heatmap[0, :, :, c], gthmap[:, :, c])
        for c in range(n_channels)
    ]

    # fixme: topk may differ between categories
    if dynamicFlag:
        topK = getKpNum(image_category) // 2

    # Keep only the topK hardest channels: 1.0 in the mask, 0.0 elsewhere.
    ohem_mask = adjsut_mask(channel_losses, mask, topK)
    ohem_gthmap = ohem_mask * gthmap
    return ohem_mask, ohem_gthmap
def adjsut_mask(loss, input_mask, topk):
    """Build a binary channel mask keeping only the topk highest-loss channels.

    :param loss: sequence of per-channel loss values, one per mask channel
    :param input_mask: array of shape (h, w, channels); only its shape is used
    :param topk: number of channels to select
    :return: array shaped like input_mask, with the topk highest-loss
        channels filled with 1.0 and every other channel 0.0
    """
    assert (len(loss) == input_mask.shape[-1]), \
        "shape should be same" + str(len(loss)) + " vs " + str(input_mask.shape)

    # np.float was a deprecated alias of the builtin float (removed in
    # NumPy 1.24); np.float64 yields the exact same array dtype.
    outmask = np.zeros(input_mask.shape, dtype=np.float64)

    # Indices of the topk largest losses (stable sort, ascending -> take tail).
    topk_index = sorted(range(len(loss)), key=lambda i: loss[i])[-topk:]
    # Fill only the selected channels instead of scanning every channel.
    for i in topk_index:
        outmask[:, :, i] = 1.0
    return outmask
| 27.383562 | 109 | 0.63932 | 285 | 1,999 | 4.340351 | 0.392982 | 0.04042 | 0.033953 | 0.017785 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012641 | 0.248124 | 1,999 | 72 | 110 | 27.763889 | 0.810379 | 0.228614 | 0 | 0 | 1 | 0 | 0.021362 | 0 | 0 | 0 | 0 | 0.013889 | 0.026316 | 1 | 0.052632 | false | 0 | 0.131579 | 0 | 0.236842 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c697b0118e09a01eb9ef9203eddfb110d1af845 | 965 | py | Python | anvil/microsoft/auth/anvilMicrosoftAuth.py | benlawraus/pyDALAnvilWorks | 8edc67b0fbe65bdcc0ef6fd2424f55046cacba7c | [
"MIT"
] | 6 | 2021-11-14T22:49:40.000Z | 2022-03-26T17:40:40.000Z | anvil/microsoft/auth/anvilMicrosoftAuth.py | benlawraus/pyDALAnvilWorks | 8edc67b0fbe65bdcc0ef6fd2424f55046cacba7c | [
"MIT"
] | null | null | null | anvil/microsoft/auth/anvilMicrosoftAuth.py | benlawraus/pyDALAnvilWorks | 8edc67b0fbe65bdcc0ef6fd2424f55046cacba7c | [
"MIT"
] | 1 | 2022-01-31T01:18:32.000Z | 2022-01-31T01:18:32.000Z | def get_user_access_token():
"""Get the secret access token of the currently-logged-in Microsoft user, for use with the Microsoft REST API.
Requires this app to have its own Microsoft client ID and secret. """
pass
def get_user_email():
    """Return the email address of the signed-in Microsoft user.

    To log in with Microsoft, call ``anvil_microsoft.auth.login()`` from
    form code. Stub implementation: does no work and yields None.
    """
    return None
def get_user_refresh_token():
    """Return the secret refresh token of the signed-in Microsoft user.

    Intended for use with the Microsoft REST API; requires the app to have
    its own Microsoft client ID and secret. Stub implementation: does no
    work and yields None.
    """
    return None
def login():
    """Prompt the user to log in with their Microsoft account.

    Stub implementation: does no work and yields None.
    """
    return None
def refresh_access_token(refresh_token):
    """Exchange a previously saved refresh token for a new access token.

    Intended for use with the Microsoft REST API; requires the app to have
    its own Microsoft client ID and secret. Stub implementation: ignores
    *refresh_token* and yields None.
    """
    return None
| 34.464286 | 117 | 0.723316 | 154 | 965 | 4.454545 | 0.298701 | 0.06414 | 0.043732 | 0.087464 | 0.590379 | 0.552478 | 0.552478 | 0.501458 | 0.501458 | 0.501458 | 0 | 0 | 0.207254 | 965 | 27 | 118 | 35.740741 | 0.896732 | 0.734715 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0.5 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
1c7de7f084b8c7573dd541a978eb9271b6edf412 | 469 | py | Python | algorithms/sorting/selection_sort.py | greglan/python_scripts | f2e98ed3fd975d79b0a6b569b65c850a7f4f3ab3 | [
"MIT"
] | null | null | null | algorithms/sorting/selection_sort.py | greglan/python_scripts | f2e98ed3fd975d79b0a6b569b65c850a7f4f3ab3 | [
"MIT"
] | null | null | null | algorithms/sorting/selection_sort.py | greglan/python_scripts | f2e98ed3fd975d79b0a6b569b65c850a7f4f3ab3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from algorithms.sorting.utils import *
def selection_sort(t):
C = Complexity()
n = len(t)
for i in range(n):
C.increase_assignments()
min_index = i
for j in range(i+1, n):
C.increase_comparisons()
if t[j] < t[min_index]:
C.increase_assignments()
min_index = j
C.increase_assignments(2)
swap(t, i, min_index)
return t, C | 21.318182 | 40 | 0.520256 | 62 | 469 | 3.790323 | 0.5 | 0.153191 | 0.255319 | 0.195745 | 0.238298 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010067 | 0.364606 | 469 | 22 | 41 | 21.318182 | 0.778523 | 0.044776 | 0 | 0.133333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.066667 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c81fd94478895506254a2b8137e9c5d31ce370f | 3,675 | py | Python | dref/migrations/0002_auto_20220208_1025.py | IFRCGo/ifrcgo-api | c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a | [
"MIT"
] | null | null | null | dref/migrations/0002_auto_20220208_1025.py | IFRCGo/ifrcgo-api | c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a | [
"MIT"
] | null | null | null | dref/migrations/0002_auto_20220208_1025.py | IFRCGo/ifrcgo-api | c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a | [
"MIT"
] | null | null | null | # Generated by Django 2.2.26 on 2022-02-08 10:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dref', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='dref',
name='total_targated_population',
),
migrations.AddField(
model_name='dref',
name='total_targeted_population',
field=models.IntegerField(blank=True, help_text='Estimated number of targeted people', null=True, verbose_name='total targeted population'),
),
migrations.AlterField(
model_name='dref',
name='anticipatory_actions',
field=models.TextField(blank=True, help_text='Description of anticipatory actions or imminent disaster', null=True, verbose_name='anticipatory actions'),
),
migrations.AlterField(
model_name='identifiedneed',
name='title',
field=models.CharField(choices=[('shelter_and_basic_household_items', 'Shelter And Basic Household Items'), ('livelihoods_and_basic_needs', 'Livelihoods And Basic Needs'), ('health', 'Health'), ('water_sanitation_and_hygiene', 'Water, Sanitation And Hygiene'), ('protection_gender_and_inclusion', 'Protection, Gender And Inclusion'), ('education', 'Education'), ('migration', 'Migration'), ('risk_reduction_climate_adaptation_and_recovery', 'Risk Reduction, Climate Adaptation And Recovery'), ('community_engagement_and _accountability', 'Community Engagement And Accountability'), ('environment_sustainability ', 'Environment Sustainability'), ('shelter_cluster_coordination', 'Shelter Cluster Coordination')], max_length=255, verbose_name='title'),
),
migrations.AlterField(
model_name='nationalsocietyaction',
name='title',
field=models.CharField(choices=[('national_society_readiness', 'National Society Readiness'), ('assessment', 'Assessment'), ('coordination', 'Coordination'), ('resource_mobilization', 'Resource Mobilization'), ('activation_of_contingency_plans', 'Activation Of Contingency Plans'), ('national_society_eoc', 'National Society EOC'), ('shelter_and_basic_household_items', 'Shelter And Basic Household Items'), ('livelihoods_and_basic_needs', 'Livelihoods And Basic Needs'), ('health', 'Health'), ('water_sanitation_and_hygiene', 'Water, Sanitation And Hygiene'), ('protection_gender_and_inclusion', 'Protection, Gender And Inclusion'), ('education', 'Education'), ('migration', 'Migration'), ('risk_reduction_climate_adaptation_and_recovery', 'Risk Reduction, Climate Adaptation And Recovery'), ('community_engagement_and _accountability', 'Community Engagement And Accountability'), ('environment_sustainability ', 'Environment Sustainability'), ('other', 'Other')], max_length=255, verbose_name='title'),
),
migrations.AlterField(
model_name='plannedintervention',
name='title',
field=models.CharField(choices=[('shelter_and_basic_household_items', 'Shelter And Basic Household Items'), ('livelihoods_and_basic_needs', 'Livelihoods And Basic Needs'), ('health', 'Health'), ('water_sanitation_and_hygiene', 'Water, Sanitation And Hygiene'), ('protection_gender_and_inclusion', 'Protection, Gender And Inclusion'), ('education', 'Education'), ('migration', 'Migration'), ('risk_reduction_climate_adaptation_and_recovery_', 'Risk Reduction, Climate Adaptation And Recovery'), ('secretariat_services', 'Secretariat Services'), ('national_society_strengthening', 'National Society Strengthening')], max_length=255, verbose_name='title'),
),
]
| 85.465116 | 1,016 | 0.716463 | 369 | 3,675 | 6.869919 | 0.273713 | 0.03787 | 0.035503 | 0.056805 | 0.616963 | 0.599606 | 0.574359 | 0.574359 | 0.574359 | 0.574359 | 0 | 0.00931 | 0.152381 | 3,675 | 42 | 1,017 | 87.5 | 0.804494 | 0.012517 | 0 | 0.444444 | 1 | 0 | 0.587814 | 0.221395 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.027778 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c8452ad223ffa838030c88d53308ea4b0de2dd0 | 512 | py | Python | tests/mock_module/mock_submodule/mock_subfile.py | yoyowallet/aws-xray-sdk-python | e082f8939d972577f62bd16cf83edb92bd3bd4d6 | [
"Apache-2.0"
] | null | null | null | tests/mock_module/mock_submodule/mock_subfile.py | yoyowallet/aws-xray-sdk-python | e082f8939d972577f62bd16cf83edb92bd3bd4d6 | [
"Apache-2.0"
] | 2 | 2018-11-07T16:49:02.000Z | 2018-11-12T17:16:32.000Z | tests/mock_module/mock_submodule/mock_subfile.py | yoyowallet/aws-xray-sdk-python | e082f8939d972577f62bd16cf83edb92bd3bd4d6 | [
"Apache-2.0"
] | null | null | null | from aws_xray_sdk.core import xray_recorder
def mock_subfunc():
pass
@xray_recorder.capture()
def mock_no_doublepatch():
pass
class MockClass(object):
def __init__(self):
pass
def mock_method(self):
pass
@classmethod
def mock_classmethod(cls):
pass
@staticmethod
def mock_staticmethod():
pass
class MockSubclass(MockClass):
def __init__(self):
super(MockSubclass, self).__init__()
def mock_submethod(self):
pass
| 14.628571 | 44 | 0.650391 | 58 | 512 | 5.344828 | 0.448276 | 0.135484 | 0.070968 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.265625 | 512 | 34 | 45 | 15.058824 | 0.824468 | 0 | 0 | 0.409091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.363636 | false | 0.318182 | 0.045455 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
1c8e71a0ed3491fb72d6cdeba61441808d0fe936 | 385 | py | Python | plot_grad.py | footoredo/pytorch-a2c-ppo-acktr-gail | 3d0263c10340e9d8881f75c64eb2fe9bb0c9e2d0 | [
"MIT"
] | null | null | null | plot_grad.py | footoredo/pytorch-a2c-ppo-acktr-gail | 3d0263c10340e9d8881f75c64eb2fe9bb0c9e2d0 | [
"MIT"
] | null | null | null | plot_grad.py | footoredo/pytorch-a2c-ppo-acktr-gail | 3d0263c10340e9d8881f75c64eb2fe9bb0c9e2d0 | [
"MIT"
] | null | null | null | import numpy as np
import joblib
from a2c_ppo_acktr.multi_agent.utils import tsne
def main():
fgs_5, adv_5 = joblib.load("grad-5.obj")
fgs_10, adv_10 = joblib.load("grad-10.obj")
fgs_15, adv_15 = joblib.load("grad-15.obj")
tsne(fgs_5 + fgs_10 + fgs_15, ["g-5"] * len(fgs_5) + ["g-10"] * len(fgs_10) + ["g-15"] * len(fgs_15))
if __name__ == "__main__":
main()
| 22.647059 | 105 | 0.633766 | 70 | 385 | 3.157143 | 0.385714 | 0.054299 | 0.190045 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098726 | 0.184416 | 385 | 16 | 106 | 24.0625 | 0.605096 | 0 | 0 | 0 | 0 | 0 | 0.132468 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | true | 0 | 0.3 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1c96800c4ddb8cff7fc6c13066e23042942d88b1 | 640 | py | Python | model/team.py | ChatNoir76/Championnat | f5cd7422b812a04ea8bbe1156c3e7021b4d730bf | [
"MIT"
] | 1 | 2020-05-27T20:34:59.000Z | 2020-05-27T20:34:59.000Z | model/team.py | ChatNoir76/Championnat | f5cd7422b812a04ea8bbe1156c3e7021b4d730bf | [
"MIT"
] | null | null | null | model/team.py | ChatNoir76/Championnat | f5cd7422b812a04ea8bbe1156c3e7021b4d730bf | [
"MIT"
] | null | null | null | from model.abstractmodel import AbstractModel
class Team(AbstractModel):
def __init__(self, id_competition, name, comment=None):
super().__init__(self)
self.__id_competition = id_competition
self.__name = name
self.__comment = comment
@property
def id_competition(self):
return self.__id_competition
@property
def name(self):
return self.__name
@name.setter
def name(self, value):
self.__name = value
@property
def comment(self):
return self.__comment
@comment.setter
def comment(self, value):
self.__comment = value
| 21.333333 | 59 | 0.646875 | 71 | 640 | 5.422535 | 0.267606 | 0.168831 | 0.132468 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.270313 | 640 | 29 | 60 | 22.068966 | 0.824411 | 0 | 0 | 0.136364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0.045455 | 0.136364 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
98bfcaa547434a30d2beed2cb03efc3be34188ea | 344 | py | Python | src/libs/components/customscatterlayout.py | loghinalexandru/blackboard-greenboard | 80332bf7709e602a4d5ada31b3cf95801c06190f | [
"MIT"
] | null | null | null | src/libs/components/customscatterlayout.py | loghinalexandru/blackboard-greenboard | 80332bf7709e602a4d5ada31b3cf95801c06190f | [
"MIT"
] | null | null | null | src/libs/components/customscatterlayout.py | loghinalexandru/blackboard-greenboard | 80332bf7709e602a4d5ada31b3cf95801c06190f | [
"MIT"
] | null | null | null | import kivy
kivy.require('2.0.0')
from kivymd.uix.behaviors import TouchBehavior
from kivy.graphics.transformation import Matrix
from kivy.uix.scatterlayout import ScatterLayout
class CustomScatterLayout(TouchBehavior, ScatterLayout):
def on_double_tap(self, *args):
trans = Matrix().scale(1, 1, 1)
self.transform = trans
| 28.666667 | 56 | 0.758721 | 44 | 344 | 5.886364 | 0.590909 | 0.061776 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020478 | 0.148256 | 344 | 11 | 57 | 31.272727 | 0.863481 | 0 | 0 | 0 | 0 | 0 | 0.014535 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.444444 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
98c9de9c699a6e936fcc518c05b8a1b26b26d411 | 763 | py | Python | scraper/functions/progress_bar.py | cordeirossauro/iw-scraper | 5ba97da612ed40a54cac302b25d927b09af136b7 | [
"MIT"
] | null | null | null | scraper/functions/progress_bar.py | cordeirossauro/iw-scraper | 5ba97da612ed40a54cac302b25d927b09af136b7 | [
"MIT"
] | null | null | null | scraper/functions/progress_bar.py | cordeirossauro/iw-scraper | 5ba97da612ed40a54cac302b25d927b09af136b7 | [
"MIT"
] | null | null | null | import math
class ProgressBar:
    """Console progress bar redrawn in place via a carriage return.

    Args:
        total_steps: Number of steps representing 100% progress.
        bar_size: Width of the bar interior, in characters.
        message: Text printed before the counter and the bar.
        current_step: Step to start from (defaults to 1).
    """

    def __init__(self, total_steps, bar_size, message, current_step=1):
        self.total_steps = total_steps
        self.current_step = current_step
        self.bar_size = bar_size
        self.message = message

    def print_bar(self):
        """Render the bar for the current step, overwriting the console line."""
        steps_done = math.floor(
            self.current_step / self.total_steps * self.bar_size
        )
        # Fix: pad with (bar_size - steps_done) spaces so the interior is
        # always exactly bar_size characters. The previous
        # (bar_size - 1 - steps_done) made partial bars one character
        # narrower than the finished bar, so the closing '|' shifted
        # position between updates.
        bar = (
            " |"
            + ("█" * steps_done)
            + (" " * (self.bar_size - steps_done))
            + "|"
        )
        progress = f" [{self.current_step}/{self.total_steps}]"
        # end="\r" rewinds the cursor so the next print overwrites this line.
        print(self.message + progress + bar, end="\r")

    def update_bar(self, current_step):
        """Advance to ``current_step`` and redraw the bar."""
        self.current_step = current_step
        self.print_bar()
| 25.433333 | 73 | 0.559633 | 89 | 763 | 4.494382 | 0.269663 | 0.22 | 0.1875 | 0.1425 | 0.295 | 0.295 | 0 | 0 | 0 | 0 | 0 | 0.003884 | 0.325033 | 763 | 29 | 74 | 26.310345 | 0.770874 | 0 | 0 | 0.090909 | 0 | 0 | 0.06291 | 0.052425 | 0 | 0 | 0 | 0 | 0 | 1 | 0.136364 | false | 0 | 0.045455 | 0 | 0.227273 | 0.136364 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
98dd6107995a1d9cc5513cae4d41a5d935754de6 | 110 | py | Python | config.py | scheric/minorIoT | 77cdb17e156aa911a5f6924fc6de481fc6c41f5a | [
"MIT"
] | 42 | 2017-07-31T07:02:08.000Z | 2021-12-25T09:15:57.000Z | config.py | AzureMentor/iot-hub-python-raspberrypi-client-app | 77cdb17e156aa911a5f6924fc6de481fc6c41f5a | [
"MIT"
] | 9 | 2017-07-31T02:16:23.000Z | 2019-04-24T00:37:15.000Z | config.py | AzureMentor/iot-hub-python-raspberrypi-client-app | 77cdb17e156aa911a5f6924fc6de481fc6c41f5a | [
"MIT"
] | 44 | 2017-09-15T15:34:07.000Z | 2022-03-27T22:18:34.000Z | MESSAGE_TIMESPAN = 2000
# Use simulated sensor readings instead of real hardware — TODO confirm against reader
SIMULATED_DATA = False
# I2C bus address of the sensor — presumably a BME/BMP-series device at 0x77; verify
I2C_ADDRESS = 0x77
# GPIO pin number used by the app (numbering scheme not shown here)
GPIO_PIN_ADDRESS = 24
# LED blink interval, presumably in milliseconds — confirm against consumer
BLINK_TIMESPAN = 1000
| 18.333333 | 23 | 0.818182 | 16 | 110 | 5.25 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147368 | 0.136364 | 110 | 5 | 24 | 22 | 0.736842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036364 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c7034f45a455fb59646081836f84094803292e9d | 784 | py | Python | tests/example/reflexes/example_reflex.py | FarhanAliRaza/django-sockpuppet | 81a0fbf06472421604a74b39a44000a95749167a | [
"MIT"
] | 371 | 2020-04-23T18:08:15.000Z | 2022-03-30T17:11:46.000Z | tests/example/reflexes/example_reflex.py | FarhanAliRaza/django-sockpuppet | 81a0fbf06472421604a74b39a44000a95749167a | [
"MIT"
] | 90 | 2020-04-27T07:36:37.000Z | 2021-10-02T20:47:42.000Z | tests/example/reflexes/example_reflex.py | FarhanAliRaza/django-sockpuppet | 81a0fbf06472421604a74b39a44000a95749167a | [
"MIT"
] | 28 | 2020-04-30T23:21:47.000Z | 2022-02-21T17:59:46.000Z | from sockpuppet.reflex import Reflex
class ExampleReflex(Reflex):
    """Reflex whose increment action stores an updated count in the session."""

    def increment(self, step=1):
        current = int(self.element.dataset['count'])
        self.session['count'] = current + step
class DecrementReflex(Reflex):
    """Reflex whose decrement action stores a decreased count in the session."""

    def decrement(self, step=1):
        current = int(self.element.dataset['count'])
        self.session['otherCount'] = current - step
class ParamReflex(Reflex):
    """Reflex that records its result on instance attributes."""

    def change_word(self):
        # Order of these two assignments is independent.
        self.success = True
        self.word = 'space'
class FormReflex(Reflex):
    """Reflex that copies a posted form field onto the instance."""

    def submit(self):
        posted = self.request.POST['text-input']
        self.text_output = posted
class ErrorReflex(Reflex):
    """Reflex that always fails — used to exercise error handling."""

    def increment(self, step=1):
        message = 'error happened'
        raise Exception(message)
class UserReflex(Reflex):
    """Reflex that exposes the context's object as ``user_reveal``."""

    def get_user(self):
        ctx = self.get_context_data()
        self.user_reveal = ctx['object']
| 23.058824 | 78 | 0.667092 | 95 | 784 | 5.442105 | 0.463158 | 0.104449 | 0.052224 | 0.085106 | 0.299807 | 0.239845 | 0.135397 | 0 | 0 | 0 | 0 | 0.0048 | 0.202806 | 784 | 33 | 79 | 23.757576 | 0.8224 | 0 | 0 | 0.095238 | 0 | 0 | 0.076531 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.047619 | 0 | 0.619048 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
c7108cb487123f5894d6de6249d5570bb2022267 | 851 | py | Python | backend/db/object_encoder.py | threefoldtech/actionwidget | 4c248d8c6084b343d39af1ffa6ba1997d03dc4d4 | [
"Apache-2.0"
] | null | null | null | backend/db/object_encoder.py | threefoldtech/actionwidget | 4c248d8c6084b343d39af1ffa6ba1997d03dc4d4 | [
"Apache-2.0"
] | 14 | 2020-04-02T12:51:17.000Z | 2020-09-08T09:22:30.000Z | backend/db/object_encoder.py | threefoldtech/actionwidget | 4c248d8c6084b343d39af1ffa6ba1997d03dc4d4 | [
"Apache-2.0"
] | null | null | null | import json
import inspect
class ObjectEncoder(json.JSONEncoder):
    """JSON encoder for arbitrary objects.

    Objects exposing ``to_json`` are serialized via its return value;
    other objects with a ``__dict__`` are serialized from their public,
    non-callable members. Anything else is rejected with ``TypeError``
    (the documented ``json.JSONEncoder.default`` contract).
    """

    def default(self, obj):
        if hasattr(obj, "to_json"):
            # Let the object describe itself. json.dumps recurses into the
            # returned value and calls default() again for any nested
            # non-serializable items, so no explicit recursion is needed.
            return obj.to_json()
        if hasattr(obj, "__dict__"):
            # Collect public, non-callable data members only.
            return dict(
                (key, value)
                for key, value in inspect.getmembers(obj)
                if not key.startswith("__")
                and not inspect.isabstract(value)
                and not inspect.isbuiltin(value)
                and not inspect.isfunction(value)
                and not inspect.isgenerator(value)
                and not inspect.isgeneratorfunction(value)
                and not inspect.ismethod(value)
                and not inspect.ismethoddescriptor(value)
                and not inspect.isroutine(value)
            )
        # Previously this returned ``obj`` unchanged, which makes json.dumps
        # recurse forever (RecursionError) on unsupported types; delegating
        # to the base class raises the standard TypeError instead.
        return super().default(obj)
c71a2ab3b1e90df9be001c0ea2f40a58c3d5a23d | 2,158 | py | Python | api/models/base.py | jimbunny/LuckyBlindBox | ec672eb76122cd8475efe9e72b18fdeb133b02f9 | [
"MIT"
] | null | null | null | api/models/base.py | jimbunny/LuckyBlindBox | ec672eb76122cd8475efe9e72b18fdeb133b02f9 | [
"MIT"
] | null | null | null | api/models/base.py | jimbunny/LuckyBlindBox | ec672eb76122cd8475efe9e72b18fdeb133b02f9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#-*- coding:utf-8 -*-
# author:jingtongyu
# datetime:2020/6/7 10:14 下午
# software: PyCharm
from sqlalchemy import inspect, orm
from datetime import datetime
from . import db
class BaseModel(db.Model):
    """
    Abstract base for all models: integer PK, soft-delete flag and
    create/update timestamps. Also provides dict-style access and
    field-list helpers used for serialization.

    NOTE(review): ``keys``/``hide``/``append`` operate on ``self.fields``,
    which is not defined here — subclasses are expected to provide it.
    """
    __abstract__ = True
    # status = Column(SmallInteger, default=1)
    # Auto-incrementing primary key.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Soft-delete marker; rows are flagged, not removed (see delete()).
    is_delete = db.Column(db.BOOLEAN, default=False)
    # Set once on insert; refreshed on every update respectively.
    create_time = db.Column(db.DATETIME(6), default=datetime.now)
    update_time = db.Column(db.DATETIME(6), default=datetime.now, onupdate=datetime.now)

    def __init__(self):
        # self.create_time = int(datetime.now().timestamp())
        pass

    def __getitem__(self, item):
        # Dict-style attribute access: model['name'] == model.name.
        return getattr(self, item)

    @property
    def create_datetime(self):
        """Creation moment as a datetime, or None when unset.

        NOTE(review): ``create_time`` defaults to a ``datetime`` (see the
        column above), but ``datetime.fromtimestamp`` expects a numeric
        Unix timestamp — confirm which type is actually stored, this
        looks inconsistent.
        """
        if self.create_time:
            return datetime.fromtimestamp(self.create_time)
        else:
            return None

    def set_attrs(self, attrs_dict):
        """Bulk-assign existing attributes from a dict, never touching id."""
        for key, value in attrs_dict.items():
            if hasattr(self, key) and key != 'id':
                setattr(self, key, value)

    def delete(self):
        """Soft-delete: flag the row instead of removing it."""
        self.is_delete = True

    def keys(self):
        # Exposes the serializable field names (enables dict(model)).
        return self.fields

    def hide(self, *keys):
        """Remove fields from serialization; returns self for chaining."""
        for key in keys:
            self.fields.remove(key)
        return self

    def append(self, *keys):
        """Add fields to serialization; returns self for chaining."""
        for key in keys:
            self.fields.append(key)
        return self
class MixinJSONSerializer:
    """Mixin controlling which mapped columns appear when serializing.

    ``keys``/``__getitem__`` make instances consumable by ``dict(obj)``.
    """

    @orm.reconstructor
    def init_on_load(self):
        # Runs when SQLAlchemy materializes an instance from the DB
        # (reconstructor, not __init__).
        self._fields = []
        # self._include = []
        self._exclude = []
        self._set_fields()
        self.__prune_fields()

    def _set_fields(self):
        # Hook for subclasses to pre-populate _fields / _exclude.
        pass

    def __prune_fields(self):
        # Default to all mapped columns minus the excluded ones, but only
        # when the subclass did not set an explicit field list.
        columns = inspect(self.__class__).columns
        if not self._fields:
            all_columns = set(columns.keys())
            self._fields = list(all_columns - set(self._exclude))

    def hide(self, *args):
        """Remove keys from serialization; returns self for chaining."""
        for key in args:
            self._fields.remove(key)
        return self

    def keys(self):
        # Field names exposed to dict(obj).
        return self._fields

    def __getitem__(self, key):
        return getattr(self, key)
| 24.522727 | 88 | 0.608434 | 267 | 2,158 | 4.715356 | 0.344569 | 0.063542 | 0.031771 | 0.02224 | 0.200953 | 0.200953 | 0.200953 | 0.112788 | 0.065131 | 0 | 0 | 0.009038 | 0.282206 | 2,158 | 87 | 89 | 24.804598 | 0.803744 | 0.10658 | 0 | 0.157895 | 0 | 0 | 0.00105 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.245614 | false | 0.035088 | 0.052632 | 0.070175 | 0.578947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
c71a53083400eb87b61a8346374bdbe3c415a4f7 | 4,121 | py | Python | kishimoto_trajectory.py | is0280fp/Side-by-Side-Walking-Model | c1bbed8c4ebd74f09729e77d1cbdf8772c038f36 | [
"MIT"
] | 1 | 2017-10-01T15:05:50.000Z | 2017-10-01T15:05:50.000Z | kishimoto_trajectory.py | is0280fp/Side-by-Side-Walking-Model | c1bbed8c4ebd74f09729e77d1cbdf8772c038f36 | [
"MIT"
] | null | null | null | kishimoto_trajectory.py | is0280fp/Side-by-Side-Walking-Model | c1bbed8c4ebd74f09729e77d1cbdf8772c038f36 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 6 19:33:27 2018
@author: yume
"""
import numpy as np
import matplotlib.pyplot as plt
def load_default_trajectory():
    """Return a hard-coded sample walking trajectory.

    Returns:
        tuple: ``(ps, ps_for_d)`` where ``ps`` is a (40, 2) array of x/y
        positions and ``ps_for_d`` is a (1, 40, 2) array that appears to
        hold per-step displacement vectors (entry k roughly matches
        ``ps[k] - ps[k-1]`` for k >= 1) — TODO confirm the first entry's
        meaning and the extra outer nesting.
    """
    ps = np.array(([
        [-0.77703479856881415, 1.4993181096841063],
        [-0.70776038682731871, 1.4170221119724254],
        [-0.68260690865884658, 1.4206095214452887],
        [-0.61961335350444722, 1.396403374501471],
        [-0.52452975175408619, 1.3120215099603865],
        [-0.41054581005311593, 1.2300884769965503],
        [-0.38567688612738783, 1.1262364239010458],
        [-0.30115968064468291, 1.0616980649371949],
        [-0.22130623319182521, 0.933914655648039],
        [-0.14595070460897966, 0.82531833169704305],
        [-0.073066997284054381, 0.71924080684385058],
        [0.010223062162865558, 0.61421203570745217],
        [0.065266998498120728, 0.50483296951356215],
        [0.12787204045661326, 0.39646585780470696],
        [0.18682160531477655, 0.26910195060268484],
        [0.19791426786241169, 0.14344376640017222],
        [0.30767184917455126, 0.63371568346808751],
        [0.38202960019823439, -0.0133898000261839],
        [0.44891710879876359, -0.080474866781608853],
        [0.53705971610686458, -0.17921153739275405],
        [0.62670128183993188, -0.27472851793244945],
        [0.73939023996717992, -0.35063782321276187],
        [0.80149477851952372, -0.34912297166337368],
        [0.87046846035186309, -0.36280663000355622],
        [0.98847297805636788, -0.41429711277275322],
        [1.0520553474302326, -0.40133954562272993],
        [1.1723587292417847, -0.37754650997976182],
        [1.3530005099056032, -0.41721106503568262],
        [1.3546287979263141, -0.38136422040072085],
        [1.4902199436937522, -0.38493808898552663],
        [1.6353635418584515, -0.34246197144551019],
        [1.946088334976178, -0.34526603325326118],
        [1.8313254083319275, -0.27676211872652652],
        [2.1328239494248364, -0.27541768265452879],
        [2.146202914656159, -0.22981560969963232],
        [2.3364057587098828, -0.18565649458770156],
        [2.4240777509727509, -0.14133234626209409],
        [1.9430180862343152, -0.094173374304326487],
        [2.337419604694207, -0.092612605522184211],
        [2.4721016275926509, -0.07533703002757332]
        ]))
    ps_for_d = np.array([[[-0.03665494,  0.12333371],
       [ 0.06927441, -0.082296  ],
       [ 0.02515348,  0.00358741],
       [ 0.06299356, -0.02420615],
       [ 0.0950836 , -0.08438186],
       [ 0.11398394, -0.08193303],
       [ 0.02486892, -0.10385205],
       [ 0.08451721, -0.06453836],
       [ 0.07985345, -0.12778341],
       [ 0.07535553, -0.10859632],
       [ 0.07288371, -0.10607752],
       [ 0.08329006, -0.10502877],
       [ 0.05504394, -0.10937907],
       [ 0.06260504, -0.10836711],
       [ 0.05894956, -0.12736391],
       [ 0.01109266, -0.12565818],
       [ 0.10975758,  0.49027192],
       [ 0.07435775, -0.64710548],
       [ 0.06688751, -0.06708507],
       [ 0.08814261, -0.09873667],
       [ 0.08964157, -0.09551698],
       [ 0.11268896, -0.07590931],
       [ 0.06210454,  0.00151485],
       [ 0.06897368, -0.01368366],
       [ 0.11800452, -0.05149048],
       [ 0.06358237,  0.01295757],
       [ 0.12030338,  0.02379304],
       [ 0.18064178, -0.03966456],
       [ 0.00162829,  0.03584684],
       [ 0.13559115, -0.00357387],
       [ 0.1451436 ,  0.04247612],
       [ 0.31072479, -0.00280406],
       [-0.11476293,  0.06850391],
       [ 0.30149854,  0.00134444],
       [ 0.01337897,  0.04560207],
       [ 0.19020284,  0.04415912],
       [ 0.08767199,  0.04432415],
       [-0.48105966,  0.04715897],
       [ 0.39440152,  0.00156077],
       [ 0.13468202,  0.01727558]]])
    return (ps, ps_for_d)
if __name__ == '__main__':
    # Plot each trajectory point as an orange star.
    # NOTE(review): there is no plt.show() here — the figure is only
    # visible in interactive backends; confirm whether show() is expected.
    ps, ds_for_d = load_default_trajectory()
    for p in ps:
        plt.plot(p[0], p[1], "*", color='#ff7f00')
c72180d31270a576c2de283104fe10ba5c97045b | 2,374 | py | Python | bundle-workflow/tests/tests_manifests/test_input_manifest.py | nknize/opensearch-build | 68e19a4c1c1005475ac070d583cdf0859f7807e0 | [
"Apache-2.0"
] | null | null | null | bundle-workflow/tests/tests_manifests/test_input_manifest.py | nknize/opensearch-build | 68e19a4c1c1005475ac070d583cdf0859f7807e0 | [
"Apache-2.0"
] | null | null | null | bundle-workflow/tests/tests_manifests/test_input_manifest.py | nknize/opensearch-build | 68e19a4c1c1005475ac070d583cdf0859f7807e0 | [
"Apache-2.0"
] | null | null | null | # SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
import os
import unittest
import yaml
from manifests.input_manifest import InputManifest
class TestInputManifest(unittest.TestCase):
    """Checks InputManifest against the checked-in OpenSearch manifests."""

    def setUp(self):
        self.maxDiff = None
        self.manifests_path = os.path.realpath(
            os.path.join(os.path.dirname(__file__), "../../../manifests")
        )

    def _load(self, filename):
        """Load a manifest by file name from the manifests directory."""
        return InputManifest.from_path(os.path.join(self.manifests_path, filename))

    def _assert_opensearch_manifest(self, manifest, build_version, component_count, ref):
        """Shared assertions for an OpenSearch input manifest."""
        self.assertEqual(manifest.version, "1.0")
        self.assertEqual(manifest.build.name, "OpenSearch")
        self.assertEqual(manifest.build.version, build_version)
        self.assertEqual(len(manifest.components), component_count)
        opensearch_component = manifest.components[0]
        self.assertEqual(opensearch_component.name, "OpenSearch")
        self.assertEqual(
            opensearch_component.repository,
            "https://github.com/opensearch-project/OpenSearch.git",
        )
        self.assertEqual(opensearch_component.ref, ref)
        for component in manifest.components:
            self.assertIsInstance(component.ref, str)

    def test_1_0(self):
        self._assert_opensearch_manifest(
            self._load("opensearch-1.0.0.yml"), "1.0.0", 12, "1.0"
        )

    def test_1_1(self):
        self._assert_opensearch_manifest(
            self._load("opensearch-1.1.0.yml"), "1.1.0", 14, "1.x"
        )

    def test_to_dict(self):
        path = os.path.join(self.manifests_path, "opensearch-1.1.0.yml")
        manifest = InputManifest.from_path(path)
        data = manifest.to_dict()
        with open(path) as f:
            self.assertEqual(yaml.safe_load(f), data)
| 38.290323 | 73 | 0.672283 | 279 | 2,374 | 5.620072 | 0.268817 | 0.143495 | 0.08801 | 0.130102 | 0.712372 | 0.712372 | 0.665179 | 0.665179 | 0.665179 | 0.665179 | 0 | 0.019262 | 0.212721 | 2,374 | 61 | 74 | 38.918033 | 0.81969 | 0.076243 | 0 | 0.479167 | 0 | 0 | 0.111568 | 0 | 0 | 0 | 0 | 0 | 0.354167 | 1 | 0.083333 | false | 0 | 0.083333 | 0 | 0.1875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c722cf07e684f4aa30f41cfcb2726b2e4431860d | 2,490 | py | Python | tests/games/mocsar/test_dealer.py | cogitoergoread/muszi-macrohard.hu | e9bbd36b789e670f96622a3a2ba8327f0d897561 | [
"MIT"
] | 1 | 2021-05-27T03:40:37.000Z | 2021-05-27T03:40:37.000Z | tests/games/mocsar/test_dealer.py | cogitoergoread/muszi-macrohard.hu | e9bbd36b789e670f96622a3a2ba8327f0d897561 | [
"MIT"
] | null | null | null | tests/games/mocsar/test_dealer.py | cogitoergoread/muszi-macrohard.hu | e9bbd36b789e670f96622a3a2ba8327f0d897561 | [
"MIT"
] | 1 | 2022-02-24T11:25:22.000Z | 2022-02-24T11:25:22.000Z | import pytest
from rlcard3.games.mocsar.card import Ertekek
from rlcard3.games.mocsar.dealer import MocsarDealer as Dealer
from rlcard3.games.mocsar.player import MocsarPlayer as Player
from rlcard3.games.mocsar.utils import str_to_card_list
def test_dealer_default():
    """
    A default-constructed Dealer holds the full 52-card deck plus three jokers.
    """
    dealer = Dealer()
    assert dealer.__str__() == "[♣3,♡3,♢3,♠3,♣4,♡4,♢4,♠4,♣5,♡5,♢5,♠5,♣6,♡6,♢6,♠6,♣7,♡7,♢7,♠7,♣8,♡8,♢8,♠8," \
                               "♣9,♡9,♢9,♠9,♣0,♡0,♢0,♠0,♣J,♡J,♢J,♠J,♣Q,♡Q,♢Q,♠Q,♣K,♡K,♢K,♠K,♣A,♡A,♢A,♠A,♣2,♡2,♢2,♠2," \
                               "**,**,**]"
@pytest.mark.parametrize('nr_card, szov', [
    (1, '[**]'),
    (2, '[**,**]'),
    (3, '[**,**,**]'),
    (4, '[♣A,♡A,♢A,♠A]'),
    (5, '[♣A,♡A,♢A,♠A,**]'),
    (9, '[♣A,♡A,♢A,♠A,♣2,♡2,♢2,♠2,**]'),  # Because the 2 is mapped onto the Joker
    (55,
     '[♣3,♡3,♢3,♠3,♣4,♡4,♢4,♠4,♣5,♡5,♢5,♠5,♣6,♡6,♢6,♠6,♣7,♡7,♢7,♠7,♣8,♡8,♢8,♠8,♣9,♡9,♢9,♠9,♣0,♡0,♢0,♠0,♣J,♡J,♢J,♠J,'
     '♣Q,♡Q,♢Q,♠Q,♣K,♡K,♢K,♠K,♣A,♡A,♢A,♠A,♣2,♡2,♢2,♠2,**,**,**]'),
    (56,
     '[♣3,♡3,♢3,♠3,♣4,♡4,♢4,♠4,♣5,♡5,♢5,♠5,♣6,♡6,♢6,♠6,♣7,♡7,♢7,♠7,♣8,♡8,♢8,♠8,♣9,♡9,♢9,♠9,♣0,♡0,♢0,♠0,♣J,♡J,♢J,♠J,'
     '♣Q,♡Q,♢Q,♠Q,♣K,♡K,♢K,♠K,♣A,♡A,♢A,♠A,♣2,♡2,♢2,♠2,**,**,**,**]'),
    (59,
     '[♣3,♡3,♢3,♠3,♣4,♡4,♢4,♠4,♣5,♡5,♢5,♠5,♣6,♡6,♢6,♠6,♣7,♡7,♢7,♠7,♣8,♡8,♢8,♠8,♣9,♡9,♢9,♠9,♣0,♡0,♢0,♠0,♣J,♡J,♢J,♠J,'
     '♣Q,♡Q,♢Q,♠Q,♣K,♡K,♢K,♠K,♣A,♡A,♢A,♠A,♣A,♡A,♢A,♠A,♣2,♡2,♢2,♠2,**,**,**]'),
])
def test_dealer_param(nr_card, szov):
    """
    A Dealer constructed with ``nr_card`` cards prints as ``szov``.
    """
    dealer = Dealer(nr_card)
    assert dealer.__str__() == szov
@pytest.mark.parametrize('nr_card, nr_player, card_list', [
    (9, 2, ['[♣A,♢A,♣2,♢2,**]', '[♡A,♠A,♡2,♠2]']),
    (9, 3, ['[♣A,♣A,♠2]', '[♢A,♠2,♡2]', '[♡A,♢2,**]']),
    (9, 4, ['[♣A,♣2,**]', '[♡A,♡2]', '[♢A,♢2]', '[♠A,♠2]']),
    (9, 5, ['[♡A,♣2]', '[♢A,♡2]', '[♠A,♢2]', '[♠A,**]', '[♣2]']),
])
def test_deal_cards(nr_card, nr_player, card_list):
    """Dealing the cards among the players yields the expected hands."""
    players = [Player(idx) for idx in range(nr_player)]
    dealer = Dealer(nr_card)
    dealer.deal_cards(players, list(range(nr_player)), False)
    for idx, cards in enumerate(card_list):
        expected = str_to_card_list(cardstr=cards)
        hand = players[idx].hand
        assert len(expected) == len(hand)
        for pos, expected_card in enumerate(expected):
            expected_card.map_joker(Ertekek.C2)
            assert hand[pos] == expected_card
| 38.307692 | 119 | 0.457831 | 719 | 2,490 | 1.890125 | 0.098748 | 0.01766 | 0.019868 | 0.023547 | 0.440029 | 0.397351 | 0.364974 | 0.335541 | 0.323767 | 0.323767 | 0 | 0.094535 | 0.184337 | 2,490 | 64 | 120 | 38.90625 | 0.446578 | 0.051807 | 0 | 0.142857 | 0 | 0.163265 | 0.396299 | 0.300344 | 0 | 0 | 0 | 0 | 0.081633 | 1 | 0.061224 | false | 0 | 0.102041 | 0 | 0.163265 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c728a8568115e8eb73be1d5a92f063f4ca7b8de7 | 472 | py | Python | examples/football_calibration.py | ereide/pyga-camcal | fd25748ddb11c5b05ef24a2deca2689e0d899875 | [
"MIT"
] | 5 | 2018-05-22T09:11:31.000Z | 2022-03-11T02:32:01.000Z | examples/football_calibration.py | ereide/pyga-camcal | fd25748ddb11c5b05ef24a2deca2689e0d899875 | [
"MIT"
] | null | null | null | examples/football_calibration.py | ereide/pyga-camcal | fd25748ddb11c5b05ef24a2deca2689e0d899875 | [
"MIT"
] | null | null | null |
from calibration_common import *
from pygacal.geometry.transformations import *
from resources.football_court.court_image import *
from resources.football_court.court_model import *
if __name__ == "__main__":
img_model = FootballManCityCourtImage2()
model = PenaltyAreaFootballModel()
R_min = calibrate(model, img_model)
project_lines(model, img_model, R_min)
theta, t = versor_to_param(R_min)
print(versor_to_param(R_min))
| 18.88 | 50 | 0.739407 | 57 | 472 | 5.684211 | 0.508772 | 0.049383 | 0.117284 | 0.166667 | 0.333333 | 0.228395 | 0 | 0 | 0 | 0 | 0 | 0.002591 | 0.182203 | 472 | 24 | 51 | 19.666667 | 0.836788 | 0 | 0 | 0 | 0 | 0 | 0.017167 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.363636 | 0 | 0.363636 | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
c74169ab491bfd6937a30f9a3e7834c9bcbab8cf | 1,908 | py | Python | models/user.py | linkian209/keyforge_league | 6df5f21a7a86edb6e501264eb478134d1c57d675 | [
"MIT"
] | null | null | null | models/user.py | linkian209/keyforge_league | 6df5f21a7a86edb6e501264eb478134d1c57d675 | [
"MIT"
] | null | null | null | models/user.py | linkian209/keyforge_league | 6df5f21a7a86edb6e501264eb478134d1c57d675 | [
"MIT"
] | null | null | null | """models.user
This Module contains the User Model
"""
from app import db
from flask_login import UserMixin
from sqlalchemy import sql
class User(UserMixin, db.Model):
    """
    This model is used to store users. There is a mixin that manages sessions as well.

    Attributes:
        id (int): User ID
        name (str): User's Name
        email (str): User's Email
        password (str): A hashed version of the User's password
        decks (relationship): Decks owned by user
        seasons (relationship): Player's seasonal instances
        total_wins (int): Total number of games this user has won
        total_losses (int): Total number of games this user has lost
        create_date (DateTime): When the user was created
        update_date (DateTime): When the user was last updated

    Args:
        :param UserMixin: The Mixin from flask_login that maintains sessions
        :param db.Model: SQLAlchemy base class
    """

    # Fixed typo: was ``___tablename__`` (three leading underscores), which
    # SQLAlchemy silently ignores. The default name derived from the class is
    # also 'user', so the explicit declaration matches previous behavior.
    __tablename__ = 'user'

    # Attributes
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(128), nullable=False)
    email = db.Column(db.String(128), nullable=False)
    password = db.Column(db.String(128), nullable=False)
    decks = db.relationship(
        'Deck', backref='user', lazy=True, passive_deletes=True
    )
    seasons = db.relationship('SeasonPlayer', backref='user')
    total_wins = db.Column(db.Integer, default=0)
    total_losses = db.Column(db.Integer, default=0)
    create_date = db.Column(db.DateTime, server_default=sql.func.now())
    update_date = db.Column(
        db.DateTime, server_default=sql.func.now(), onupdate=sql.func.now()
    )

    # Functions
    def __repr__(self):
        """
        Representaion of this object as a string

        Args:
            :param self: This user object

        Returns:
            str: String representation of the user
        """
        return '<User {}>'.format(self.name)
c7428e78384a54b8e242d602515bd1128afe5518 | 1,160 | py | Python | gopublish/api/token.py | mboudet/go-publish | f258878e8ee77711576c69d2a39e9398cea3fade | [
"MIT"
] | null | null | null | gopublish/api/token.py | mboudet/go-publish | f258878e8ee77711576c69d2a39e9398cea3fade | [
"MIT"
] | 5 | 2021-07-01T13:21:45.000Z | 2022-02-11T13:55:35.000Z | gopublish/api/token.py | mboudet/gopublish | f258878e8ee77711576c69d2a39e9398cea3fade | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
from flask import (Blueprint, current_app, jsonify, make_response, request)
from gopublish.utils import authenticate_user
import jwt
token = Blueprint('token', __name__, url_prefix='/')


@token.route('/api/token/create', methods=['POST'])
def create_token():
    """Issue a signed JWT when valid credentials are POSTed as JSON.

    Responds 400 for a missing/incomplete body, 401 for bad credentials
    (verified against LDAP in prod mode only) and 200 with the token.
    """
    body = request.json
    if not body:
        return make_response(jsonify({'error': 'Missing body'}), 400)

    username = body.get("username")
    password = body.get("password")
    if not (username and password):
        return make_response(jsonify({'error': 'Missing either username or password in body'}), 400)

    # Only check ldap in prod
    if current_app.config['GOPUBLISH_RUN_MODE'] == "prod" and not authenticate_user(username, password, current_app.config):
        return make_response(jsonify({'error': 'Incorrect credentials'}), 401)

    expire_date = datetime.utcnow() + timedelta(hours=current_app.config.get('TOKEN_DURATION'))
    # Local renamed from ``token`` to avoid shadowing the blueprint above.
    encoded = jwt.encode({"username": username, "exp": expire_date}, current_app.config['SECRET_KEY'], algorithm="HS256")
    return make_response(jsonify({'token': encoded}), 200)
| 40 | 139 | 0.712069 | 148 | 1,160 | 5.418919 | 0.432432 | 0.082294 | 0.087282 | 0.124688 | 0.129676 | 0.092269 | 0 | 0 | 0 | 0 | 0 | 0.01503 | 0.139655 | 1,160 | 28 | 140 | 41.428571 | 0.788577 | 0.019828 | 0 | 0 | 0 | 0 | 0.198238 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0.176471 | 0.235294 | 0 | 0.529412 | 0.117647 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
c74399a9057111212f6691a685897f9bfbc4f04f | 2,874 | py | Python | exmo_lib.py | autocryptools/exmolib | 9fb635b476aa3a2fad62bbc1131948a6bddd37fc | [
"MIT"
] | null | null | null | exmo_lib.py | autocryptools/exmolib | 9fb635b476aa3a2fad62bbc1131948a6bddd37fc | [
"MIT"
] | null | null | null | exmo_lib.py | autocryptools/exmolib | 9fb635b476aa3a2fad62bbc1131948a6bddd37fc | [
"MIT"
] | null | null | null | import requests
# List of trades for a currency pair
def trad(name):
    """Return the recent trades for the given currency pair."""
    response = requests.get('https://api.exmo.com/v1/trades/?pair=' + name)
    return response.json()
# Order book for a currency pair
def order(name, limit):
    """Return the order book for the pair, limited to ``limit`` entries."""
    url = 'https://api.exmo.com/v1/order_book/?pair=' + name + '&limit=' + str(limit)
    response = requests.get(url)
    return response.json()
# Price and trade-volume statistics for all currency pairs
def ticker():
    """Return price and volume statistics for every currency pair."""
    response = requests.get('https://api.exmo.com/v1/ticker/')
    return response.json()
# Currency pair settings
def settings():
    """Return the exchange's currency-pair settings."""
    response = requests.get('https://api.exmo.com/v1/pair_settings/')
    return response.json()
# List of the currencies on the exchange
def currency():
    """Return the list of currencies supported by the exchange."""
    response = requests.get('https://api.exmo.com/v1/currency/')
    return response.json()
class pair:
    """Helper bound to a single currency pair on the EXMO exchange."""

    def __init__(self, name, limit=10, command='ask_top'):
        # name: pair identifier, e.g. 'BTC_USD'
        # limit: number of order-book rows to request
        # command: default order_book field to return
        self.name = name
        self.limit = limit
        self.command = command

    # high - maximum trade price over the last 24 hours
    # low - minimum trade price over the last 24 hours
    # avg - average trade price over the last 24 hours
    # vol - volume of all trades over the last 24 hours
    # vol_curr - total amount of all trades over the last 24 hours
    # last_trade - price of the last trade
    # buy_price - current highest bid price
    # sell_price - current lowest ask price
    # updated - date and time the data was refreshed
    def ticker(self):
        """Return the 24h ticker statistics for this pair."""
        get = requests.get('https://api.exmo.com/v1/ticker/')
        get = (get.json())
        return get[self.name]

    # min_quantity - minimum order quantity
    # max_quantity - maximum order quantity
    # min_price - minimum order price
    # max_price - maximum order price
    # min_amount - minimum order amount
    # max_amount - maximum order amount
    def settings(self):
        """Return the exchange settings for this pair."""
        get = requests.get('https://api.exmo.com/v1/pair_settings/')
        get = (get.json())
        return get[self.name]

    # ask_quantity - volume of all sell orders
    # ask_amount - total amount of all sell orders
    # ask_top - lowest ask price
    # bid_quantity - volume of all buy orders
    # bid_amount - total amount of all buy orders
    # bid_top - highest bid price
    # bid - list of buy orders; each row is price, quantity and amount
    # ask - list of sell orders; each row is price, quantity and amount
    def order_book(self, command):
        """Return one field of this pair's order book (also updates self.command)."""
        self.command = command
        data = order(self.name, self.limit)[self.name][self.command]
        return data

    # trade_id - trade identifier
    # type - trade type
    # price - trade price
    # quantity - trade quantity
    # amount - trade amount
    # date - date and time of the trade in Unix format
    def trades(self):
        """Return the list of recent trades for this pair."""
        data = trad(self.name)[self.name]
        return data
| 29.326531 | 99 | 0.653097 | 395 | 2,874 | 4.686076 | 0.270886 | 0.034576 | 0.052944 | 0.071853 | 0.386818 | 0.231767 | 0.231767 | 0.223123 | 0.131821 | 0.127499 | 0 | 0.00874 | 0.243563 | 2,874 | 97 | 100 | 29.628866 | 0.842686 | 0.465205 | 0 | 0.536585 | 0 | 0 | 0.175217 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.243902 | false | 0 | 0.02439 | 0 | 0.512195 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
c784960d26b706760a966bebf17e3b14a38734f5 | 965 | py | Python | rosalind/spec.py | AntoineAugusti/katas | 2fa650ba3bcad105e93b2b337e0e8d39760c3b38 | [
"MIT"
] | 7 | 2016-01-17T13:21:57.000Z | 2019-11-24T23:20:13.000Z | rosalind/spec.py | AntoineAugusti/katas | 2fa650ba3bcad105e93b2b337e0e8d39760c3b38 | [
"MIT"
] | null | null | null | rosalind/spec.py | AntoineAugusti/katas | 2fa650ba3bcad105e93b2b337e0e8d39760c3b38 | [
"MIT"
] | null | null | null | # http://rosalind.info/problems/spec/
MONOISOTOPIC_MASS_TABLE = {
'A': 71.03711,
'C': 103.00919,
'D': 115.02694,
'E': 129.04259,
'F': 147.06841,
'G': 57.02146,
'H': 137.05891,
'I': 113.08406,
'K': 128.09496,
'L': 113.08406,
'M': 131.04049,
'N': 114.04293,
'P': 97.05276,
'Q': 128.05858,
'R': 156.10111,
'S': 87.03203,
'T': 101.04768,
'V': 99.06841,
'W': 186.07931,
'Y': 163.06333,
}
def solve(weights):
# Invert the table and drop some accuracy
invertedTable = dict((round(v, 4), k) for k, v in MONOISOTOPIC_MASS_TABLE.iteritems())
res = ''
for i in range(1, len(weights)):
a = weights[i]
b = weights[i-1]
# Find the appropriate letter thanks to the weight
res += invertedTable[round(a-b, 4)]
return res
f = open("rosalind_spec.txt", "r")
weights = []
for content in f:
weights.append(float(content))
print solve(weights)
| 20.978261 | 90 | 0.559585 | 141 | 965 | 3.794326 | 0.695035 | 0.059813 | 0.078505 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.223001 | 0.26114 | 965 | 45 | 91 | 21.444444 | 0.527349 | 0.128497 | 0 | 0 | 0 | 0 | 0.0454 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.028571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c7869df18c3e0532926e362932f07b7918b110ba | 845 | py | Python | level2scraper/Subscribers/Bitmex/DataObject.py | cryptassic/level2Scraper | 209ee769439c04f8db6e9ca941b89b6a1922a037 | [
"MIT"
] | null | null | null | level2scraper/Subscribers/Bitmex/DataObject.py | cryptassic/level2Scraper | 209ee769439c04f8db6e9ca941b89b6a1922a037 | [
"MIT"
] | null | null | null | level2scraper/Subscribers/Bitmex/DataObject.py | cryptassic/level2Scraper | 209ee769439c04f8db6e9ca941b89b6a1922a037 | [
"MIT"
] | null | null | null |
class BitmexDataIterator():
    """Iterator over a BitmexDataStructure.

    NOTE(review): appears unfinished — ``__next__`` is a stub (returns
    None) and no ``__iter__`` is defined, so this is not yet a working
    iterator.
    """

    def __init__(self,data):
        # Structure being iterated and the current position within it.
        self._data = data
        self._index = 0

    def __next__(self):
        # TODO: return the next element and raise StopIteration at the end.
        pass
class BitmexDataStructure():
    """
    Data Object for Bitmex data manipulation.

    Holds data for a single symbol. NOTE(review): several methods are
    still stubs — ``add_data``/``save_data``/``_ensure_correct_data_format``
    bodies are effectively ``pass``.
    """
    def __init__(self,symbol,use_compression=False):
        # Instrument symbol this structure tracks.
        self._symbol = symbol
        # Header/data/size are populated later (all start unset).
        self._header = None
        self._data = None
        self._size = None
        # Whether save_data should compress its output.
        self._use_compression = use_compression

    def add_data(self,data):
        """Append incoming data (not yet implemented)."""
        if self._data:
            self._ensure_correct_data_format(data)
            pass
        else:
            pass

    def save_data(self):
        """Persist the collected data (not yet implemented)."""
        if self._use_compression:
            pass
        else:
            pass

    def _ensure_correct_data_format(self,data_to_compare):
        # Placeholder for a format/consistency check on incoming data.
        pass

    def __iter__(self):
        # Delegates iteration to the (still stubbed) iterator class.
        return BitmexDataIterator(self)
c7871c9039707712331072de3aa57eb35c24cc1b | 1,315 | py | Python | plugins/size_canvas_indicator/size_canvas_indicator.py | gil9red/fake-painter | 8008f4b9a156e8363fce464310c20d229114af47 | [
"MIT"
] | 3 | 2019-06-11T19:13:40.000Z | 2020-10-27T06:06:43.000Z | plugins/size_canvas_indicator/size_canvas_indicator.py | gil9red/fake-painter | 8008f4b9a156e8363fce464310c20d229114af47 | [
"MIT"
] | null | null | null | plugins/size_canvas_indicator/size_canvas_indicator.py | gil9red/fake-painter | 8008f4b9a156e8363fce464310c20d229114af47 | [
"MIT"
] | 1 | 2021-05-22T05:38:15.000Z | 2021-05-22T05:38:15.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of fake-painter, by Ilya Petrash
# and is licensed under the MIT license, under the terms listed within
# LICENSE which is included with the source of this package
__author__ = 'ipetrash'
from iplugin import IPlugin
from PySide.QtGui import QLabel
class PluginSizeCanvasIndicator(IPlugin):
    """Plugin showing the current canvas size in the status bar.

    The label is kept up to date via the main window's
    ``send_new_image_size`` signal.
    """

    def __init__(self, data_singleton):
        self.data_singleton = data_singleton
        # Status-bar widget displaying "<width> x <height>".
        self.label = QLabel()
        self.mw = self.data_singleton.mainWindow

    def name(self):
        """Human-readable plugin name."""
        return 'Size Canvas Indicator'

    def version(self):
        """Plugin version string."""
        return '0.0.1'

    def description(self):
        """Short description shown in plugin listings."""
        return 'Size Canvas Indicator'

    def initialize(self):
        """Attach the label to the status bar and start tracking resizes."""
        self.label.setVisible(True)
        self.mw.ui.statusbar.addWidget(self.label)
        # self.mw.ui.statusbar.addPermanentWidget(self.label, 1)
        self.mw.send_new_image_size.connect(self.update_label)
        # Show the size of the canvas that is already open, if any.
        canvas = self.mw.get_current_canvas()
        if canvas is not None:
            self.update_label(canvas.width(), canvas.height())

    def destroy(self):
        """Detach the label and disconnect the resize signal."""
        self.mw.ui.statusbar.removeWidget(self.label)
        self.mw.send_new_image_size.disconnect(self.update_label)

    def update_label(self, w, h):
        """Refresh the label text with the new canvas width/height."""
        self.label.setText('{} x {}'.format(w, h))
| 27.978723 | 70 | 0.677567 | 176 | 1,315 | 4.926136 | 0.482955 | 0.048443 | 0.058824 | 0.058824 | 0.124567 | 0.124567 | 0 | 0 | 0 | 0 | 0 | 0.005831 | 0.21749 | 1,315 | 46 | 71 | 28.586957 | 0.836735 | 0.209886 | 0 | 0.076923 | 0 | 0 | 0.060019 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.269231 | false | 0 | 0.076923 | 0.115385 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
c79392b2fcdda14f81b95981762459a637e3e1a2 | 2,493 | py | Python | gap/tracker.py | Labgoo/google-analytics-for-python | a9572e881f55a4717a1e77b19e18a07ed8110ddb | [
"MIT"
] | null | null | null | gap/tracker.py | Labgoo/google-analytics-for-python | a9572e881f55a4717a1e77b19e18a07ed8110ddb | [
"MIT"
] | null | null | null | gap/tracker.py | Labgoo/google-analytics-for-python | a9572e881f55a4717a1e77b19e18a07ed8110ddb | [
"MIT"
] | null | null | null | __author__ = 'minhtule'
from request import *
class Tracker(object):
    """Sends Google Analytics hits on behalf of a single visitor.

    Read-only properties expose the tracking id and the visitor's
    details; the ``send_*`` methods build and dispatch page-view,
    transaction and item tracking requests.
    """

    def __init__(self, tracking_id, visitor):
        self.__tracking_id = tracking_id
        self.__visitor = visitor
        self.__debug_enabled = False

    @property
    def tracking_id(self):
        """Analytics property id hits are reported against."""
        return self.__tracking_id

    @property
    def visitor(self):
        """The visitor object this tracker was created for."""
        return self.__visitor

    @property
    def client_id(self):
        """Unique id of the tracked visitor."""
        return self.__visitor.id

    @property
    def debug_enabled(self):
        """Whether debug mode is switched on for this tracker."""
        return self.__debug_enabled

    @debug_enabled.setter
    def debug_enabled(self, value):
        self.__debug_enabled = value

    @property
    def original_request_ip(self):
        return self.visitor.ip_address

    @property
    def original_request_user_agent(self):
        return self.visitor.user_agent

    @property
    def original_request_language(self):
        return self.visitor.language

    # --- Public API --------------------------------------------------

    def send_page(self, hostname=None, path=None, title=None):
        """Report a page view; host/path fall back to the visitor's."""
        request = PageTrackingRequest(
            self,
            document_hostname=hostname or self.visitor.document_host,
            document_path=path or self.visitor.document_path,
            document_title=title,
        )
        request.send()

    def send_transaction(self, transaction_id, transaction_affiliation=None,
                         transaction_revenue=None, transaction_shipping=None,
                         transaction_tax=None, currency_code=None):
        """Report an e-commerce transaction."""
        details = dict(
            transaction_affiliation=transaction_affiliation,
            transaction_revenue=transaction_revenue,
            transaction_shipping=transaction_shipping,
            transaction_tax=transaction_tax,
            currency_code=currency_code,
        )
        TransactionTrackingRequest(self, transaction_id, **details).send()

    def send_item(self, transaction_id, item_name, item_price=None,
                  item_quantity=None, item_code=None, item_category=None,
                  currency_code=None):
        """Report a single item belonging to a transaction."""
        details = dict(
            item_price=item_price,
            item_quantity=item_quantity,
            item_code=item_code,
            item_category=item_category,
            currency_code=currency_code,
        )
        ItemTrackingRequest(self, transaction_id, item_name, **details).send()
class CustomVariable(object):
    """Read-only (index, value) pair for an Analytics custom variable."""

    def __init__(self, index, value):
        self.__index = index
        self.__value = value

    @property
    def index(self):
        """Slot index of the custom variable."""
        return self.__index

    @property
    def value(self):
        """Payload stored in the slot."""
        return self.__value
c79b7ae0adf51fe937c48652d262890e20f1650a | 1,095 | py | Python | psdns/equations/__init__.py | lanl/PsDNS | 2fcb12d52e522906c93d7a28e5397cae81feb376 | [
"BSD-3-Clause"
] | 1 | 2022-03-10T21:34:31.000Z | 2022-03-10T21:34:31.000Z | psdns/equations/__init__.py | lanl/PsDNS | 2fcb12d52e522906c93d7a28e5397cae81feb376 | [
"BSD-3-Clause"
] | null | null | null | psdns/equations/__init__.py | lanl/PsDNS | 2fcb12d52e522906c93d7a28e5397cae81feb376 | [
"BSD-3-Clause"
] | null | null | null | r"""Pseudo-spectral implementations of some useful equations
In PsDNS, equations are represented as class objects that implement a
:meth:`rhs` method. The :meth:`rhs` method takes one argument,
*uhat*, which is the solution vector, normally in spectral space
expressed as a :class:`~psdns.bases.SpectralArray`, and it returns the
right-hand side vector. That is, for the PDE
.. math::
\frac{\partial}{\partial t} \boldsymbol{U}(t)
= \boldsymbol{F}[\boldsymbol{U}]
the :meth:`rhs` method takes :math:`\boldsymbol{U}` and returns
:math:`\boldsymbol{F}[\boldsymbol{U}]`.
Since normally only a single equation is needed in a given script,
no equations are imported by default when importing :mod:`psdns`.
Users can also implement their own equations. There is no need to
subclass from any specific base class, any class that implements a
:meth:`rhs` method can be used as an equation.
The :mod:`~psdns.equations` sub-module also includes some functions
that return initial conditions for certain canonical problems, either
in the form of stand-alone functions, or class methods.
"""
| 39.107143 | 70 | 0.757078 | 169 | 1,095 | 4.905325 | 0.573965 | 0.033776 | 0.062726 | 0.033776 | 0.050663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146119 | 1,095 | 27 | 71 | 40.555556 | 0.886631 | 0.991781 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c79fe68c9b703989e0d95ac1a25fed10ab6dea51 | 426 | py | Python | mvpsite/custom_helpers/messages.py | mianamir/advance_django_rest_framework_project | 3870f2dbe7b585a236928f90c1792cd337ce8911 | [
"MIT"
] | null | null | null | mvpsite/custom_helpers/messages.py | mianamir/advance_django_rest_framework_project | 3870f2dbe7b585a236928f90c1792cd337ce8911 | [
"MIT"
] | null | null | null | mvpsite/custom_helpers/messages.py | mianamir/advance_django_rest_framework_project | 3870f2dbe7b585a236928f90c1792cd337ce8911 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Provides custom messages.
"""
# Canned response messages reused across the API views.
ADD_RECORD_NOT_ALLOWED = 'Adding record is not allowed.'
SUCCESSFULL_RESPONSE_MESSAGE = 'You have successfully completed this operation.'
FAILED_RESPONSE_MESSAGE = 'Error: you have attempted wrong operation.'
# Plain string: the original was an f-string with no placeholders (F541).
VENDING_MACHINE_COINS_VALID_MESSAGE = 'users with a “buyer” role can only deposit 5, 10, 20, 50 and 100 cent coins into their vending machine account.'
| 25.058824 | 152 | 0.776995 | 61 | 426 | 5.245902 | 0.803279 | 0.0625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027397 | 0.143192 | 426 | 16 | 153 | 26.625 | 0.849315 | 0.107981 | 0 | 0 | 0 | 0.25 | 0.618919 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c7b15a05e538a521b829f04133128b2116844221 | 1,418 | py | Python | setup.py | Kasape/pypylon-opencv-viewer | 888357c3f1c16b9a1e16a2017a4b5d3b2b957a80 | [
"MIT"
] | 26 | 2018-10-02T11:17:42.000Z | 2022-02-23T21:57:32.000Z | setup.py | magiczyf/pypylon-opencv-viewer | 060fb11135f5bdabf2c5f41a7e8687b9ddd5d881 | [
"MIT"
] | 3 | 2019-08-01T23:30:11.000Z | 2020-08-25T09:01:09.000Z | setup.py | magiczyf/pypylon-opencv-viewer | 060fb11135f5bdabf2c5f41a7e8687b9ddd5d881 | [
"MIT"
] | 11 | 2018-10-03T12:42:01.000Z | 2021-12-15T03:11:53.000Z | import os
from setuptools import find_packages, setup
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
# Read the long description; pin UTF-8 so installs don't depend on the
# machine's locale (the original relied on the platform default encoding).
with open('README.md', encoding='utf-8') as f:
    long_description = f.read()
# Distribution metadata for the pypylon-opencv-viewer package.
setup(
    name='pypylon-opencv-viewer',
    packages=find_packages(),
    version='1.0.3',
    description='Impro function application while saving and getting image',
    long_description=long_description,
    long_description_content_type='text/markdown',
    license='MIT License',
    author='Maksym Balatsko',
    author_email='mbalatsko@gmail.com',
    url='https://github.com/mbalatsko/pypylon-opencv-viewer',
    # Tarball for this exact release; must stay in sync with `version`.
    download_url='https://github.com/mbalatsko/pypylon-opencv-viewer/archive/1.0.3.tar.gz',
    install_requires=[
        'jupyter',
        'pypylon',
        'ipywidgets',
        'ipython'
    ],
    # NOTE(review): 'jypyter' looks like a typo for 'jupyter' — confirm
    # before changing, since keywords are published metadata.
    keywords=['basler', 'pypylon', 'opencv', 'jypyter', 'pypylon viewer', 'opencv pypylon'],
    classifiers=[
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.0',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Operating System :: OS Independent'
    ],
) | 35.45 | 92 | 0.639633 | 161 | 1,418 | 5.540373 | 0.490683 | 0.191704 | 0.252242 | 0.262332 | 0.161435 | 0.100897 | 0.100897 | 0.100897 | 0 | 0 | 0 | 0.020481 | 0.208039 | 1,418 | 40 | 93 | 35.45 | 0.77382 | 0 | 0 | 0.054054 | 0 | 0.027027 | 0.508104 | 0.014799 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.054054 | 0 | 0.054054 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c7c0c0a4f5ca1699cb3fa80bb52496d47fcbaec8 | 736 | py | Python | ProsperCron/{{cookiecutter.project_name}}/{{cookiecutter.library_name}}/{{cookiecutter.cli_name}}.py | EVEprosper/ProsperCookiecutters | 569ca0c311a5ead2b49f0cdde4cb2ad14dcd3a2c | [
"MIT"
] | null | null | null | ProsperCron/{{cookiecutter.project_name}}/{{cookiecutter.library_name}}/{{cookiecutter.cli_name}}.py | EVEprosper/ProsperCookiecutters | 569ca0c311a5ead2b49f0cdde4cb2ad14dcd3a2c | [
"MIT"
] | null | null | null | ProsperCron/{{cookiecutter.project_name}}/{{cookiecutter.library_name}}/{{cookiecutter.cli_name}}.py | EVEprosper/ProsperCookiecutters | 569ca0c311a5ead2b49f0cdde4cb2ad14dcd3a2c | [
"MIT"
] | null | null | null | """launcher/wrapper for executing CLI"""
from os import path
import platform
import logging
from plumbum import cli
import prosper.common.prosper_cli as p_cli
import prosper.common.prosper_logging as p_logging
import prosper.common.prosper_config as p_config
from . import _version
HERE = path.abspath(path.dirname(__file__))
class {{cookiecutter.cli_name}}CLI(p_cli.ProsperApplication):
PROGNAME = _version.PROGNAME
VERSION = _version.__version__
config_path = path.join(HERE, 'app.cfg')
def main(self):
"""launcher logic"""
self.logger.info('hello world')
def run_main():
"""entry point for launching app"""
{{cookiecutter.cli_name}}CLI.run()
if __name__ == '__main__':
run_main()
| 23 | 61 | 0.728261 | 99 | 736 | 5.10101 | 0.434343 | 0.077228 | 0.112871 | 0.154455 | 0.114851 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.161685 | 736 | 31 | 62 | 23.741935 | 0.818477 | 0 | 0 | 0 | 0 | 0 | 0.040562 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.421053 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
c7c3e6cf1622b4615ac8a67c8aa6d802571777f5 | 382 | py | Python | ptrack/templatetags/ptrack.py | unformatt/django-ptrack | 9f87b6ed37ec72525de513376f31566bb14d2b9c | [
"Apache-2.0"
] | null | null | null | ptrack/templatetags/ptrack.py | unformatt/django-ptrack | 9f87b6ed37ec72525de513376f31566bb14d2b9c | [
"Apache-2.0"
] | null | null | null | ptrack/templatetags/ptrack.py | unformatt/django-ptrack | 9f87b6ed37ec72525de513376f31566bb14d2b9c | [
"Apache-2.0"
] | null | null | null | """Ptrack Template Tag"""
import logging
from django import template
from django.utils.html import mark_safe
logger = logging.getLogger(__name__)
register = template.Library()
@register.simple_tag
def ptrack(*args, **kwargs):
    """Render a tracking-pixel <img> element for use in templates."""
    # Imported lazily to avoid a circular import with the ptrack package.
    from ptrack import create_img
    return mark_safe(create_img(*args, **kwargs))
| 22.470588 | 53 | 0.735602 | 51 | 382 | 5.333333 | 0.54902 | 0.073529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.157068 | 382 | 16 | 54 | 23.875 | 0.844721 | 0.164921 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.4 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
c7c874dd64e023ef97934c77e6f714b0809ae51b | 192 | py | Python | vbbot/urls.py | kirichk/crewing_platform | a7d227811588a91e5db789d7d252c172b0ba3b58 | [
"MIT"
] | null | null | null | vbbot/urls.py | kirichk/crewing_platform | a7d227811588a91e5db789d7d252c172b0ba3b58 | [
"MIT"
] | 1 | 2022-02-16T18:34:19.000Z | 2022-02-16T18:34:19.000Z | vbbot/urls.py | kirichk/crewing_platform | a7d227811588a91e5db789d7d252c172b0ba3b58 | [
"MIT"
] | null | null | null | from django.urls import path
from django.conf import settings
from vbbot import views
# URL namespace used when reversing this app's routes.
app_name = 'vbbot'
# The Viber token doubles as the (hard-to-guess) webhook URL path.
token = settings.VIBER_TOKEN
urlpatterns = [
    path(token, views.viber_app),
]
| 17.454545 | 33 | 0.744792 | 27 | 192 | 5.185185 | 0.518519 | 0.142857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.177083 | 192 | 10 | 34 | 19.2 | 0.886076 | 0 | 0 | 0 | 0 | 0 | 0.026042 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.375 | 0 | 0.375 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
c7cd963c97220b0d110b5b308e11595d13e91187 | 668 | py | Python | helpers.py | MattJAshworth/UniSecrets | 9a6bd50cf32cf5231e68c7cd465ad19aa06a95df | [
"MIT"
] | null | null | null | helpers.py | MattJAshworth/UniSecrets | 9a6bd50cf32cf5231e68c7cd465ad19aa06a95df | [
"MIT"
] | null | null | null | helpers.py | MattJAshworth/UniSecrets | 9a6bd50cf32cf5231e68c7cd465ad19aa06a95df | [
"MIT"
] | null | null | null | import facebook
import os
# Constants
# NOTE(review): Facebook app credentials and a user access token are
# hard-coded and committed to source control — they should be moved to
# environment variables / a config file and rotated.
app_id = '377531436400407'
app_key = '50f56416e1297dfd609b3a7b2ef1ef90'
access_token = 'EAAFXXOMofxcBAIcctnErMVmOTZB2riOf1fDy6rrXUPYZBnPeujHHDP5rpK5xz5FG3vEiQcxTZBKNYAiIaafrDsrPBmAYnl4SYVYZAJN7te1ATLpuZBjVXTUkPeFwV2tLQb7juZBGxdMwXxfM6WJeVBIIDZA48ZAkBAHyUyZCyjZAg5CwZDZD'
def fbpost(msg, img):
    """Post `msg` to the authenticated user's Facebook feed.

    :param msg: text of the post.
    :param img: currently unused; kept for interface compatibility with
        callers (photo posting was disabled in this version).
    """
    try:
        graph = facebook.GraphAPI(access_token)
        graph.put_object(parent_object='me', connection_name='feed', message=msg)
    except Exception as e:
        # Best-effort: report the failure and carry on rather than crash.
        # (The redundant `pass` after print was removed.)
        print(e)
c7db9ab40ecfafa2942cbfa086b208e36734d6e2 | 135 | py | Python | 11_class_initialization/twelth_class.py | utkarshsaraf19/python-object-oriented-programming | f36d7b72567a3dc5c21da7653e0db6274a3f5516 | [
"CC0-1.0"
] | null | null | null | 11_class_initialization/twelth_class.py | utkarshsaraf19/python-object-oriented-programming | f36d7b72567a3dc5c21da7653e0db6274a3f5516 | [
"CC0-1.0"
] | null | null | null | 11_class_initialization/twelth_class.py | utkarshsaraf19/python-object-oriented-programming | f36d7b72567a3dc5c21da7653e0db6274a3f5516 | [
"CC0-1.0"
] | null | null | null | class RequiredClass:
pass
def main():
    """Instantiate RequiredClass and print its default repr."""
    instance = RequiredClass()
    print(instance)
# Run main() only when executed directly, not on import.
if __name__ == '__main__':
    main()
| 11.25 | 30 | 0.62963 | 13 | 135 | 5.923077 | 0.692308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.251852 | 135 | 11 | 31 | 12.272727 | 0.762376 | 0 | 0 | 0 | 0 | 0 | 0.059259 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0.142857 | 0 | 0 | 0.285714 | 0.142857 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
c7f2aaba63a525abe9a69868cc54f74c5b69d0d9 | 975 | py | Python | setup.py | iacchus/colors.py | 38446c0631088723ce982695ff50868370d4581e | [
"BSD-2-Clause"
] | null | null | null | setup.py | iacchus/colors.py | 38446c0631088723ce982695ff50868370d4581e | [
"BSD-2-Clause"
] | null | null | null | setup.py | iacchus/colors.py | 38446c0631088723ce982695ff50868370d4581e | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
"""
colors.py
=========
Convert colors between rgb, hsv, and hex, perform arithmetic, blend modes,
and generate random colors within boundaries.
"""
from setuptools import setup, find_packages
# Distribution metadata for the colors.py package.
setup(
    name='colors.py',
    version='0.2.2',
    author='Iacchus',
    # NOTE(review): address has a '.' right before '@' — confirm validity.
    author_email='kassivs.@gmail.com',
    url='https://github.com/iacchus/color.py',
    description='Convert and manipulate color values',
    # Reuse this file's module docstring as the long description.
    long_description=__doc__,
    license='BSD',
    packages=find_packages(),
    zip_safe=False,
    classifiers=[
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python',
        'Topic :: Software Development',
    ]
)
| 29.545455 | 74 | 0.64 | 105 | 975 | 5.857143 | 0.647619 | 0.123577 | 0.162602 | 0.126829 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010512 | 0.219487 | 975 | 32 | 75 | 30.46875 | 0.797635 | 0.165128 | 0 | 0 | 1 | 0 | 0.527295 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.041667 | 0 | 0.041667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
1bde984571b7f4f4e48121c1095f28ca435b7f01 | 6,195 | py | Python | PyFile/pyfile.py | chyka-dev/PyFile | a52e69c712c10934bc88c0b75b3f536e12303c83 | [
"MIT"
] | null | null | null | PyFile/pyfile.py | chyka-dev/PyFile | a52e69c712c10934bc88c0b75b3f536e12303c83 | [
"MIT"
] | null | null | null | PyFile/pyfile.py | chyka-dev/PyFile | a52e69c712c10934bc88c0b75b3f536e12303c83 | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
import os
import six
from .pystring import PyString
class PyFile(object):
    """More human-friendly file access interface.

    Works on Python 2 and 3. The file is (re)opened lazily with the
    mode each operation needs, so reads and writes can be freely
    interleaved; switching modes reopens the file, which resets the
    file position.

    Usage:
        file = PyFile(".bashrc")
        file.write("Hello, world!!")
        print(file.read())
        del file
    """

    class Mode:
        # Binary modes: encoding is handled explicitly by this class.
        r = "rb"    # read only
        w = "wb+"   # write, truncating existing content
        a = "ab+"   # append at the end

    def __init__(self, path, encoding="utf-8"):
        self.path = path
        self.encoding = encoding
        self.mode = None   # mode the file is currently opened with
        self._fd = None    # underlying file object; None while closed

    def __del__(self):
        # Release the descriptor when the wrapper is garbage-collected.
        self.ensure_close()

    def __str__(self):
        # Bug fix: the closing '>' was missing from the format string.
        return "<File object: path={}, encoding={} mode={}>".format(
            self.path, self.encoding, self.mode
        )

    def __iter__(self):
        """Iterate over the file line by line, yielding PyString lines."""
        self.ensure_open(self.Mode.r)
        for l in self._fd:
            yield PyString(l)

    def statinfo(self):
        """Return the full os.stat() result for the wrapped path."""
        return os.stat(self.path)

    def size(self):
        """Return the file size in bytes."""
        return os.stat(self.path).st_size

    def top(self):
        """Rewind to the beginning of the file.

        Usage:
            >>> file = PyFile("hello.txt")
            >>> print(file.read())
            hello, world
            >>> print(file.read())
            >>> print(file.top())
            >>> print(file.read())
            hello, world
        """
        return self.seek(0)

    def end(self):
        """Move to the end of the file.

        Usage:
            >>> file = PyFile("hello.txt")
            >>> print(file.end())
            >>> print(file.read())
        """
        return self.seek(0, 2)

    def seek(self, *args, **kwargs):
        """Seek on the current descriptor.

        NOTE(review): original marked WIP — behaviour when a later
        operation switches modes (and thus reopens) is undefined.
        """
        return self._fd.seek(*args, **kwargs)

    def truncate(self, *args, **kwargs):
        """Truncate the current descriptor (original WIP note retained)."""
        return self._fd.truncate(*args, **kwargs)

    def read(self, *args, **kwargs):
        """Read and return file content as a PyString.

        Usage:
            >>> file = PyFile("hello.txt")
            >>> print(file.read())
            hello, world
        """
        self.ensure_open(self.Mode.r)
        return PyString(self._fd.read(*args, **kwargs))

    def readline(self, *args, **kwargs):
        """Read one line, returned as a PyString."""
        self.ensure_open(self.Mode.r)
        return PyString(self._fd.readline(*args, **kwargs))

    def readlines(self, *args, **kwargs):
        """Return a generator of all lines as PyString objects."""
        self.ensure_open(self.Mode.r)
        return (PyString(s) for s in self._fd.readlines(*args, **kwargs))

    def write(self, data, *args, **kwargs):
        """Write `data`, truncating any existing content.

        Usage:
            >>> file = PyFile("hello.txt")
            >>> file.write("hello, world")
            >>> print(file.read())
            hello, world
        """
        self.ensure_open(self.Mode.w)
        return self.__write(data, *args, **kwargs)

    def writelines(self, seq, *args, **kwargs):
        """Write `seq` as newline-terminated lines, truncating the file.

        Usage:
            >>> file = PyFile("hello.txt")
            >>> file.writelines(["hello", "world"])
            >>> print(file.read())
            hello
            world
        """
        self.ensure_open(self.Mode.w)
        seq = [self.__ensure_nl(line) for line in seq]
        return self.__writelines(seq, *args, **kwargs)

    def append(self, data, *args, **kwargs):
        """Append `data` at the end of the file.

        Usage:
            >>> file = PyFile("hello.txt")
            >>> print(file.read())
            hello
            >>> file.append(", world")
            >>> print(file.read())
            hello, world
        """
        self.ensure_open(self.Mode.a)
        return self.__write(data, *args, **kwargs)

    def appendlines(self, seq, *args, **kwargs):
        """Append `seq` as lines, starting on a fresh line.

        Usage:
            >>> file = PyFile("hello.txt")
            >>> print(file.read())
            hello
            >>> file.appendlines(["world", "!!"])
            >>> print(file.read())
            hello
            world
            !!
        """
        self.ensure_open(self.Mode.a)
        # The leading "" becomes a bare "\n" so appended lines start fresh.
        seq = [self.__ensure_nl("")] + [self.__ensure_nl(line) for line in seq]
        return self.__writelines(seq, *args, **kwargs)

    def open(self, mode, *args, **kwargs):
        """An alias of ensure_open.

        Usage:
            >>> file = PyFile(path, encoding).open(PyFile.Mode.r)
        """
        return self.ensure_open(mode, *args, **kwargs)

    def close(self, *args, **kwargs):
        """An alias of ensure_close."""
        self.ensure_close(*args, **kwargs)

    def ensure_open(self, mode, *args, **kwargs):
        """Open the file with `mode` if not already open in that mode.

        Usually not called directly; use read/write/append instead.
        """
        if self._fd and self.mode == mode:
            return self
        # Bug fix: close (and flush) a descriptor opened with a different
        # mode before reopening — the original leaked it, which could also
        # leave buffered writes unflushed when switching to reading.
        self.ensure_close()
        self.mode = mode
        self._fd = self.__open(
            self.path, mode, *args, **kwargs
        )
        return self

    def ensure_close(self, *args, **kwargs):
        """Close the file if it is open; safe to call repeatedly."""
        if not self._fd:
            return
        self._fd.close(*args, **kwargs)
        self._fd = None
        self.mode = None
        return

    def __ensure_nl(self, string):
        """Return `string` with a trailing newline guaranteed.

        >>> assert self.__ensure_nl("") == "\\n"
        >>> assert self.__ensure_nl("hello") == "hello\\n"
        """
        if not string.endswith("\n"):
            string += "\n"
        return string

    def __write(self, data, *args, **kwargs):
        """Encode `data` with this file's encoding and write it."""
        data = PyString(data, self.encoding)
        self._fd.write(data.encode(self.encoding), *args, **kwargs)

    def __writelines(self, seq, *args, **kwargs):
        """Encode each element of `seq` and write them all."""
        seq = [PyString(s, self.encoding).encode(self.encoding) for s in seq]
        self._fd.writelines(seq, *args, **kwargs)

    def __open(self, *args, **kwargs):
        # In python2, open doesn't accept `encoding`.
        # In python3, `encoding` cannot be specified in binary mode.
        if 'encoding' in kwargs:
            del kwargs['encoding']
        return open(*args, **kwargs)
| 26.934783 | 80 | 0.501211 | 696 | 6,195 | 4.343391 | 0.172414 | 0.105855 | 0.055905 | 0.053589 | 0.450877 | 0.430367 | 0.382402 | 0.324181 | 0.288124 | 0.257691 | 0 | 0.00222 | 0.345601 | 6,195 | 229 | 81 | 27.052402 | 0.743463 | 0.319774 | 0 | 0.21978 | 0 | 0 | 0.021163 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.274725 | false | 0 | 0.032967 | 0.032967 | 0.56044 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
1be39096bfb4bd1cb1562e5c7bc5c7bdb619032a | 264 | py | Python | Web/Member/FunctionForPI/Client.py | tratitude/BridgeMaster | e3916b077d96f3520d0a8ed9bb548d614465aa2e | [
"Apache-2.0"
] | 1 | 2021-01-05T14:40:08.000Z | 2021-01-05T14:40:08.000Z | Web/Member/FunctionForPI/Client.py | fdmdkw/BridgeMaster | e3916b077d96f3520d0a8ed9bb548d614465aa2e | [
"Apache-2.0"
] | 1 | 2021-10-19T08:05:06.000Z | 2021-10-19T08:05:06.000Z | Web/Member/FunctionForPI/Client.py | fdmdkw/BridgeMaster | e3916b077d96f3520d0a8ed9bb548d614465aa2e | [
"Apache-2.0"
] | 2 | 2019-10-21T15:25:37.000Z | 2021-03-17T06:59:09.000Z | from websocket import create_connection
# Manual smoke test: open a websocket to the local Member endpoint,
# send a greeting, and echo whatever the server replies.
ws = create_connection("ws://localhost:8000/Member/on_open/MGD4")
print("Sending 'Hello, World'...")
ws.send("Hello, World")
print("Sent")
print("Receiving...")
# Blocks until the server responds.
result = ws.recv()
print("Received '%s'" % result)
ws.close()
1be949bdf7e32a2c2e1afaf41efcba68735dc3bf | 4,525 | py | Python | mooiter/account.py | Meepnix/Mooiter | 7ee62812ad3f5077695edac090c5ddbfeb00d204 | [
"MIT"
] | 1 | 2015-08-10T13:03:00.000Z | 2015-08-10T13:03:00.000Z | mooiter/account.py | Meepnix/Mooiter | 7ee62812ad3f5077695edac090c5ddbfeb00d204 | [
"MIT"
] | null | null | null | mooiter/account.py | Meepnix/Mooiter | 7ee62812ad3f5077695edac090c5ddbfeb00d204 | [
"MIT"
] | 2 | 2015-06-29T08:05:25.000Z | 2021-07-31T23:23:20.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Mooiter
# Copyright 2010 Christopher Massey
# See LICENCE for details.
import sys
import base64
#Test third party modules
try:
import tweepy
from PyQt4 import QtGui
from PyQt4 import QtCore
except ImportError as e:
print "Import Error" + e
class TwitterAccount(QtGui.QDialog):
    """Dialog for entering, saving and deleting Twitter credentials.

    Credentials are stored base64-encoded in QSettings. NOTE(review):
    base64 is an encoding, not encryption — the password is effectively
    stored in plain text.
    """
    def __init__(self, Parent=None):
        super(TwitterAccount, self).__init__(Parent)
        # Garbage collect on dialog close
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.settings = QtCore.QSettings("cutiepie4", "Mooiter")
        self.setWindowTitle("Account")
        # Layouts: labels and edits side by side, buttons in a row below.
        vbox = QtGui.QVBoxLayout()
        hbox = QtGui.QHBoxLayout()
        vboxlabels = QtGui.QVBoxLayout()
        vboxedits = QtGui.QVBoxLayout()
        hboxbuttons = QtGui.QHBoxLayout()
        delete = QtGui.QPushButton('&Delete')
        buttonbox = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Save|
                                           QtGui.QDialogButtonBox.Close)
        # Create horizontal line separating the form from the buttons.
        seperator = QtGui.QFrame()
        seperator.setFrameShape(QtGui.QFrame.HLine)
        seperator.setFrameShadow(QtGui.QFrame.Sunken)
        self.useredit = QtGui.QLineEdit()
        self.passwordedit = QtGui.QLineEdit()
        self.useredit.setMinimumWidth(200)
        self.passwordedit.setMinimumWidth(200)
        # Mask the password as it is typed.
        self.passwordedit.setEchoMode(QtGui.QLineEdit.Password)
        labeluser = QtGui.QLabel("&Username:")
        labelpassword = QtGui.QLabel("&Password:")
        labeluser.setBuddy(self.useredit)
        labelpassword.setBuddy(self.passwordedit)
        vboxlabels.addWidget(labeluser)
        vboxlabels.addWidget(labelpassword)
        vboxedits.addWidget(self.useredit)
        vboxedits.addWidget(self.passwordedit)
        hboxbuttons.addStretch()
        hboxbuttons.addWidget(delete)
        hboxbuttons.addWidget(buttonbox)
        hbox.addLayout(vboxlabels)
        hbox.addLayout(vboxedits)
        vbox.addLayout(hbox)
        vbox.addWidget(seperator)
        vbox.addLayout(hboxbuttons)
        self.setLayout(vbox)
        self.useredit.setFocus()
        self.setTabOrder(self.useredit, self.passwordedit)
        self.setTabOrder(delete, buttonbox)
        # Wire buttons: Save -> new_account, Close -> reject, Delete ->
        # delete_account (old-style SIGNAL/SLOT connections, PyQt4).
        self.connect(buttonbox.button(QtGui.QDialogButtonBox.Save),
                     QtCore.SIGNAL("clicked()"), self.new_account)
        self.connect(buttonbox, QtCore.SIGNAL("rejected()"),
                     self, QtCore.SLOT("reject()"))
        self.connect(delete, QtCore.SIGNAL('clicked()'), self.delete_account)
        # Find out if an account already exists and pre-fill the form.
        if self.settings.contains("User") and self.settings.contains("use"):
            username = base64.b64decode(self.settings.value("User").toString())
            password = base64.b64decode(self.settings.value("use").toString())
            self.useredit.setText(unicode(username))
            self.passwordedit.setText(unicode(password))

    def new_account(self):
        """Verify and store twitter account details."""
        username = self.useredit.text()
        password = self.passwordedit.text()
        # Verify the account exists by authenticating against Twitter.
        auth = tweepy.BasicAuthHandler(username, password)
        api = tweepy.API(auth)
        if not api.verify_credentials():
            QtGui.QMessageBox.warning(self, 'Warning',
                "Could not authenticate twitter account",
                QtGui.QMessageBox.Ok)
        else:
            # Store username and password (base64-obfuscated, not secure).
            self.settings.setValue("User", (QtCore.QVariant\
                (base64.b64encode(str(username)))))
            self.settings.setValue("use", (QtCore.QVariant\
                (base64.b64encode(str(password)))))
            # Signal account change to main window
            self.emit(QtCore.SIGNAL("changed"))
            # NOTE(review): leftover debug print.
            print "pie"

    def delete_account(self):
        """Remove all twitter account details."""
        self.settings.remove("User")
        self.settings.remove("use")
        self.useredit.setText("")
        self.passwordedit.setText("")
        # Signal account change to main window
        self.emit(QtCore.SIGNAL("changed"))
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
meep = TwitterAccount()
meep.show()
sys.exit(app.exec_())
| 34.807692 | 80 | 0.612376 | 426 | 4,525 | 6.450704 | 0.368545 | 0.039301 | 0.010917 | 0.024745 | 0.088792 | 0.042213 | 0.042213 | 0.042213 | 0.042213 | 0.042213 | 0 | 0.009852 | 0.28221 | 4,525 | 129 | 81 | 35.077519 | 0.836207 | 0.080221 | 0 | 0.022989 | 0 | 0 | 0.04824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.16092 | 0.08046 | null | null | 0.022989 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
1bec0e468d60d3895c2e4861059333c712c3e641 | 1,983 | py | Python | python/card-games/lists.py | sci-c0/exercism-learning | dd9fb1d2a407085992c3371c1d56456b7ebf9180 | [
"BSD-3-Clause"
] | null | null | null | python/card-games/lists.py | sci-c0/exercism-learning | dd9fb1d2a407085992c3371c1d56456b7ebf9180 | [
"BSD-3-Clause"
] | null | null | null | python/card-games/lists.py | sci-c0/exercism-learning | dd9fb1d2a407085992c3371c1d56456b7ebf9180 | [
"BSD-3-Clause"
] | null | null | null | """
Elyse is really looking forward to playing some poker (and other card games) during her upcoming trip to Vegas.
Being a big fan of "self-tracking" she wants to put together some small functions that will help her with
tracking tasks and has asked for your help thinking them through.
"""
from typing import List, NoReturn
def get_rounds(number: int) -> List[int]:
    """Return the current round together with the two rounds that follow.

    :param number: int - current round number.
    :return: list - current round and the two that follow.
    """
    return [number, number + 1, number + 2]
def concatenate_rounds(rounds_1: List[int], rounds_2: List[int]) -> List[int]:
    """Join two histories of played rounds into a single list.

    :param rounds_1: list - first rounds played.
    :param rounds_2: list - second set of rounds played.
    :return: list - all rounds played (a new list; inputs are untouched).
    """
    combined = list(rounds_1)
    combined.extend(rounds_2)
    return combined
def list_contains_round(rounds: List[int], number: int) -> bool:
    """Check whether a given round appears in the played-rounds history.

    :param rounds: list - rounds played.
    :param number: int - round number.
    :return: bool - was the round played?
    """
    return any(played == number for played in rounds)
def card_average(hand: List[int]) -> float:
    """Compute the arithmetic mean of the card values in a hand.

    :param hand: list - cards in hand (must be non-empty).
    :return: float - average value of the cards in the hand.
    """
    total = 0
    for card in hand:
        total += card
    return total / len(hand)
def approx_average_is_average(hand: List[int]) -> bool:
    """Check whether either quick estimate of the average equals the true average.

    The two estimates are the middle card's value and the integer mean of the
    first and last cards.

    :param hand: list - cards in hand.
    :return: bool - is approximate average the same as true average?
    """
    # Inline the mean computation so the comparison logic reads in one place.
    actual = sum(hand) / len(hand)
    middle_card = hand[len(hand) // 2]
    first_last = (hand[0] + hand[-1]) // 2
    return actual == middle_card or actual == first_last
def average_even_is_average_odd(hand: List[int]) -> bool:
    """Compare the mean of even-indexed cards against the mean of odd-indexed cards.

    :param hand: list - cards in hand.
    :return: bool - are even and odd averages equal?
    """
    even_cards = hand[::2]
    odd_cards = hand[1::2]
    return sum(even_cards) / len(even_cards) == sum(odd_cards) / len(odd_cards)
def maybe_double_last(hand: List[int]) -> List[int]:
    """Double a trailing Jack (value 11) to 22.

    :param hand: list - cards in hand.
    :return: list - hand with a trailing Jack (if present) doubled.

    The hand is modified in place and also returned, so the result can be
    used directly.  Fixes from review: the annotation was ``NoReturn``
    (which means "never returns") even though the docstring promised a
    list, and the function actually returned ``None``; it also crashed on
    an empty hand.  Returning the hand is backward-compatible — callers
    that relied only on the in-place mutation are unaffected.
    """
    # Guard against an empty hand before peeking at the last card.
    if hand and hand[-1] == 11:
        hand[-1] = 22
    return hand
| 26.092105 | 111 | 0.636409 | 283 | 1,983 | 4.378092 | 0.35689 | 0.050847 | 0.048426 | 0.058111 | 0.127522 | 0.127522 | 0.127522 | 0.079096 | 0.079096 | 0.079096 | 0 | 0.013289 | 0.241049 | 1,983 | 75 | 112 | 26.44 | 0.809967 | 0.502774 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.368421 | false | 0 | 0.052632 | 0 | 0.736842 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
1bf50f82fefeed5f6d8d197647dcc1c1b1e5815e | 484 | py | Python | compiler/tests/test_futures_sum.py | NetSys/kappa | de1ab3393d1e6358f66427645c77833d4dc99693 | [
"BSD-2-Clause"
] | 34 | 2018-07-02T22:02:36.000Z | 2021-12-08T22:01:38.000Z | compiler/tests/test_futures_sum.py | NetSys/kappa | de1ab3393d1e6358f66427645c77833d4dc99693 | [
"BSD-2-Clause"
] | 1 | 2019-07-01T16:02:04.000Z | 2019-07-01T16:11:26.000Z | compiler/tests/test_futures_sum.py | NetSys/kappa | de1ab3393d1e6358f66427645c77833d4dc99693 | [
"BSD-2-Clause"
] | 10 | 2018-07-09T02:30:21.000Z | 2022-03-21T08:46:38.000Z | import operator
import rt
def parallel_sum(l, r):
    """Computes (l + (l+1) + ... + r) via divide and conquer.

    Each half of the range is summed recursively and the two partial sums
    are combined with ``rt.spawn``, so the additions can run as parallel
    tasks.
    """
    # TODO(zhangwen): this function can either return an int or a future; this seems confusing...
    # Base case: a single-element range sums to itself.
    if l == r:
        return l
    mid = (l + r) // 2
    left_part = parallel_sum(l, mid)
    right_part = parallel_sum(mid + 1, r)
    return rt.spawn(operator.add, (left_part, right_part))
def handler(event, context):
    """Entry point: sum the integers 1..n, where n comes from the event payload.

    For n == 1 the answer is returned directly; otherwise the recursive
    ``parallel_sum`` is kicked off and its future is awaited.
    """
    n = event["n"]
    if n == 1:
        return 1
    return parallel_sum(1, n).wait()
| 20.166667 | 97 | 0.549587 | 75 | 484 | 3.493333 | 0.493333 | 0.167939 | 0.091603 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017595 | 0.295455 | 484 | 23 | 98 | 21.043478 | 0.750733 | 0.256198 | 0 | 0 | 0 | 0 | 0.002825 | 0 | 0 | 0 | 0 | 0.043478 | 0 | 1 | 0.133333 | false | 0 | 0.133333 | 0 | 0.533333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
1bf7409eee7ee3ed6535e8083a999f8127b74a72 | 1,217 | py | Python | src/priorityq.py | sm2774us/Data-Structures-And-Algorithms-In-Python | a9d03cc3f9136178bfe87ec63ae63ddb53450277 | [
"MIT"
] | 33 | 2017-10-12T16:52:04.000Z | 2021-11-29T13:39:37.000Z | src/priorityq.py | sm2774us/Data-Structures-And-Algorithms-In-Python | a9d03cc3f9136178bfe87ec63ae63ddb53450277 | [
"MIT"
] | 4 | 2017-03-28T04:09:58.000Z | 2017-06-20T23:30:30.000Z | src/priorityq.py | sm2774us/Data-Structures-And-Algorithms-In-Python | a9d03cc3f9136178bfe87ec63ae63ddb53450277 | [
"MIT"
] | 13 | 2017-04-15T15:09:29.000Z | 2021-06-09T10:45:44.000Z | """Python implementation of a priorityq."""
from src.binheap import Binheap
class PriorityQ(object):
    """
    Priority Q data structure backed by a binary heap.

    Following methods are supported.

    insert(value, [priority]): inserts a value into the queue.
    Takes an optional argument for that value's priority.

    pop(): removes the most important item from the queue
    and returns its value.

    peek(): returns the most important item without removing it from the queue.
    """

    def __init__(self):
        """Initialize priorityq with an empty binary heap as storage."""
        self._container = Binheap()

    def insert(self, val, priority=0):
        """Insert a val into the queue with an argument for the priority.

        The heap orders (priority, value) tuples, so lower priority numbers
        sort first.
        """
        self._container.push((priority, val))

    def pop(self):
        """Remove and return the most important item's value.

        :raises IndexError: if the queue is empty.
        """
        # Bug fixes from review: the original ``raise(IndexError, msg)``
        # raises a *tuple* (a TypeError in Python 3), its truthiness check
        # wrongly rejected legitimate falsy values such as 0, and an empty
        # heap raised a bare IndexError before the check even ran.
        try:
            # Binheap keeps its root at index 1 of its backing list.
            to_return = self._container.container[1][1]
        except IndexError:
            raise IndexError("Can't pop from an empty queue.")
        self._container.pop()
        return to_return

    def peek(self):
        """Return the most important item without removing it, or None if empty."""
        try:
            return self._container.container[1][1]
        except IndexError:
            return None
| 29.682927 | 79 | 0.637634 | 154 | 1,217 | 4.961039 | 0.435065 | 0.052356 | 0.08377 | 0.104712 | 0.259162 | 0.259162 | 0.180628 | 0 | 0 | 0 | 0 | 0.005593 | 0.265407 | 1,217 | 40 | 80 | 30.425 | 0.848993 | 0.451109 | 0 | 0 | 0 | 0 | 0.006634 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0 | 0.058824 | 0 | 0.529412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.