hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ce334240496788f670cc68b34ae034aab8919c52
| 41,740
|
py
|
Python
|
intrepid/geophysical_models_pb2.py
|
intrepid-geophysics/intrepid-protobuf-py
|
e01a11e139b0ed3bb9500a8153939d7acfa8b3b4
|
[
"Apache-2.0"
] | 1
|
2020-07-08T04:41:52.000Z
|
2020-07-08T04:41:52.000Z
|
intrepid/geophysical_models_pb2.py
|
intrepid-geophysics/intrepid-protobuf-py
|
e01a11e139b0ed3bb9500a8153939d7acfa8b3b4
|
[
"Apache-2.0"
] | null | null | null |
intrepid/geophysical_models_pb2.py
|
intrepid-geophysics/intrepid-protobuf-py
|
e01a11e139b0ed3bb9500a8153939d7acfa8b3b4
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: geophysical_models.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Default symbol database; every descriptor built below is registered into it.
_sym_db = _symbol_database.Default()
# geophysical_models.proto imports commontaskmodel.proto (the 'ctm' package),
# so its generated module is needed as a descriptor dependency.
import intrepid.commontaskmodel_pb2 as commontaskmodel__pb2
# File descriptor for geophysical_models.proto (package 'geophy', proto2).
# NOTE(review): serialized_pb is the compiled wire-format schema emitted by
# protoc; it must stay byte-identical to the Descriptor objects below.
# Regenerate with protoc rather than editing by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
name='geophysical_models.proto',
package='geophy',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x18geophysical_models.proto\x12\x06geophy\x1a\x15\x63ommontaskmodel.proto\"\xd3\x01\n\x14InducedMagneticField\x12\r\n\x02hx\x18\x01 \x01(\x01:\x01\x30\x12\r\n\x02hy\x18\x02 \x01(\x01:\x01\x30\x12\r\n\x02hz\x18\x03 \x01(\x01:\x01\x30\x12\x13\n\x08geofield\x18\x04 \x01(\x01:\x01\x30\x12\x12\n\x07\x41zimuth\x18\x05 \x01(\x01:\x01\x30\x12\x17\n\x0bInclination\x18\x06 \x01(\x01:\x02\x39\x30\x12\x14\n\tRemenance\x18\x1e \x01(\x01:\x01\x30\x12\x1a\n\x0frem_inclination\x18\x1f \x01(\x01:\x01\x30\x12\x1a\n\x0frem_declination\x18 \x01(\x01:\x01\x30\"\xa0\x01\n\x0b\x63Properties\x12\x15\n\x07\x64\x65nsity\x18\x01 \x01(\x01:\x04\x32.67\x12\x17\n\tisotropic\x18\x02 \x01(\x08:\x04true\x12\r\n\x02ka\x18\x03 \x01(\x01:\x01\x30\x12\r\n\x02kb\x18\x04 \x01(\x01:\x01\x30\x12\r\n\x02kc\x18\x05 \x01(\x01:\x01\x30\x12\x0f\n\x04remH\x18\x06 \x01(\x01:\x01\x30\x12\x10\n\x05remAz\x18\x07 \x01(\x01:\x01\x30\x12\x11\n\x06remInc\x18\x08 \x01(\x01:\x01\x30\"s\n\rcBodyPosition\x12\r\n\x02x0\x18\x01 \x01(\x01:\x01\x30\x12\r\n\x02y0\x18\x02 \x01(\x01:\x01\x30\x12\r\n\x02z0\x18\x03 \x01(\x01:\x01\x30\x12\x11\n\x06strike\x18\x04 \x01(\x01:\x01\x30\x12\x0f\n\x03\x64ip\x18\x05 \x01(\x01:\x02\x39\x30\x12\x11\n\x06plunge\x18\x06 \x01(\x01:\x01\x30\"C\n\x0esCylinderShape\x12\x0e\n\x06height\x18\x01 \x01(\x01\x12\x0e\n\x06radius\x18\x02 \x01(\x01\x12\x11\n\x05slope\x18\x03 \x01(\x01:\x02\x39\x30\"\xb6\x01\n\nsDykeShape\x12\r\n\x05width\x18\x01 \x01(\x01\x12\x11\n\x05slope\x18\x02 \x01(\x01:\x02\x39\x30\x12\x0e\n\x06height\x18\x03 \x01(\x01\x12\x11\n\x06length\x18\x04 \x01(\x01:\x01\x30\x12-\n\nProvenance\x18\x06 \x01(\x0e\x32\x13.ctm.ProvenanceType:\x04none\x12\x34\n\x08polarity\x18\x07 \x01(\x0e\x32\x11.ctm.PolarityType:\x0fNormal_Polarity\":\n\x0bsPrismShape\x12\r\n\x05xSize\x18\x01 \x01(\x01\x12\r\n\x05zSize\x18\x02 \x01(\x01\x12\r\n\x05ySize\x18\x03 \x01(\x01\"5\n\nsSlabShape\x12\t\n\x01\x61\x18\x01 \x01(\x01\x12\t\n\x01\x63\x18\x02 \x01(\x01\x12\x11\n\x06length\x18\x03 
\x01(\x01:\x01\x30\"\x19\n\x08\x45\x64geList\x12\r\n\x05pairs\x18\x01 \x03(\x05\"\xb3\x04\n\x05\x63\x42ody\x12\x19\n\x0e\x62\x61\x63kgroundSusc\x18\x01 \x01(\x01:\x01\x30\x12\x1f\n\x11\x62\x61\x63kgroundDensity\x18\x02 \x01(\x01:\x04\x32.67\x12.\n\x08magField\x18\x03 \x01(\x0b\x32\x1c.geophy.InducedMagneticField\x12\'\n\x08position\x18\x05 \x01(\x0b\x32\x15.geophy.cBodyPosition\x12\'\n\nproperties\x18\x06 \x01(\x0b\x32\x13.geophy.cProperties\x12\n\n\x02ma\x18\x07 \x01(\x01\x12\n\n\x02mb\x18\x08 \x01(\x01\x12\n\n\x02mc\x18\t \x01(\x01\x12\n\n\x02na\x18\n \x01(\x01\x12\n\n\x02nb\x18\x0b \x01(\x01\x12\n\n\x02nc\x18\x0c \x01(\x01\x12\x11\n\x05group\x18\r \x01(\x05:\x02-1\x12#\n\x03\x63yl\x18\x15 \x01(\x0b\x32\x16.geophy.sCylinderShape\x12 \n\x04\x64yke\x18\x16 \x01(\x0b\x32\x12.geophy.sDykeShape\x12\"\n\x05prism\x18\x18 \x01(\x0b\x32\x13.geophy.sPrismShape\x12 \n\x04slab\x18\x19 \x01(\x0b\x32\x12.geophy.sSlabShape\x12\x1b\n\x05nodes\x18\x1a \x03(\x0b\x32\x0c.ctm.Point3d\x12\"\n\x08\x65\x64geList\x18\x1b \x03(\x0b\x32\x10.geophy.EdgeList\x12\x15\n\x06wormed\x18\x1e \x01(\x08:\x05\x66\x61lse\x12\x15\n\nsimilarity\x18\x1f \x01(\x01:\x01\x35\x12\x15\n\trms_error\x18 \x01(\x01:\x02-1\"\xd1\x01\n\x06\x63Model\x12\x19\n\x0e\x62\x61\x63kgroundSusc\x18\x01 \x01(\x01:\x01\x30\x12\x1f\n\x11\x62\x61\x63kgroundDensity\x18\x02 \x01(\x01:\x04\x32.67\x12.\n\x08magField\x18\x03 \x01(\x0b\x32\x1c.geophy.InducedMagneticField\x12\x1f\n\x08\x62odyList\x18\x04 \x03(\x0b\x32\r.geophy.cBody\x12,\n\x08\x62odyType\x18\x14 \x01(\x0e\x32\x11.geophy.eBodyType:\x07\x42T_DYKE\x12\x0c\n\x04Name\x18\x15 \x01(\t*d\n\teBodyType\x12\x0b\n\x07\x42T_SLAB\x10\x02\x12\x0b\n\x07\x42T_DYKE\x10\x03\x12\x0c\n\x08\x42T_PRISM\x10\x04\x12\x0f\n\x0b\x42T_CYLINDER\x10\x06\x12\x0c\n\x08\x42T_FACET\x10\x08\x12\x10\n\x0c\x42T_THINPLATE\x10\t'
,
dependencies=[commontaskmodel__pb2.DESCRIPTOR,])
# Enum descriptor for geophy.eBodyType (generated by protoc — do not hand-edit).
# The enum numbers are sparse (2, 3, 4, 6, 8, 9), matching the .proto source.
_EBODYTYPE = _descriptor.EnumDescriptor(
name='eBodyType',
full_name='geophy.eBodyType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='BT_SLAB', index=0, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BT_DYKE', index=1, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BT_PRISM', index=2, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BT_CYLINDER', index=3, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BT_FACET', index=4, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BT_THINPLATE', index=5, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1727,
serialized_end=1827,
)
# Register the enum and expose a Python-side wrapper plus module-level
# integer constants mirroring each enum value.
_sym_db.RegisterEnumDescriptor(_EBODYTYPE)
eBodyType = enum_type_wrapper.EnumTypeWrapper(_EBODYTYPE)
BT_SLAB = 2
BT_DYKE = 3
BT_PRISM = 4
BT_CYLINDER = 6
BT_FACET = 8
BT_THINPLATE = 9
# Message descriptor for geophy.InducedMagneticField (proto2): nine optional
# double fields. Inclination defaults to 90; every other field defaults to 0.
_INDUCEDMAGNETICFIELD = _descriptor.Descriptor(
name='InducedMagneticField',
full_name='geophy.InducedMagneticField',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='hx', full_name='geophy.InducedMagneticField.hx', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hy', full_name='geophy.InducedMagneticField.hy', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hz', full_name='geophy.InducedMagneticField.hz', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geofield', full_name='geophy.InducedMagneticField.geofield', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='Azimuth', full_name='geophy.InducedMagneticField.Azimuth', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='Inclination', full_name='geophy.InducedMagneticField.Inclination', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(90),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='Remenance', full_name='geophy.InducedMagneticField.Remenance', index=6,
number=30, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='rem_inclination', full_name='geophy.InducedMagneticField.rem_inclination', index=7,
number=31, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='rem_declination', full_name='geophy.InducedMagneticField.rem_declination', index=8,
number=32, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=60,
serialized_end=271,
)
# Message descriptor for geophy.cProperties (physical properties of a body).
# density defaults to 2.67, isotropic (bool) defaults to True, and the
# remaining doubles (ka, kb, kc, remH, remAz, remInc) default to 0.
_CPROPERTIES = _descriptor.Descriptor(
name='cProperties',
full_name='geophy.cProperties',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='density', full_name='geophy.cProperties.density', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(2.67),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='isotropic', full_name='geophy.cProperties.isotropic', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ka', full_name='geophy.cProperties.ka', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='kb', full_name='geophy.cProperties.kb', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='kc', full_name='geophy.cProperties.kc', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='remH', full_name='geophy.cProperties.remH', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='remAz', full_name='geophy.cProperties.remAz', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='remInc', full_name='geophy.cProperties.remInc', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=274,
serialized_end=434,
)
# Message descriptor for geophy.cBodyPosition: body origin (x0, y0, z0) and
# orientation (strike, dip, plunge). dip defaults to 90, all others to 0.
_CBODYPOSITION = _descriptor.Descriptor(
name='cBodyPosition',
full_name='geophy.cBodyPosition',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='x0', full_name='geophy.cBodyPosition.x0', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='y0', full_name='geophy.cBodyPosition.y0', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='z0', full_name='geophy.cBodyPosition.z0', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='strike', full_name='geophy.cBodyPosition.strike', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dip', full_name='geophy.cBodyPosition.dip', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(90),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='plunge', full_name='geophy.cBodyPosition.plunge', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=436,
serialized_end=551,
)
# Message descriptor for geophy.sCylinderShape: optional doubles height and
# radius (no declared default); slope defaults to 90.
_SCYLINDERSHAPE = _descriptor.Descriptor(
name='sCylinderShape',
full_name='geophy.sCylinderShape',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='height', full_name='geophy.sCylinderShape.height', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='radius', full_name='geophy.sCylinderShape.radius', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='slope', full_name='geophy.sCylinderShape.slope', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(90),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=553,
serialized_end=620,
)
# Message descriptor for geophy.sDykeShape. width/height carry no default,
# slope defaults to 90 and length to 0. Provenance and polarity are enum
# fields whose types (.ctm.ProvenanceType / .ctm.PolarityType) live in the
# imported commontaskmodel proto; their numeric defaults (0 and 1) correspond
# to the 'none' and 'Normal_Polarity' defaults recorded in serialized_pb.
_SDYKESHAPE = _descriptor.Descriptor(
name='sDykeShape',
full_name='geophy.sDykeShape',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='width', full_name='geophy.sDykeShape.width', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='slope', full_name='geophy.sDykeShape.slope', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(90),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='height', full_name='geophy.sDykeShape.height', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='length', full_name='geophy.sDykeShape.length', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='Provenance', full_name='geophy.sDykeShape.Provenance', index=4,
number=6, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='polarity', full_name='geophy.sDykeShape.polarity', index=5,
number=7, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=623,
serialized_end=805,
)
# Message descriptor for geophy.sPrismShape: box dimensions xSize, zSize,
# ySize (note the x, z, y field order), all optional doubles without defaults.
_SPRISMSHAPE = _descriptor.Descriptor(
name='sPrismShape',
full_name='geophy.sPrismShape',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='xSize', full_name='geophy.sPrismShape.xSize', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='zSize', full_name='geophy.sPrismShape.zSize', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ySize', full_name='geophy.sPrismShape.ySize', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=807,
serialized_end=865,
)
# Message descriptor for geophy.sSlabShape: doubles a and c (no declared
# default) plus length, which defaults to 0.
_SSLABSHAPE = _descriptor.Descriptor(
name='sSlabShape',
full_name='geophy.sSlabShape',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='a', full_name='geophy.sSlabShape.a', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='c', full_name='geophy.sSlabShape.c', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='length', full_name='geophy.sSlabShape.length', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=867,
serialized_end=920,
)
# Message descriptor for geophy.EdgeList: a single repeated int32 field
# 'pairs' (type=5/cpp_type=1 is int32; label=3 is repeated).
_EDGELIST = _descriptor.Descriptor(
name='EdgeList',
full_name='geophy.EdgeList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='pairs', full_name='geophy.EdgeList.pairs', index=0,
number=1, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=922,
serialized_end=947,
)
_CBODY = _descriptor.Descriptor(
name='cBody',
full_name='geophy.cBody',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='backgroundSusc', full_name='geophy.cBody.backgroundSusc', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='backgroundDensity', full_name='geophy.cBody.backgroundDensity', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(2.67),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='magField', full_name='geophy.cBody.magField', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='position', full_name='geophy.cBody.position', index=3,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='properties', full_name='geophy.cBody.properties', index=4,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ma', full_name='geophy.cBody.ma', index=5,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mb', full_name='geophy.cBody.mb', index=6,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mc', full_name='geophy.cBody.mc', index=7,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='na', full_name='geophy.cBody.na', index=8,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nb', full_name='geophy.cBody.nb', index=9,
number=11, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nc', full_name='geophy.cBody.nc', index=10,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='group', full_name='geophy.cBody.group', index=11,
number=13, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=-1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cyl', full_name='geophy.cBody.cyl', index=12,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dyke', full_name='geophy.cBody.dyke', index=13,
number=22, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prism', full_name='geophy.cBody.prism', index=14,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='slab', full_name='geophy.cBody.slab', index=15,
number=25, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nodes', full_name='geophy.cBody.nodes', index=16,
number=26, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='edgeList', full_name='geophy.cBody.edgeList', index=17,
number=27, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='wormed', full_name='geophy.cBody.wormed', index=18,
number=30, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='similarity', full_name='geophy.cBody.similarity', index=19,
number=31, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='rms_error', full_name='geophy.cBody.rms_error', index=20,
number=32, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(-1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=950,
serialized_end=1513,
)
# Descriptor for the `geophy.cModel` protobuf message.
# NOTE: protoc-generated wiring — regenerate from the .proto source rather
# than editing field numbers, types, or serialized offsets by hand.
_CMODEL = _descriptor.Descriptor(
name='cModel',
full_name='geophy.cModel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Tag 1: scalar with default 0 (background susceptibility).
_descriptor.FieldDescriptor(
name='backgroundSusc', full_name='geophy.cModel.backgroundSusc', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Tag 2: scalar with default 2.67 (background density).
_descriptor.FieldDescriptor(
name='backgroundDensity', full_name='geophy.cModel.backgroundDensity', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(2.67),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Tag 3: sub-message; message_type is linked after all descriptors exist.
_descriptor.FieldDescriptor(
name='magField', full_name='geophy.cModel.magField', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Tag 4: repeated sub-message (label=3) — the model's list of bodies.
_descriptor.FieldDescriptor(
name='bodyList', full_name='geophy.cModel.bodyList', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Tag 20: enum field with default value 3; enum_type linked below.
_descriptor.FieldDescriptor(
name='bodyType', full_name='geophy.cModel.bodyType', index=4,
number=20, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=3,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Tag 21: string field, default empty.
_descriptor.FieldDescriptor(
name='Name', full_name='geophy.cModel.Name', index=5,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1516,
serialized_end=1725,
)
# --- Post-construction linking (protoc-generated) ---------------------------
# Message- and enum-typed fields are declared with message_type/enum_type=None
# above; the concrete descriptors are patched in here, once every descriptor
# object exists.  Cross-file types come from commontaskmodel__pb2.
_SDYKESHAPE.fields_by_name['Provenance'].enum_type = commontaskmodel__pb2._PROVENANCETYPE
_SDYKESHAPE.fields_by_name['polarity'].enum_type = commontaskmodel__pb2._POLARITYTYPE
_CBODY.fields_by_name['magField'].message_type = _INDUCEDMAGNETICFIELD
_CBODY.fields_by_name['position'].message_type = _CBODYPOSITION
_CBODY.fields_by_name['properties'].message_type = _CPROPERTIES
_CBODY.fields_by_name['cyl'].message_type = _SCYLINDERSHAPE
_CBODY.fields_by_name['dyke'].message_type = _SDYKESHAPE
_CBODY.fields_by_name['prism'].message_type = _SPRISMSHAPE
_CBODY.fields_by_name['slab'].message_type = _SSLABSHAPE
_CBODY.fields_by_name['nodes'].message_type = commontaskmodel__pb2._POINT3D
_CBODY.fields_by_name['edgeList'].message_type = _EDGELIST
_CMODEL.fields_by_name['magField'].message_type = _INDUCEDMAGNETICFIELD
_CMODEL.fields_by_name['bodyList'].message_type = _CBODY
_CMODEL.fields_by_name['bodyType'].enum_type = _EBODYTYPE
# Register every top-level message/enum under its proto name on the file
# descriptor, then hand the completed file to the symbol database.
DESCRIPTOR.message_types_by_name['InducedMagneticField'] = _INDUCEDMAGNETICFIELD
DESCRIPTOR.message_types_by_name['cProperties'] = _CPROPERTIES
DESCRIPTOR.message_types_by_name['cBodyPosition'] = _CBODYPOSITION
DESCRIPTOR.message_types_by_name['sCylinderShape'] = _SCYLINDERSHAPE
DESCRIPTOR.message_types_by_name['sDykeShape'] = _SDYKESHAPE
DESCRIPTOR.message_types_by_name['sPrismShape'] = _SPRISMSHAPE
DESCRIPTOR.message_types_by_name['sSlabShape'] = _SSLABSHAPE
DESCRIPTOR.message_types_by_name['EdgeList'] = _EDGELIST
DESCRIPTOR.message_types_by_name['cBody'] = _CBODY
DESCRIPTOR.message_types_by_name['cModel'] = _CMODEL
DESCRIPTOR.enum_types_by_name['eBodyType'] = _EBODYTYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# --- Concrete message classes (protoc-generated) -----------------------------
# Each class is synthesized by the reflection metaclass from its descriptor
# and registered with the symbol database so Any/unpacking can find it.
# The @@protoc_insertion_point comments are placeholders used by protoc
# plugins and must be preserved verbatim.
InducedMagneticField = _reflection.GeneratedProtocolMessageType('InducedMagneticField', (_message.Message,), {
'DESCRIPTOR' : _INDUCEDMAGNETICFIELD,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.InducedMagneticField)
})
_sym_db.RegisterMessage(InducedMagneticField)
cProperties = _reflection.GeneratedProtocolMessageType('cProperties', (_message.Message,), {
'DESCRIPTOR' : _CPROPERTIES,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.cProperties)
})
_sym_db.RegisterMessage(cProperties)
cBodyPosition = _reflection.GeneratedProtocolMessageType('cBodyPosition', (_message.Message,), {
'DESCRIPTOR' : _CBODYPOSITION,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.cBodyPosition)
})
_sym_db.RegisterMessage(cBodyPosition)
sCylinderShape = _reflection.GeneratedProtocolMessageType('sCylinderShape', (_message.Message,), {
'DESCRIPTOR' : _SCYLINDERSHAPE,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.sCylinderShape)
})
_sym_db.RegisterMessage(sCylinderShape)
sDykeShape = _reflection.GeneratedProtocolMessageType('sDykeShape', (_message.Message,), {
'DESCRIPTOR' : _SDYKESHAPE,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.sDykeShape)
})
_sym_db.RegisterMessage(sDykeShape)
sPrismShape = _reflection.GeneratedProtocolMessageType('sPrismShape', (_message.Message,), {
'DESCRIPTOR' : _SPRISMSHAPE,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.sPrismShape)
})
_sym_db.RegisterMessage(sPrismShape)
sSlabShape = _reflection.GeneratedProtocolMessageType('sSlabShape', (_message.Message,), {
'DESCRIPTOR' : _SSLABSHAPE,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.sSlabShape)
})
_sym_db.RegisterMessage(sSlabShape)
EdgeList = _reflection.GeneratedProtocolMessageType('EdgeList', (_message.Message,), {
'DESCRIPTOR' : _EDGELIST,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.EdgeList)
})
_sym_db.RegisterMessage(EdgeList)
cBody = _reflection.GeneratedProtocolMessageType('cBody', (_message.Message,), {
'DESCRIPTOR' : _CBODY,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.cBody)
})
_sym_db.RegisterMessage(cBody)
cModel = _reflection.GeneratedProtocolMessageType('cModel', (_message.Message,), {
'DESCRIPTOR' : _CMODEL,
'__module__' : 'geophysical_models_pb2'
# @@protoc_insertion_point(class_scope:geophy.cModel)
})
_sym_db.RegisterMessage(cModel)
# @@protoc_insertion_point(module_scope)
| 46.741321
| 3,809
| 0.746766
| 5,500
| 41,740
| 5.364364
| 0.063091
| 0.058297
| 0.093377
| 0.076871
| 0.731189
| 0.701396
| 0.69638
| 0.678993
| 0.67601
| 0.671062
| 0
| 0.047828
| 0.127911
| 41,740
| 892
| 3,810
| 46.793722
| 0.762699
| 0.018807
| 0
| 0.704192
| 1
| 0.002395
| 0.169024
| 0.128179
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007186
| 0
| 0.007186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
02038323b77c2bafd72b0efda47851941a327f4b
| 1,655
|
py
|
Python
|
feature_scaling/entities/factory.py
|
Raiz-Environmental-Technology/feature_scaling
|
cf31b7003a4d830b1a6f1fda3620b862376d0e35
|
[
"BSD-2-Clause"
] | null | null | null |
feature_scaling/entities/factory.py
|
Raiz-Environmental-Technology/feature_scaling
|
cf31b7003a4d830b1a6f1fda3620b862376d0e35
|
[
"BSD-2-Clause"
] | null | null | null |
feature_scaling/entities/factory.py
|
Raiz-Environmental-Technology/feature_scaling
|
cf31b7003a4d830b1a6f1fda3620b862376d0e35
|
[
"BSD-2-Clause"
] | null | null | null |
from feature_scaling.custom_typing.feature import Feature
from feature_scaling.models.feature import FeatureModel
from .type.interface import FeatureScalingInterface
class FeatureScalingFactory:
    """Factory wrapping a feature-scaling strategy.

    Holds one ``FeatureScalingInterface`` implementation and normalises
    every input through ``FeatureModel`` before delegating ``do``/``undo``
    to it.
    """

    __slots__ = ["_feature_scaling_method"]

    def __init__(self, feature_scaling_method: FeatureScalingInterface):
        # Route through the property setter so validation runs exactly once.
        # (The original pre-assignment of None was redundant: the setter
        # either raises or assigns.)
        self.feature_scaling_method = feature_scaling_method

    def __str__(self):
        # f-string interpolation already calls str(); no explicit __str__().
        return f"Feature Scaling Factory using {self.feature_scaling_method}"

    def __repr__(self):
        return (f"{self.__class__.__name__}"
                f"(feature_scaling_method={self.feature_scaling_method!r})")

    @property
    def feature_scaling_method(self) -> FeatureScalingInterface:
        """The wrapped scaling strategy."""
        return self._feature_scaling_method

    @feature_scaling_method.setter
    def feature_scaling_method(self, feature_scaling_method: FeatureScalingInterface) -> None:
        # Fix: the original used `assert isinstance(...)`, which is stripped
        # under `python -O`, silently disabling validation.  Raise explicitly.
        if not isinstance(feature_scaling_method, FeatureScalingInterface):
            raise TypeError(
                "feature_scaling_method must implement FeatureScalingInterface, "
                f"got {type(feature_scaling_method).__name__}"
            )
        self._feature_scaling_method = feature_scaling_method

    @feature_scaling_method.deleter
    def feature_scaling_method(self) -> None:
        del self._feature_scaling_method

    def do(self, feature: Feature) -> Feature:
        """Scale *feature* with the configured strategy and return the result."""
        feature = FeatureModel(feature).feature
        return self._feature_scaling_method.do(feature)

    def undo(self, original_feature: Feature, scaled_feature: Feature) -> Feature:
        """Revert scaling of *scaled_feature* using *original_feature*."""
        original_feature = FeatureModel(original_feature).feature
        scaled_feature = FeatureModel(scaled_feature).feature
        return self._feature_scaling_method.undo(original_feature, scaled_feature)
| 40.365854
| 108
| 0.759517
| 178
| 1,655
| 6.544944
| 0.202247
| 0.288412
| 0.360515
| 0.226609
| 0.513305
| 0.375966
| 0.358798
| 0.115021
| 0
| 0
| 0
| 0
| 0.170393
| 1,655
| 40
| 109
| 41.375
| 0.848507
| 0
| 0
| 0
| 0
| 0
| 0.10997
| 0.091843
| 0
| 0
| 0
| 0
| 0.033333
| 1
| 0.266667
| false
| 0
| 0.1
| 0.1
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
0212955f693a633826adb207bcba834a299e3e7b
| 6,084
|
py
|
Python
|
metpy/calc/tests/test_kinematics.py
|
wqshen/MetPy
|
fe15ec894bf15582576b090457c3000b4afb3555
|
[
"BSD-3-Clause"
] | 3
|
2016-02-25T08:39:32.000Z
|
2019-10-24T05:12:55.000Z
|
metpy/calc/tests/test_kinematics.py
|
wqshen/MetPy
|
fe15ec894bf15582576b090457c3000b4afb3555
|
[
"BSD-3-Clause"
] | null | null | null |
metpy/calc/tests/test_kinematics.py
|
wqshen/MetPy
|
fe15ec894bf15582576b090457c3000b4afb3555
|
[
"BSD-3-Clause"
] | 2
|
2017-01-06T16:30:40.000Z
|
2020-03-25T22:25:01.000Z
|
# Copyright (c) 2008-2015 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
from metpy.testing import assert_array_equal
import numpy as np
from metpy.calc.kinematics import * # noqa
from metpy.constants import g
from metpy.units import units, concatenate
class TestGradients(object):
    """Sanity checks for the combined convergence/vorticity calculation."""

    def test_basic(self):
        """A uniform wind field produces zero divergence and zero vorticity."""
        wind = np.ones((3, 3)) * units('m/s')
        conv, vort = convergence_vorticity(wind, wind, 1 * units.meter, 1 * units.meter)
        expected = np.zeros_like(wind) / units.sec
        assert_array_equal(conv, expected)
        assert_array_equal(vort, expected)

    def test_basic2(self):
        """Ramp field against its transpose: divergence 2/s, no vorticity."""
        ramp = np.arange(3)
        wind = np.c_[ramp, ramp, ramp] * units('m/s')
        conv, vort = convergence_vorticity(wind, wind.T, 1 * units.meter, 1 * units.meter)
        assert_array_equal(conv, 2. * np.ones_like(wind) / units.sec)
        assert_array_equal(vort, np.zeros_like(wind) / units.sec)

    def test_basic3(self):
        """Identical ramp components: divergence and vorticity both 1/s."""
        ramp = np.arange(3)
        wind = np.c_[ramp, ramp, ramp] * units('m/s')
        conv, vort = convergence_vorticity(wind, wind, 1 * units.meter, 1 * units.meter)
        expected = np.ones_like(wind) / units.sec
        assert_array_equal(conv, expected)
        assert_array_equal(vort, expected)
class TestVort(object):
    """Checks for the standalone vorticity helper."""

    def test_basic(self):
        """Ramp field against its transpose yields zero vorticity."""
        ramp = np.arange(3)
        wind = np.c_[ramp, ramp, ramp] * units('m/s')
        result = v_vorticity(wind, wind.T, 1 * units.meter, 1 * units.meter)
        assert_array_equal(result, np.zeros_like(wind) / units.sec)

    def test_basic3(self):
        """Identical ramp components give vorticity of 1/s everywhere."""
        ramp = np.arange(3)
        wind = np.c_[ramp, ramp, ramp] * units('m/s')
        result = v_vorticity(wind, wind, 1 * units.meter, 1 * units.meter)
        assert_array_equal(result, np.ones_like(wind) / units.sec)
class TestConv(object):
    """Checks for the standalone horizontal convergence helper."""

    def test_basic(self):
        """Ramp field against its transpose yields convergence of 2/s."""
        ramp = np.arange(3)
        wind = np.c_[ramp, ramp, ramp] * units('m/s')
        result = h_convergence(wind, wind.T, 1 * units.meter, 1 * units.meter)
        assert_array_equal(result, 2. * np.ones_like(wind) / units.sec)

    def test_basic3(self):
        """Identical ramp components give convergence of 1/s everywhere."""
        ramp = np.arange(3)
        wind = np.c_[ramp, ramp, ramp] * units('m/s')
        result = h_convergence(wind, wind, 1 * units.meter, 1 * units.meter)
        assert_array_equal(result, np.ones_like(wind) / units.sec)
class TestAdvection(object):
    """Checks for scalar advection in one and two dimensions."""

    def test_basic(self):
        """A constant scalar field is not advected."""
        wind = np.ones((3,)) * units('m/s')
        scalar = np.ones_like(wind) * units.kelvin
        result = advection(scalar, wind, (1 * units.meter,))
        assert_array_equal(result, np.zeros_like(wind) * units('K/sec'))

    def test_basic2(self):
        """A linearly increasing scalar under uniform wind gives -1 kg/s."""
        wind = np.ones((3,)) * units('m/s')
        scalar = np.array([1, 2, 3]) * units('kg')
        result = advection(scalar, wind, (1 * units.meter,))
        assert_array_equal(result, -np.ones_like(wind) * units('kg/sec'))

    def test_basic3(self):
        """Wind and scalar both varying linearly."""
        wind = np.array([1, 2, 3]) * units('m/s')
        scalar = np.array([1, 2, 3]) * units('Pa')
        result = advection(scalar, wind, (1 * units.meter,))
        assert_array_equal(result, np.array([-1, -2, -3]) * units('Pa/sec'))

    def test_2dbasic(self):
        """A constant 2D scalar field is not advected."""
        wind = np.ones((3, 3)) * units('m/s')
        scalar = np.ones_like(wind) * units.kelvin
        result = advection(scalar, [wind, wind], (1 * units.meter, 1 * units.meter))
        assert_array_equal(result, np.zeros_like(wind) * units('K/sec'))

    def test_2dbasic2(self):
        """A varying 2D scalar with unequal u/v wind components."""
        u_wind = np.ones((3, 3)) * units('m/s')
        v_wind = 2 * np.ones((3, 3)) * units('m/s')
        scalar = np.array([[1, 2, 1], [2, 4, 2], [1, 2, 1]]) * units.kelvin
        result = advection(scalar, [u_wind, v_wind], (1 * units.meter, 1 * units.meter))
        expected = np.array([[-3, -2, 1], [-4, 0, 4], [-1, 2, 3]]) * units('K/sec')
        assert_array_equal(result, expected)
class TestGeos(object):
    """Checks for the geostrophic wind calculation."""

    def test_basic(self):
        """Height bump with f = g so the two cancel in the expected winds."""
        heights = np.array([[48, 49, 48], [49, 50, 49], [48, 49, 48]]) * 100. * units.meter
        ug, vg = geostrophic_wind(heights, g.magnitude / units.sec,
                                  100. * units.meter, 100. * units.meter)
        expected_u = np.array([[-1, 0, 1]] * 3) * units('m/s')
        expected_v = -expected_u.T
        assert_array_equal(ug, expected_u)
        assert_array_equal(vg, expected_v)

    def test_geopotential(self):
        """Same field expressed as geopotential, with f = 1/s."""
        geopot = np.array([[48, 49, 48], [49, 50, 49], [48, 49, 48]]) * 100. * units('m^2/s^2')
        ug, vg = geostrophic_wind(geopot, 1 / units.sec, 100. * units.meter, 100. * units.meter)
        expected_u = np.array([[-1, 0, 1]] * 3) * units('m/s')
        expected_v = -expected_u.T
        assert_array_equal(ug, expected_u)
        assert_array_equal(vg, expected_v)

    def test_3d(self):
        """Two stacked copies of the 2D field exercise the 3D code path."""
        base = np.array([[48, 49, 48], [49, 50, 49], [48, 49, 48]]) * 100.
        # Using g as the value for f allows it to cancel out.
        heights3d = np.dstack((base, base)) * units.meter
        ug, vg = geostrophic_wind(heights3d, g.magnitude / units.sec,
                                  100. * units.meter, 100. * units.meter)
        expected_u = np.array([[-1, 0, 1]] * 3) * units('m/s')
        expected_v = -expected_u.T
        expected_u = concatenate((expected_u[..., None], expected_u[..., None]), axis=2)
        expected_v = concatenate((expected_v[..., None], expected_v[..., None]), axis=2)
        assert_array_equal(ug, expected_u)
        assert_array_equal(vg, expected_v)
| 39
| 91
| 0.568212
| 951
| 6,084
| 3.502629
| 0.112513
| 0.087061
| 0.105674
| 0.068448
| 0.82888
| 0.803062
| 0.774542
| 0.732513
| 0.679976
| 0.631042
| 0
| 0.043964
| 0.278435
| 6,084
| 155
| 92
| 39.251613
| 0.714806
| 0.142012
| 0
| 0.625
| 0
| 0
| 0.118444
| 0
| 0
| 0
| 0
| 0
| 0.171875
| 1
| 0.117188
| false
| 0
| 0.039063
| 0
| 0.195313
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0243b1d129351ad63ed4b94db9cb83ea934bc398
| 198
|
py
|
Python
|
pbp/callbacks/__init__.py
|
ArnaudPannatier/pytorch-boilerplate
|
1e90e359fc9247ae08e416c51d46ef7a9b8fb56f
|
[
"MIT"
] | 2
|
2021-06-29T20:57:46.000Z
|
2021-06-29T23:35:18.000Z
|
pbp/callbacks/__init__.py
|
ArnaudPannatier/pytorch-boilerplate
|
1e90e359fc9247ae08e416c51d46ef7a9b8fb56f
|
[
"MIT"
] | null | null | null |
pbp/callbacks/__init__.py
|
ArnaudPannatier/pytorch-boilerplate
|
1e90e359fc9247ae08e416c51d46ef7a9b8fb56f
|
[
"MIT"
] | 1
|
2021-04-16T07:01:52.000Z
|
2021-04-16T07:01:52.000Z
|
"""Define and implement the callback interface."""
from .base import Callback, CallbackList, CallbackListFactory
from .checkpoint import ModelCheckpoint
from .logger import TxtLogger, StdoutLogger
| 33
| 61
| 0.823232
| 21
| 198
| 7.761905
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 198
| 5
| 62
| 39.6
| 0.926136
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0252028d6a498e20736bc3a7dd147a7a0be9f337
| 226
|
py
|
Python
|
src/compas_hpc/geometry/__init__.py
|
yijiangh/compas
|
a9e86edf6b602f47ca051fccedcaa88a5e5d3600
|
[
"MIT"
] | 1
|
2019-03-27T22:32:56.000Z
|
2019-03-27T22:32:56.000Z
|
src/compas_hpc/geometry/__init__.py
|
yijiangh/compas
|
a9e86edf6b602f47ca051fccedcaa88a5e5d3600
|
[
"MIT"
] | null | null | null |
src/compas_hpc/geometry/__init__.py
|
yijiangh/compas
|
a9e86edf6b602f47ca051fccedcaa88a5e5d3600
|
[
"MIT"
] | 1
|
2022-01-16T02:32:43.000Z
|
2022-01-16T02:32:43.000Z
|
from .basic_numba import *
from .average_numba import *
from .spatial_numba import *
from .basic_numba import __all__ as a
from .average_numba import __all__ as b
from .spatial_numba import __all__ as c
# Re-export the combined public API of the three *_numba submodules.
__all__ = a + b + c
| 20.545455
| 39
| 0.769912
| 37
| 226
| 4.108108
| 0.297297
| 0.434211
| 0.296053
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176991
| 226
| 10
| 40
| 22.6
| 0.817204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.857143
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
027daa06b037580e5fe14add28ed405ec697ab45
| 131
|
py
|
Python
|
job_position/admin.py
|
resourceidea/resourceideaapi
|
4cc7db98f981d8f2011c1995e23e8a8655e31f75
|
[
"MIT"
] | 1
|
2020-05-30T22:27:59.000Z
|
2020-05-30T22:27:59.000Z
|
job_position/admin.py
|
resourceidea/resourceideaapi
|
4cc7db98f981d8f2011c1995e23e8a8655e31f75
|
[
"MIT"
] | 15
|
2020-02-11T21:53:08.000Z
|
2021-11-02T21:20:03.000Z
|
job_position/admin.py
|
resourceidea/resourceideaapi
|
4cc7db98f981d8f2011c1995e23e8a8655e31f75
|
[
"MIT"
] | 1
|
2020-08-27T10:57:47.000Z
|
2020-08-27T10:57:47.000Z
|
from django.contrib import admin # type: ignore
from job_position.models import JobPosition
# Expose JobPosition in the Django admin with the default ModelAdmin.
admin.site.register(JobPosition)
| 26.2
| 49
| 0.801527
| 17
| 131
| 6.117647
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137405
| 131
| 4
| 50
| 32.75
| 0.920354
| 0.091603
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
027e0c6dfa71c285f99b1457a824d036ecbd4249
| 98
|
py
|
Python
|
yew/modules/http/__init__.py
|
Claudjos/yew
|
567e0ed55f9580dac8493b38aa354688e6aa0394
|
[
"MIT"
] | null | null | null |
yew/modules/http/__init__.py
|
Claudjos/yew
|
567e0ed55f9580dac8493b38aa354688e6aa0394
|
[
"MIT"
] | null | null | null |
yew/modules/http/__init__.py
|
Claudjos/yew
|
567e0ed55f9580dac8493b38aa354688e6aa0394
|
[
"MIT"
] | null | null | null |
from .servers import Proxy
from .upstreams import ParentProxy, WebServer
from .rules import Rules
| 24.5
| 45
| 0.826531
| 13
| 98
| 6.230769
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132653
| 98
| 3
| 46
| 32.666667
| 0.952941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5a09223ffa42b387b4f972ff7403438b607f7144
| 11,955
|
py
|
Python
|
get_cdf/get_cdf.py
|
2218084076/hotpoor_autoclick_xhs
|
a52446ba691ac19e43410a465dc63f940c0e444d
|
[
"Apache-2.0"
] | 1
|
2021-12-21T10:42:46.000Z
|
2021-12-21T10:42:46.000Z
|
get_cdf/get_cdf.py
|
2218084076/hotpoor_autoclick_xhs
|
a52446ba691ac19e43410a465dc63f940c0e444d
|
[
"Apache-2.0"
] | null | null | null |
get_cdf/get_cdf.py
|
2218084076/hotpoor_autoclick_xhs
|
a52446ba691ac19e43410a465dc63f940c0e444d
|
[
"Apache-2.0"
] | null | null | null |
import sys
import os
import pyautogui
import time
import pyperclip
# Target CSS class: product-item-default
# Run document.getElementsByClassName("product-item-default").length in the
# browser console to check how many product items are on the current page.
# Open https://pgy.xiaohongshu.com/solar/advertiser/patterns/kol in Chrome.
# Select the desired category.
# Open the element-inspector (devtools) panel, docked at about 1160px.
# Scroll the screen all the way to the bottom-right corner.
page_num = 0
page_num_end = 3
# For SK-II, start from the second page.
# Items per paginated page; last page appears partial (2) — TODO confirm.
page_with_items = [20,20,20,2]
# Each entry describes one GUI step consumed by pyautogui_action():
# screen coordinates, post-action sleep, dispatch name, optional content,
# and a human-readable label printed after execution.
action_list = [
    {
        "x":127,
        "y":17,
        "sleep":1,
        "name":"move_to_click",
        "content":"",
        "action_name":"切换pgy页面",
    },
]
def _move_and_click(action):
    """Move the pointer to the action's (x, y) and left-click there once."""
    x = action.get("x", None)
    y = action.get("y", None)
    pyautogui.moveTo(x=x, y=y, duration=0, tween=pyautogui.linear)
    pyautogui.click(x=x, y=y, clicks=1, button='left')


def pyautogui_action(action):
    """Execute one GUI automation step described by *action*.

    *action* is a dict with keys ``name`` (dispatch key), ``x``/``y``
    (click target), optional ``content`` (text/JS to enter), optional
    ``sleep`` (seconds to pause afterwards, default 0) and
    ``action_name`` (label printed after the step).

    Fix: every branch previously duplicated the identical
    moveTo+click pair inline; that is now factored into
    ``_move_and_click`` so coordinate handling lives in one place.
    """
    name = action["name"]
    if name in ["move_to_click"]:
        _move_and_click(action)
    elif name in ["select_all_and_write"]:
        _move_and_click(action)
        time.sleep(1)
        pyautogui.hotkey("ctrl", "a")
        pyautogui.typewrite(action.get("content", ""))
        pyautogui.press('enter')
    elif name in ["select_all_and_js_latest"]:
        # Clear the console input, recall the previous entry, re-run it.
        _move_and_click(action)
        pyautogui.hotkey("ctrl", "a")
        pyautogui.press('backspace')
        pyautogui.press('up')
        pyautogui.press('enter')
    elif name in ["select_all_and_copy"]:
        _move_and_click(action)
        pyautogui.hotkey("ctrl", "a")
        # NOTE(review): despite the name this is Ctrl+X (cut), as in the
        # original — kept for compatibility with existing action lists.
        pyautogui.hotkey("ctrl", "x")
    elif name in ["select_all_and_paste"]:
        _move_and_click(action)
        pyautogui.hotkey("ctrl", "a")
        pyautogui.hotkey("ctrl", "v")
    elif name in ["select_item_and_close_tab"]:
        _move_and_click(action)
        pyautogui.hotkey("ctrl", "w")
    elif name in ["select_all_and_copy_and_paste"]:
        # Put `content` on the clipboard (typewrite cannot enter non-ASCII),
        # then paste it at the target and submit with Enter.
        _move_and_click(action)
        pyperclip.copy(action.get("content", ""))
        _move_and_click(action)
        pyautogui.hotkey("ctrl", "v")
        pyautogui.press('enter')
    elif name in ["open_console"]:
        _move_and_click(action)
        pyautogui.hotkey("f12")
    elif name in ["refresh"]:
        _move_and_click(action)
        pyautogui.hotkey("f5")
    elif name in ["esc"]:
        _move_and_click(action)
        pyautogui.hotkey("esc")
    print(action.get("action_name"))
    # Pause so the browser can settle before the next step.
    time.sleep(action.get("sleep", 0))
# NOTE(review): the source dump lost all indentation; the nesting below is
# reconstructed from the apparent control flow (one pass per listing page,
# one scripted action sequence per product) — confirm against the original.
for page in page_with_items:
    # Focus the browser tab that hosts the product listing.
    action_page_change = {
        "x": 127,
        "y": 17,
        "sleep": 0.5,
        "name": "move_to_click",
        "content": "",
        "action_name": "点击选项卡",
    }
    pyautogui_action(action_page_change)
    for item in range(page):
        # Scripted pass for one product: open it via injected JS, scrape its
        # detail fields through the devtools console, copy the JSON out,
        # close the tab, then paste/submit the result in the other tab.
        action_item_click_list = [
            {
                "x": 1377,
                "y": 147,
                "sleep": 0.5,
                "name": "move_to_click",
                "content": "",
                "action_name": "切换console",
            },
            {
                "x": 1204,
                "y": 172,
                "sleep": 0.5,
                "name": "move_to_click",
                "content": "",
                "action_name": "清空信息console",
            },
            {
                "x": 1282,
                "y": 995,
                "sleep": 2,
                "name": "select_all_and_copy_and_paste",
                # Clicks the item-th product card to open its detail page.
                "content": "document.getElementsByClassName(\"product-item-default\")[%s].children[1].click()" % (item),
                "action_name": "切换产品",
            },
            {
                "x": 453,
                "y": 16,
                "sleep": 0.5,
                "name": "open_console",
                "content": "",
                "action_name": "open_console",
            },
            {
                "x": 1377,
                "y": 147,
                "sleep": 0.5,
                "name": "select_all_and_copy_and_paste",
                "content": "",
                "action_name": "选择console",
            },
            {
                "x": 1204,
                "y": 172,
                "sleep": 0.5,
                "name": "move_to_click",
                "content": "",
                "action_name": "清空信息console",
            },
            {
                "x": 1282,
                "y": 995,
                "sleep": 0.5,
                "name": "select_all_and_copy_and_paste",
                # JS pasted into the console: collect the product detail
                # fields and stash them as JSON inside a fixed-position
                # textarea so a later "copy" action can grab them.
                "content":
                """
result=[]
result.push(document.getElementsByClassName("detail-box-title")[0].innerText)
result.push(document.getElementsByClassName("product-name")[0].innerText)
result.push(document.getElementsByClassName("product-code-value")[0].innerText)
result.push(document.getElementsByClassName("price-now")[0].innerText)
cxs=document.getElementsByClassName("promotion-item")
cxs_info = []
for (i=0;i<cxs.length;i++){
cxs_info.push(cxs[i].innerText)
}
ths=document.getElementsByClassName("property-item-title")
tds=document.getElementsByClassName("property-item-value")
kv={}
for (i=0;i<ths.length;i++){
kv[ths[i].innerText]=tds[i].innerText
}
result_info = {
"detail-box-title":result[0],
"product-name":result[1],
"product-code-value":result[2],
"price-now":result[3],
"promotion-item":cxs_info,
"property-item":kv,
}
dom=document.createElement("div")
dom.id="wlb_cover"
dom.style.position="fixed"
dom.style.top="0px"
dom.style.right="0px"
dom.innerHTML="<textarea id=\"wlb_cover_textarea\">"+JSON.stringify(result_info)+"</textarea>"
document.body.append(dom)
""",
                "action_name": "执行获取内容的JS",
            },
            {
                "x": 1023,
                "y": 152,
                "sleep": 0.5,
                "name": "select_all_and_copy",
                "content": "",
                "action_name": "copy",
            },
            {
                "x": 443,
                "y": 11,
                "sleep": 0.5,
                "name": "select_item_and_close_tab",
                "content": "",
                "action_name": "关闭选项卡",
            },
            {
                "x": 443,
                "y": 11,
                "sleep": 0.5,
                "name": "move_to_click",
                "content": "",
                "action_name": "点击选项卡",
            },
            {
                "x": 443,
                "y": 11,
                "sleep": 0.5,
                "name": "esc",
                "content": "",
                "action_name": "esc",
            },
            {
                "x": 445,
                "y": 232,
                "sleep": 0.5,
                "name": "select_all_and_paste",
                "content": "",
                "action_name": "提交",
            },
            {
                "x": 586,
                "y": 244,
                "sleep": 0.5,
                "name": "move_to_click",
                "content": "",
                "action_name": "submit",
            },
            {
                "x": 127,
                "y": 17,
                "sleep": 0.5,
                "name": "move_to_click",
                "content": "",
                "action_name": "点击选项卡",
            },
            {
                "x": 127,
                "y": 17,
                "sleep": 0.5,
                "name": "move_to_click",
                "content": "",
                "action_name": "切换pgy页面",
            },
        ]
        for action_item_click in action_item_click_list:
            pyautogui_action(action_item_click)
    # After the last product of this page: advance the listing to the next
    # page via injected JS, then scroll it into view.
    action_page_change_list = [
        {
            "x": 1377,
            "y": 147,
            "sleep": 0.5,
            "name": "move_to_click",
            "content": "",
            "action_name": "切换console",
        },
        {
            "x": 1204,
            "y": 172,
            "sleep": 0.5,
            "name": "move_to_click",
            "content": "",
            "action_name": "清空信息console",
        },
        {
            "x": 1282,
            "y": 995,
            "sleep": 1,
            "name": "select_all_and_copy_and_paste",
            "content": '''
document.getElementsByClassName("cm-pagination-next")[0].click()
''',
            "action_name": "切换产品页",
        },
        {
            "x": 1282,
            "y": 995,
            "sleep": 0.5,
            "name": "select_all_and_copy_and_paste",
            "content": '''
scrollBy(0,9999)
''',
            "action_name": "切换产品页",
        },
    ]
    for action_page_change in action_page_change_list:
        pyautogui_action(action_page_change)
# NOTE(review): the triple-quoted block below is a bare string literal at
# module level — it is never assigned or executed. It looks like a scratch
# copy of the console-scraping JS injected above; safe to delete.
'''
result=[]
result.push(document.getElementsByClassName("detail-box-title")[0].innerText)
result.push(document.getElementsByClassName("product-name")[0].innerText)
result.push(document.getElementsByClassName("product-code-value")[0].innerText)
result.push(document.getElementsByClassName("price-now")[0].innerText)
cxs=document.getElementsByClassName("promotion-item")
cxs_info = []
for (i=0;i<cxs.length;i++){
cxs_info.push(cxs[i].innerText)
}
ths=document.getElementsByClassName("property-item-title")
tds=document.getElementsByClassName("property-item-value")
kv={}
for (i=0;i<ths.length;i++){
kv[ths[i].innerText]=tds[i].innerText
}
result_info = {
"detail-box-title":result[0],
"product-name":result[1],
"product-code-value":result[2],
"price-now":result[3],
"promotion-item":cxs_info,
"property-item":kv,
}
dom=document.createElement("div")
dom.id="wlb_cover"
dom.style.position="fixed"
dom.style.top="0px"
dom.style.right="0px"
dom.innerHTML="<textarea id=\"wlb_cover_textarea\">"+JSON.stringify(result_info)+"</textarea>"
document.body.append(dom)
'''
| 35.369822
| 139
| 0.52271
| 1,319
| 11,955
| 4.613343
| 0.126611
| 0.070994
| 0.036154
| 0.03977
| 0.820871
| 0.759573
| 0.750698
| 0.727855
| 0.695809
| 0.695809
| 0
| 0.029362
| 0.302049
| 11,955
| 337
| 140
| 35.474777
| 0.699904
| 0.049519
| 0
| 0.549801
| 0
| 0
| 0.187445
| 0.040464
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003984
| false
| 0
| 0.01992
| 0
| 0.023904
| 0.003984
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5a0a4987345ff7680c0c7e6d034e1692db6bfb8d
| 2,123
|
py
|
Python
|
tests/models/test_lead_model.py
|
sixcodes/brandenburg
|
bb360590e5763456a1e54201a1960a3e0b01b16c
|
[
"BSD-3-Clause"
] | 3
|
2020-07-17T04:40:49.000Z
|
2020-08-14T14:34:11.000Z
|
tests/models/test_lead_model.py
|
sixcodes/brandenburg
|
bb360590e5763456a1e54201a1960a3e0b01b16c
|
[
"BSD-3-Clause"
] | 22
|
2020-06-23T02:13:30.000Z
|
2021-05-05T02:12:17.000Z
|
tests/models/test_lead_model.py
|
sixcodes/brandenburg
|
bb360590e5763456a1e54201a1960a3e0b01b16c
|
[
"BSD-3-Clause"
] | 2
|
2020-06-23T01:56:52.000Z
|
2020-07-14T21:47:41.000Z
|
# Third party imports
import pytest
from pydantic import ValidationError
# Local application imports
from brandenburg.models.lead import LeadModel
def test_good_data():
    """A fully-specified lead validates and exposes the derived defaults."""
    payload = {
        "name": "Maria Silva",
        "phone_number": "55912345678",
        "email": "maria@gmail.com",
        "role": "farmer",
        "is_term_accepted": "True",
        "origin": "lpx",
    }
    lead = LeadModel(**payload)
    # The model adds group "A" and by "salesforce" on top of the input.
    assert lead == dict(payload, group="A", by="salesforce")
def test_with_group_A():
    """A gmail.com lead lands in group A."""
    kwargs = dict(
        name="Maria Silva",
        phone_number="55912345678",
        email="maria@gmail.com",
        is_term_accepted="True",
        origin="lpx",
    )
    assert LeadModel(**kwargs).group == "A"
def test_with_group_A_yahoo():
    """A yahoo.it lead also lands in group A."""
    kwargs = dict(
        name="Maria Silva",
        phone_number="55912345678",
        email="maria@yahoo.it",
        is_term_accepted="True",
        origin="lpx",
    )
    assert LeadModel(**kwargs).group == "A"
def test_with_group_B():
    """A corporate-domain lead lands in group B."""
    kwargs = dict(
        name="Maria Silva",
        phone_number="55912345678",
        email="maria@apolloagricola.com.br",
        is_term_accepted="True",
        origin="lpx",
    )
    assert LeadModel(**kwargs).group == "B"
def test_raise_error_with_wrong_name():
    """A one-character name fails validation."""
    with pytest.raises(ValidationError):
        LeadModel(
            name="M",
            phone_number="55912345678",
            email="maria@apolloagricola.com.br",
        )
def test_raise_error_with_short_phone_number():
    """A six-digit phone number fails validation."""
    with pytest.raises(ValidationError):
        LeadModel(
            name="Maria Silva",
            phone_number="345678",
            email="maria@apolloagricola.com.br",
        )
def test_raise_error_with_wrong_email():
    """An email with a trailing dot (no TLD) fails validation."""
    with pytest.raises(ValidationError):
        LeadModel(
            name="Maria Silva",
            phone_number="55912345678",
            email="maria@yahoo.",
        )
def test_raise_error_with_letter_in_phone_number():
    """Letters inside the phone number fail validation."""
    with pytest.raises(ValidationError):
        LeadModel(
            name="Maria Silva",
            phone_number="aa912345678",
            email="maria@yahoo.com",
        )
| 26.5375
| 119
| 0.634951
| 248
| 2,123
| 5.217742
| 0.233871
| 0.093509
| 0.086553
| 0.117465
| 0.826121
| 0.795981
| 0.751159
| 0.751159
| 0.653787
| 0.621329
| 0
| 0.056407
| 0.231748
| 2,123
| 79
| 120
| 26.873418
| 0.736971
| 0.021196
| 0
| 0.355932
| 0
| 0
| 0.224096
| 0.039036
| 0
| 0
| 0
| 0
| 0.067797
| 1
| 0.135593
| false
| 0
| 0.050847
| 0
| 0.186441
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5a0bca84db50f61b8489b77082ba9b21ea433196
| 83
|
py
|
Python
|
account/managers.py
|
sevenstar77/coin_dev
|
2dd898d15fcb5f7bf4cfd37d5601b23b36526f3f
|
[
"MIT"
] | null | null | null |
account/managers.py
|
sevenstar77/coin_dev
|
2dd898d15fcb5f7bf4cfd37d5601b23b36526f3f
|
[
"MIT"
] | null | null | null |
account/managers.py
|
sevenstar77/coin_dev
|
2dd898d15fcb5f7bf4cfd37d5601b23b36526f3f
|
[
"MIT"
] | null | null | null |
from django.db.models import Manager
class MyaccountinfoManager(Manager):
    """Default Django manager for the account-info model.

    No custom queryset behavior yet; exists as an extension point.
    """
| 20.75
| 36
| 0.807229
| 10
| 83
| 6.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 83
| 4
| 37
| 20.75
| 0.930556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
5a13edf4606d19785f8f1fd05965da872210b0f4
| 144
|
py
|
Python
|
src/daipecore/decorator/StringableParameterInterface.py
|
daipe-ai/daipe-core
|
aa205495fa6b464fa6078d17e439c60345ac99ea
|
[
"MIT"
] | 1
|
2021-09-17T09:07:09.000Z
|
2021-09-17T09:07:09.000Z
|
src/daipecore/decorator/StringableParameterInterface.py
|
daipe-ai/daipe-core
|
aa205495fa6b464fa6078d17e439c60345ac99ea
|
[
"MIT"
] | 2
|
2021-12-20T07:46:33.000Z
|
2022-02-24T07:02:05.000Z
|
src/daipecore/decorator/StringableParameterInterface.py
|
daipe-ai/daipe-core
|
aa205495fa6b464fa6078d17e439c60345ac99ea
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
class StringableParameterInterface(ABC):
    """Contract for parameter objects that can render themselves as text."""

    @abstractmethod
    def to_string(self) -> str:
        """Return the string form of this parameter."""
| 18
| 40
| 0.715278
| 15
| 144
| 6.8
| 0.8
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.215278
| 144
| 7
| 41
| 20.571429
| 0.902655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
5a1627bbae823c1208d9e969f62e365458c30f13
| 5,195
|
py
|
Python
|
pyscripts/benchmark/benchmark_by_mIoU.py
|
xgmiao/Adaptive_Affinity_Fields
|
8028f22e2664dc0ac6b0e3a18a9fb664e3dec7f9
|
[
"MIT"
] | null | null | null |
pyscripts/benchmark/benchmark_by_mIoU.py
|
xgmiao/Adaptive_Affinity_Fields
|
8028f22e2664dc0ac6b0e3a18a9fb664e3dec7f9
|
[
"MIT"
] | null | null | null |
pyscripts/benchmark/benchmark_by_mIoU.py
|
xgmiao/Adaptive_Affinity_Fields
|
8028f22e2664dc0ac6b0e3a18a9fb664e3dec7f9
|
[
"MIT"
] | null | null | null |
import argparse
import os
import numpy as np
from PIL import Image
from utils.metrics import iou_stats
# tp_fn = np.zeros(args.num_classes, dtype=np.float64)
# tp_fp = np.zeros(args.num_classes, dtype=np.float64)
# tp = np.zeros(args.num_classes, dtype=np.float64)
#
# for dirpath, dirnames, filenames in os.walk(args.pred_dir):
# for filename in filenames:
# predname = os.path.join(dirpath, filename)
# gtname = predname.replace(args.pred_dir, args.gt_dir)
# if args.string_replace != '':
# stra, strb = args.string_replace.split(',')
# gtname = gtname.replace(stra, strb)
#
# pred = np.asarray(
# Image.open(predname).convert(mode='L'),
# dtype=np.uint8)
#
# gt = np.asarray(
# Image.open(gtname).convert(mode='P'),
# dtype=np.uint8)
#
# _tp_fn, _tp_fp, _tp = iou_stats(
# pred,
# gt,
# num_classes=args.num_classes,
# background=0)
#
# tp_fn += _tp_fn
# tp_fp += _tp_fp
# tp += _tp
#
# iou = tp / (tp_fn + tp_fp - tp + 1e-12) * 100.0
#
# class_names = ['Background', 'Aero', 'Bike', 'Bird', 'Boat',
# 'Bottle', 'Bus', 'Car', 'Cat', 'Chair', 'Cow',
# 'Table', 'Dog', 'Horse', 'MBike', 'Person',
# 'Plant', 'Sheep', 'Sofa', 'Train', 'TV']
#
# for i in range(args.num_classes):
# print('class {:10s}: {:02d}, acc: {:4.4f}%'.format(
# class_names[i], i, iou[i]))
# mean_iou = iou.sum() / args.num_classes
# print('mean IOU: {:4.4f}%'.format(mean_iou))
#
# mean_pixel_acc = tp.sum() / (tp_fp.sum() + 1e-12)
# print('mean Pixel Acc: {:4.4f}%'.format(mean_pixel_acc))
def calcu_voc_mIou(pred_dir, gt_dir):
    """Print per-class IoU, mean IoU and mean pixel accuracy for PASCAL VOC.

    Walks every prediction image under ``pred_dir``; each file is assumed to
    have a ground-truth counterpart at the same relative path under
    ``gt_dir``. Uses the 21-class VOC setup (background + 20 objects).
    """
    assert os.path.isdir(pred_dir)
    assert os.path.isdir(gt_dir)
    print('......')
    n_class = 21
    tp_fn = np.zeros(n_class, dtype=np.float64)
    tp_fp = np.zeros(n_class, dtype=np.float64)
    tp = np.zeros(n_class, dtype=np.float64)
    for dirpath, _dirnames, filenames in os.walk(pred_dir):
        for fname in filenames:
            pred_path = os.path.join(dirpath, fname)
            gt_path = pred_path.replace(pred_dir, gt_dir)
            # Predictions are 8-bit grayscale; labels are palette images.
            pred_arr = np.asarray(
                Image.open(pred_path).convert(mode='L'), dtype=np.uint8)
            gt_arr = np.asarray(
                Image.open(gt_path).convert(mode='P'), dtype=np.uint8)
            d_tp_fn, d_tp_fp, d_tp = iou_stats(
                pred_arr, gt_arr, num_classes=n_class, background=0)
            tp_fn += d_tp_fn
            tp_fp += d_tp_fp
            tp += d_tp
    # IoU per class in percent; the epsilon guards classes with no pixels.
    iou = tp / (tp_fn + tp_fp - tp + 1e-12) * 100.0
    class_names = ['Background', 'Aero', 'Bike', 'Bird', 'Boat',
                   'Bottle', 'Bus', 'Car', 'Cat', 'Chair', 'Cow',
                   'Table', 'Dog', 'Horse', 'MBike', 'Person',
                   'Plant', 'Sheep', 'Sofa', 'Train', 'TV']
    for i, cname in enumerate(class_names):
        print('class {:10s}: {:02d}, acc: {:4.4f}%'.format(cname, i, iou[i]))
    mean_iou = iou.sum() / n_class
    print('mean IOU: {:4.4f}%'.format(mean_iou))
    mean_pixel_acc = tp.sum() / (tp_fp.sum() + 1e-12)
    print('mean Pixel Acc: {:4.4f}%'.format(mean_pixel_acc))
def calcu_cityscapes_mIou(pred_dir, gt_dir):
    """Print per-class IoU, mean IoU and mean pixel accuracy for Cityscapes.

    Walks every prediction image under ``pred_dir``; the matching label file
    is derived by swapping ``pred_dir`` for ``gt_dir`` and ``leftImg8bit``
    for ``gtFineId_labelIds`` in the path. Uses the 19 trainId classes.
    """
    assert os.path.isdir(pred_dir)
    assert os.path.isdir(gt_dir)
    n_class = 19
    tp_fn = np.zeros(n_class, dtype=np.float64)
    tp_fp = np.zeros(n_class, dtype=np.float64)
    tp = np.zeros(n_class, dtype=np.float64)
    for dirpath, _dirnames, filenames in os.walk(pred_dir):
        for fname in filenames:
            pred_path = os.path.join(dirpath, fname)
            gt_path = pred_path.replace(pred_dir, gt_dir)
            gt_path = gt_path.replace('leftImg8bit', 'gtFineId_labelIds')
            pred_arr = np.asarray(
                Image.open(pred_path).convert(mode='L'), dtype=np.uint8)
            gt_arr = np.asarray(
                Image.open(gt_path).convert(mode='L'), dtype=np.uint8)
            d_tp_fn, d_tp_fp, d_tp = iou_stats(
                pred_arr, gt_arr, num_classes=n_class, background=0)
            tp_fn += d_tp_fn
            tp_fp += d_tp_fp
            tp += d_tp
    # IoU per class in percent; the epsilon guards classes with no pixels.
    iou = tp / (tp_fn + tp_fp - tp + 1e-12) * 100.0
    # BUG FIX: this list previously reused the 21-entry PASCAL-VOC names,
    # mislabeling every per-class line. These are the 19 Cityscapes classes
    # in standard trainId order.
    class_names = ['Road', 'Sidewalk', 'Building', 'Wall', 'Fence',
                   'Pole', 'Traffic Light', 'Traffic Sign', 'Vegetation',
                   'Terrain', 'Sky', 'Person', 'Rider', 'Car', 'Truck',
                   'Bus', 'Train', 'Motorcycle', 'Bicycle']
    for i, cname in enumerate(class_names):
        print('class {:10s}: {:02d}, acc: {:4.4f}%'.format(cname, i, iou[i]))
    mean_iou = iou.sum() / n_class
    print('mean IOU: {:4.4f}%'.format(mean_iou))
    mean_pixel_acc = tp.sum() / (tp_fp.sum() + 1e-12)
    print('mean Pixel Acc: {:4.4f}%'.format(mean_pixel_acc))
def get_arguments():
    """Parse the benchmark's command-line options and return the namespace."""
    parser = argparse.ArgumentParser(
        description='Benchmark segmentation predictions')
    parser.add_argument('--dataset', type=str, default='voc',
                        help='dataset')
    parser.add_argument('--pred-dir', type=str, default='',
                        help='/path/to/prediction.')
    parser.add_argument('--gt-dir', type=str, default='',
                        help='/path/to/ground-truths')
    parser.add_argument('--string-replace', type=str, default=',',
                        help='replace the first string with the second one')
    return parser.parse_args()
def main():
    """Dispatch to the dataset-specific benchmark chosen by ``--dataset``."""
    args = get_arguments()
    dataset = args.dataset.lower()
    if dataset == 'voc':
        calcu_voc_mIou(args.pred_dir, args.gt_dir)
    elif dataset == 'cityscapes':
        calcu_cityscapes_mIou(args.pred_dir, args.gt_dir)
    # Any other dataset name is silently ignored, matching prior behavior.


if __name__ == '__main__':
    main()
| 28.543956
| 80
| 0.635611
| 806
| 5,195
| 3.879653
| 0.171216
| 0.023025
| 0.023025
| 0.023025
| 0.752159
| 0.752159
| 0.747362
| 0.714103
| 0.70259
| 0.678925
| 0
| 0.02147
| 0.175168
| 5,195
| 182
| 81
| 28.543956
| 0.708285
| 0.279692
| 0
| 0.647059
| 0
| 0
| 0.155658
| 0.005956
| 0
| 0
| 0
| 0
| 0.039216
| 1
| 0.039216
| false
| 0.009804
| 0.04902
| 0
| 0.098039
| 0.068627
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5a267cfed9449761fd7dc7142848ad0d4c4ec100
| 41
|
py
|
Python
|
tests/scruples/extraction/__init__.py
|
allenai/scruples
|
9a43459c507e57d89ab8442a4f3985cedecb8710
|
[
"Apache-2.0"
] | 29
|
2020-05-09T10:55:45.000Z
|
2022-03-28T16:18:02.000Z
|
tests/scruples/extraction/__init__.py
|
allenai/scruples
|
9a43459c507e57d89ab8442a4f3985cedecb8710
|
[
"Apache-2.0"
] | null | null | null |
tests/scruples/extraction/__init__.py
|
allenai/scruples
|
9a43459c507e57d89ab8442a4f3985cedecb8710
|
[
"Apache-2.0"
] | 6
|
2020-10-05T12:24:28.000Z
|
2021-12-06T19:51:06.000Z
|
"""Tests for ``scruples.extraction``."""
| 20.5
| 40
| 0.634146
| 4
| 41
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.684211
| 0.829268
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ce64fcaade4574aaaa9fffa90307dd0b64d58aa5
| 127
|
py
|
Python
|
029/main.py
|
alexprengere/euler
|
d93dada0fe434cd736d11b9cfb1635146130f24a
|
[
"Apache-2.0"
] | null | null | null |
029/main.py
|
alexprengere/euler
|
d93dada0fe434cd736d11b9cfb1635146130f24a
|
[
"Apache-2.0"
] | null | null | null |
029/main.py
|
alexprengere/euler
|
d93dada0fe434cd736d11b9cfb1635146130f24a
|
[
"Apache-2.0"
] | null | null | null |
# Project Euler 29: count the distinct values of a**b for 2 <= a, b <= N.
N = 100
powers = {a ** b for a in range(2, N + 1) for b in range(2, N + 1)}
print(len(powers))
| 14.111111
| 29
| 0.519685
| 25
| 127
| 2.64
| 0.56
| 0.212121
| 0.242424
| 0.272727
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 0.299213
| 127
| 8
| 30
| 15.875
| 0.662921
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ceb4703afeb048d05ebe9eef496f792fffcddaa7
| 688
|
py
|
Python
|
raw_type.py
|
QiXi9409/Simultaneous_ECG_Heartbeat
|
8b61b6434c5c505c0d55a46db08e627d275fc045
|
[
"MIT"
] | 1
|
2022-01-21T06:29:19.000Z
|
2022-01-21T06:29:19.000Z
|
raw_type.py
|
sliang11/ECG-FasterRCNN
|
8984084d570a0e45bf3508a1a23d562ba147ca84
|
[
"MIT"
] | null | null | null |
raw_type.py
|
sliang11/ECG-FasterRCNN
|
8984084d570a0e45bf3508a1a23d562ba147ca84
|
[
"MIT"
] | 2
|
2020-06-02T01:31:29.000Z
|
2021-12-30T12:58:52.000Z
|
from abc import abstractclassmethod
class raw_type():
    """Base for ECG record loaders: subclasses supply the per-step hooks,
    ``process`` runs them in a fixed order.

    NOTE(review): ``abstractclassmethod`` is deprecated and, without an
    ABCMeta metaclass here, enforces nothing — it only turns these stubs
    into classmethods. Kept as-is to preserve behavior; confirm before
    changing.
    """

    @abstractclassmethod
    def read_data(self, path):
        pass

    @abstractclassmethod
    def split(self):
        pass

    @abstractclassmethod
    def annotation(self):
        pass

    @abstractclassmethod
    def correct(self):
        pass

    @abstractclassmethod
    def tensecond(self):
        pass

    @abstractclassmethod
    def filter_data(self):
        pass

    def process(self, path):
        # Fixed pipeline: load -> filter -> annotate -> split -> window -> correct.
        self.read_data(path)
        self.filter_data()
        self.annotation()
        self.split()
        self.tensecond()
        self.correct()
| 18.594595
| 36
| 0.569767
| 62
| 688
| 6.225806
| 0.306452
| 0.341969
| 0.336788
| 0.310881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.351744
| 688
| 36
| 37
| 19.111111
| 0.865471
| 0.026163
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.259259
| false
| 0.222222
| 0.037037
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
cebaac4216596ccde9dd01d2d58c05bd03a803b5
| 125
|
py
|
Python
|
__init__.py
|
alexgonzl/TreeMazeAnalyses2
|
9bd20328368a915a0d9b81c02ae7af37c5c0c839
|
[
"MIT"
] | null | null | null |
__init__.py
|
alexgonzl/TreeMazeAnalyses2
|
9bd20328368a915a0d9b81c02ae7af37c5c0c839
|
[
"MIT"
] | null | null | null |
__init__.py
|
alexgonzl/TreeMazeAnalyses2
|
9bd20328368a915a0d9b81c02ae7af37c5c0c839
|
[
"MIT"
] | null | null | null |
# import Pre_Processing
# import Sorting
# import Utils
# import Analyses
from .Analyses.experiment_info import SubjectInfo
| 17.857143
| 49
| 0.816
| 15
| 125
| 6.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136
| 125
| 6
| 50
| 20.833333
| 0.925926
| 0.52
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0c9582c981591496c9a4fcfdc94d673a6ca182c9
| 176
|
py
|
Python
|
speech_datasets/utils/__init__.py
|
salesforce/speech-datasets
|
48a935727c38d150e3b86b99bdda65e0afd69920
|
[
"Apache-2.0"
] | 11
|
2021-09-14T23:13:58.000Z
|
2022-02-24T07:11:09.000Z
|
speech_datasets/utils/__init__.py
|
salesforce/speech-datasets
|
48a935727c38d150e3b86b99bdda65e0afd69920
|
[
"Apache-2.0"
] | null | null | null |
speech_datasets/utils/__init__.py
|
salesforce/speech-datasets
|
48a935727c38d150e3b86b99bdda65e0afd69920
|
[
"Apache-2.0"
] | 1
|
2021-09-19T08:44:56.000Z
|
2021-09-19T08:44:56.000Z
|
"""Initialize sub package & bring general util into this namespace."""
from speech_datasets.utils.misc import get_root, check_kwargs, dynamic_import, set_deterministic_pytorch
| 58.666667
| 104
| 0.829545
| 24
| 176
| 5.833333
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096591
| 176
| 2
| 105
| 88
| 0.880503
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0c9b6c3dc09e7d0ddc4eb18c8fe22ab97c5150ad
| 242
|
py
|
Python
|
zappy/api.py
|
OpenMDAO/zappy
|
2c72048b4c4e0ce0ae83221e4ee5788978254340
|
[
"Apache-2.0"
] | 1
|
2022-02-18T22:41:37.000Z
|
2022-02-18T22:41:37.000Z
|
zappy/api.py
|
OpenMDAO/zappy
|
2c72048b4c4e0ce0ae83221e4ee5788978254340
|
[
"Apache-2.0"
] | null | null | null |
zappy/api.py
|
OpenMDAO/zappy
|
2c72048b4c4e0ce0ae83221e4ee5788978254340
|
[
"Apache-2.0"
] | null | null | null |
from .LF_elements.bus import ACbus, DCbus
from .LF_elements.line import ACline, DCline
from .LF_elements.generator import ACgenerator, DCgenerator
from .LF_elements.load import ACload, DCload
from .LF_elements.converter import Converter
| 40.333333
| 60
| 0.822314
| 34
| 242
| 5.705882
| 0.5
| 0.154639
| 0.360825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119835
| 242
| 5
| 61
| 48.4
| 0.910798
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0cc8be06eeb9a7185a99cbce65f729bc05e75e73
| 147
|
py
|
Python
|
cli/create/commands.py
|
soonbee/cli-template
|
6563940f0ceda981b1d5513551fd12077f849be1
|
[
"MIT"
] | null | null | null |
cli/create/commands.py
|
soonbee/cli-template
|
6563940f0ceda981b1d5513551fd12077f849be1
|
[
"MIT"
] | null | null | null |
cli/create/commands.py
|
soonbee/cli-template
|
6563940f0ceda981b1d5513551fd12077f849be1
|
[
"MIT"
] | null | null | null |
import click
@click.group('create')
def command_group():
    # Container group for the `create` subcommands; no shared setup needed.
    # (No docstring on purpose: click would surface it as CLI help text.)
    pass
@command_group.command()
def something():
    # Placeholder subcommand: invoked as `create something`.
    click.echo('create something')
| 12.25
| 34
| 0.70068
| 18
| 147
| 5.611111
| 0.5
| 0.237624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156463
| 147
| 11
| 35
| 13.363636
| 0.814516
| 0
| 0
| 0
| 0
| 0
| 0.14966
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0.142857
| 0.142857
| 0
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
0b4155cbaf66c68336d5fd38afab5229e810c82b
| 220
|
py
|
Python
|
pycombo/__init__.py
|
Casyfill/pyCOMBO
|
9590cbc94644ad186b3a575597eade2d936f834b
|
[
"MIT"
] | 14
|
2016-10-05T06:31:43.000Z
|
2022-01-13T11:26:01.000Z
|
pycombo/__init__.py
|
Casyfill/pyCOMBO
|
9590cbc94644ad186b3a575597eade2d936f834b
|
[
"MIT"
] | 50
|
2019-10-02T09:55:20.000Z
|
2022-03-31T20:23:30.000Z
|
pycombo/__init__.py
|
Casyfill/pyCOMBO
|
9590cbc94644ad186b3a575597eade2d936f834b
|
[
"MIT"
] | 2
|
2019-12-03T18:58:20.000Z
|
2021-02-02T08:02:10.000Z
|
# Resolve the installed package version. importlib.metadata is stdlib from
# Python 3.8 onward; older interpreters fall back to the backport package.
try:
    import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
    import importlib_metadata

__version__ = importlib_metadata.version(__name__)

from .pyCombo import execute

# Public API of the package.
__all__ = ["execute"]
| 20
| 51
| 0.804545
| 23
| 220
| 7.043478
| 0.565217
| 0.419753
| 0.283951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 220
| 10
| 52
| 22
| 0.852632
| 0
| 0
| 0
| 0
| 0
| 0.031818
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.571429
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0b585cc542b756afcc31bf6388c626e3d5b7ce35
| 5,713
|
py
|
Python
|
cli/cli/src/tests/test_json_display.py
|
nbwhite/dai-ds
|
fc6da289e43277927493f0b7e6232955898e9a2e
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2020-01-06T19:40:55.000Z
|
2021-11-03T19:30:05.000Z
|
cli/cli/src/tests/test_json_display.py
|
nbwhite/dai-ds
|
fc6da289e43277927493f0b7e6232955898e9a2e
|
[
"ECL-2.0",
"Apache-2.0"
] | 83
|
2020-01-08T18:56:39.000Z
|
2022-03-28T22:40:27.000Z
|
cli/cli/src/tests/test_json_display.py
|
nbwhite/dai-ds
|
fc6da289e43277927493f0b7e6232955898e9a2e
|
[
"ECL-2.0",
"Apache-2.0"
] | 23
|
2020-01-02T20:09:12.000Z
|
2022-02-16T13:31:00.000Z
|
# -*- coding: utf-8 -*-
# !/usr/bin/env python
# Copyright (C) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
"""
Test the JSON Display class in cli implementation.
"""
import json
from unittest import TestCase
from ..json_display import JsonDisplay
class TestJsonDisplay(TestCase):
    """Unit tests for JsonDisplay tabular and raw-JSON rendering.

    Refactor: the identical three-column schema and three data rows were
    previously duplicated inline in every test; they are hoisted to class
    constants and a small payload helper.
    """

    # Schema shared by most payloads: three string columns.
    _SCHEMA = [
        {"unit": "string", "data": "sub_property_name",
         "heading": "sub_property_name"},
        {"unit": "string", "data": "actual", "heading": "actual"},
        {"unit": "string", "data": "reference", "heading": "reference"},
    ]
    # Data rows used by the happy-path tests.
    _ROWS = [
        ["kernel_version", "31.2", "42.3"],
        ["os_version", "", ""],
        ["version_level", "", ""],
    ]

    @staticmethod
    def _display(payload):
        """Build a JsonDisplay from a payload dict (serialized to JSON)."""
        return JsonDisplay(json.dumps(payload))

    def test_positive_status_code(self):
        json_display = self._display({
            "result-data-columns": 3, "result-status-code": 0,
            "result-data-lines": 3, "schema": self._SCHEMA,
            "data": self._ROWS})
        self.assertIn('ACTUAL', json_display.display_json_in_tabular_format())

    def test_non_positive_status_code(self):
        # Non-zero status code must surface as a RuntimeError.
        json_display = self._display({
            "result-data-columns": 3, "result-status-code": 1,
            "result-data-lines": 3, "schema": self._SCHEMA})
        with self.assertRaises(RuntimeError):
            json_display.display_json_in_tabular_format()

    def test_zero_columns_in_schema_returned(self):
        json_display = self._display({
            "result-data-columns": 0, "result-status-code": 0,
            "result-data-lines": 3, "schema": [], "data": [[]]})
        with self.assertRaises(RuntimeError):
            json_display.display_json_in_tabular_format()

    def test_zero_data_lines_returned(self):
        json_display = self._display({
            "result-data-columns": 3, "result-status-code": 0,
            "result-data-lines": 0, "schema": self._SCHEMA, "data": [[]]})
        self.assertTrue(
            "No data returned." in json_display.display_json_in_tabular_format())

    def test_json_missing_filed_key_error(self):
        # "result-data-columns" is intentionally omitted to trigger the
        # missing-key error path.
        json_display = self._display({
            "result-status-code": 0, "result-data-lines": 0,
            "schema": self._SCHEMA, "data": [[]]})
        with self.assertRaises(RuntimeError):
            json_display.display_json_in_tabular_format()

    def test_empty_json_data_to_display(self):
        with self.assertRaises(RuntimeError):
            JsonDisplay(None)
        with self.assertRaises(RuntimeError):
            JsonDisplay([])

    def test_display_raw_json(self):
        json_display = self._display({
            "result-data-columns": 3, "result-status-code": 0,
            "result-data-lines": 3, "schema": self._SCHEMA,
            "data": self._ROWS})
        self.assertEqual(
            [{'actual': '31.2', 'reference': '42.3',
              'sub_property_name': 'kernel_version'},
             {'actual': '', 'reference': '', 'sub_property_name': 'os_version'},
             {'actual': '', 'reference': '',
              'sub_property_name': 'version_level'}],
            json.loads(json_display.display_raw_json()))
| 64.191011
| 116
| 0.414493
| 430
| 5,713
| 5.276744
| 0.186047
| 0.066108
| 0.092552
| 0.068753
| 0.789335
| 0.751432
| 0.718819
| 0.703394
| 0.703394
| 0.647422
| 0
| 0.013419
| 0.452127
| 5,713
| 88
| 117
| 64.920455
| 0.711502
| 0.029232
| 0
| 0.661972
| 0
| 0
| 0.225876
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 1
| 0.098592
| false
| 0
| 0.042254
| 0
| 0.15493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0b81987f9255fe722bbc08768f849c6611e34724
| 69
|
py
|
Python
|
vqa_experiments/configs/config_test.py
|
Bidur-Khanal/REMIND
|
4eeb6bce7a27d814c94948e2790efedacd014af1
|
[
"MIT"
] | 67
|
2020-06-29T14:30:40.000Z
|
2022-02-24T06:14:50.000Z
|
vqa_experiments/configs/config_test.py
|
msrocean/REMIND
|
2e82ca75a3e4d4ccba00c5a763097cc0f650a0a4
|
[
"MIT"
] | 5
|
2020-08-14T17:01:39.000Z
|
2021-09-12T10:41:25.000Z
|
vqa_experiments/configs/config_test.py
|
msrocean/REMIND
|
2e82ca75a3e4d4ccba00c5a763097cc0f650a0a4
|
[
"MIT"
] | 19
|
2020-07-04T14:59:26.000Z
|
2022-02-15T11:24:52.000Z
|
"""
Written by Kushal, modified by Robik
"""
import torch
import sys
| 11.5
| 36
| 0.724638
| 10
| 69
| 5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 69
| 5
| 37
| 13.8
| 0.877193
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0b8e15fe492559daf3061a4c8d280f46ed29f071
| 710
|
py
|
Python
|
mocks/categories.py
|
budgetsapp/ba-api
|
cbe6624b5e0178e981f464af48275027bb562126
|
[
"MIT"
] | null | null | null |
mocks/categories.py
|
budgetsapp/ba-api
|
cbe6624b5e0178e981f464af48275027bb562126
|
[
"MIT"
] | 12
|
2020-01-22T14:22:08.000Z
|
2021-06-10T22:34:26.000Z
|
mocks/categories.py
|
budgetsapp/ba-api
|
cbe6624b5e0178e981f464af48275027bb562126
|
[
"MIT"
] | null | null | null |
# Static fixture: four categories, all owned by the same mock user.
all_categories = [
    {"id": "123e4567-e89b-12d3-a456-426655440001",
     "user_id": "123e4567-e89b-12d3-a456-426655440000",
     "display_name": "taxi"},
    {"id": "123e4567-e89b-12d3-a456-426655440002",
     "user_id": "123e4567-e89b-12d3-a456-426655440000",
     "display_name": "cafe"},
    {"id": "123e4567-e89b-12d3-a456-426655440003",
     "user_id": "123e4567-e89b-12d3-a456-426655440000",
     "display_name": "cinema"},
    {"id": "123e4567-e89b-12d3-a456-426655440004",
     "user_id": "123e4567-e89b-12d3-a456-426655440000",
     "display_name": "bus"},
]


def getCategoryById(categery_id):
    """Return the category dict with the given id, or None when absent."""
    return next(
        (cat for cat in all_categories if cat["id"] == categery_id), None)
| 28.4
| 54
| 0.646479
| 84
| 710
| 5.321429
| 0.333333
| 0.178971
| 0.250559
| 0.322148
| 0.635347
| 0.438479
| 0.438479
| 0.438479
| 0.438479
| 0
| 0
| 0.369863
| 0.177465
| 710
| 24
| 55
| 29.583333
| 0.395548
| 0
| 0
| 0.318182
| 0
| 0
| 0.550704
| 0.405634
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0bac47baf6ec064ad197ff768338faec744d535c
| 1,521
|
py
|
Python
|
resources/dot_PyCharm/system/python_stubs/-762174762/PySide/QtGui/QTextTableCellFormat.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | 1
|
2020-04-20T02:27:20.000Z
|
2020-04-20T02:27:20.000Z
|
resources/dot_PyCharm/system/python_stubs/cache/8cdc475d469a13122bc4bc6c3ac1c215d93d5f120f5cc1ef33a8f3088ee54d8e/PySide/QtGui/QTextTableCellFormat.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | null | null | null |
resources/dot_PyCharm/system/python_stubs/cache/8cdc475d469a13122bc4bc6c3ac1c215d93d5f120f5cc1ef33a8f3088ee54d8e/PySide/QtGui/QTextTableCellFormat.py
|
basepipe/developer_onboarding
|
05b6a776f8974c89517868131b201f11c6c2a5ad
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# module PySide.QtGui
# from C:\Python27\lib\site-packages\PySide\QtGui.pyd
# by generator 1.147
# no doc
# imports
import PySide.QtCore as __PySide_QtCore
import Shiboken as __Shiboken
from QTextCharFormat import QTextCharFormat
class QTextTableCellFormat(QTextCharFormat):
    """PyCharm-generated skeleton for PySide.QtGui.QTextTableCellFormat.

    Every method body is a placeholder (``pass``); the real implementations
    live in the compiled QtGui.pyd extension module.  The stub generator
    could not recover the native signatures, hence the ``*args, **kwargs``
    placeholders on every method.
    """
    # no doc
    def bottomPadding(self, *args, **kwargs): # real signature unknown
        pass

    def isValid(self, *args, **kwargs): # real signature unknown
        pass

    def leftPadding(self, *args, **kwargs): # real signature unknown
        pass

    def rightPadding(self, *args, **kwargs): # real signature unknown
        pass

    def setBottomPadding(self, *args, **kwargs): # real signature unknown
        pass

    def setLeftPadding(self, *args, **kwargs): # real signature unknown
        pass

    def setPadding(self, *args, **kwargs): # real signature unknown
        pass

    def setRightPadding(self, *args, **kwargs): # real signature unknown
        pass

    def setTopPadding(self, *args, **kwargs): # real signature unknown
        pass

    def topPadding(self, *args, **kwargs): # real signature unknown
        pass

    def __copy__(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    @staticmethod # known case of __new__
    def __new__(S, *more): # real signature unknown; restored from __doc__
        """ T.__new__(S, ...) -> a new object with type S, a subtype of T """
        pass
| 26.224138
| 77
| 0.657462
| 178
| 1,521
| 5.455056
| 0.342697
| 0.174047
| 0.267765
| 0.222451
| 0.503605
| 0.503605
| 0.503605
| 0.46447
| 0
| 0
| 0
| 0.006055
| 0.239974
| 1,521
| 57
| 78
| 26.684211
| 0.83391
| 0.3524
| 0
| 0.419355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.419355
| false
| 0.419355
| 0.096774
| 0
| 0.548387
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
e7eebaba808e645ecbf4b0a9b18b80ccaff7281d
| 32,554
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_ipv6_io_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_ipv6_io_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_ipv6_io_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
# Auto-generated YDK meta-information table for the Cisco-IOS-XR-ipv6-io-oper
# YANG model.  The generator originally emitted one ~6-line
# _MetaInfoClassMember literal per counter leaf (~80 members that differ only
# in YANG name and description).  Here the same table is built from compact
# (yang-name, description) pairs via small helpers, removing the duplication
# while producing structurally identical _meta_table contents.

_MODULE_NAME = 'Cisco-IOS-XR-ipv6-io-oper'
_PY_MODULE = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv6_io_oper'


def _counter_member(yang_name, description):
    # One uint32 statistics counter leaf (range 0..4294967295); never a key.
    # The python attribute name is the YANG name with '-' mapped to '_'.
    return _MetaInfoClassMember(
        yang_name, ATTRIBUTE, 'int', None, None,
        [('0', '4294967295')], [],
        ''' %s
            ''' % description,
        yang_name.replace('-', '_'),
        _MODULE_NAME, False)


def _child_member(yang_name, reference_kind, class_name, qualified_class, description):
    # One child container (REFERENCE_CLASS) or list (REFERENCE_LIST) member.
    return _MetaInfoClassMember(
        yang_name, reference_kind, class_name, _PY_MODULE, qualified_class,
        [], [],
        ''' %s
            ''' % description,
        yang_name.replace('-', '_'),
        _MODULE_NAME, False)


def _table_entry(qualified_class, yang_name, members):
    # Wrap a member list into the {'meta_info': _MetaInfoClass(...)} shape
    # that YDK expects for every _meta_table value.
    return {
        'meta_info': _MetaInfoClass(
            qualified_class,
            False,
            members,
            _MODULE_NAME,
            yang_name,
            _yang_ns._namespaces[_MODULE_NAME],
            _PY_MODULE),
    }


# (YANG leaf name, description) pairs for the Traffic.Ipv6 counters.
_IPV6_COUNTERS = [
    ('bad-header-packets', 'Bad Header Packets'),
    ('bad-source-address-packets', 'Bad Source Address Packets'),
    ('format-errors', 'Format Errors'),
    ('forwarded-packets', 'Packets Forwarded'),
    ('fragment-count', 'Fragmented Packet Count'),
    ('fragment-failures', 'Fragment Failures'),
    ('fragmented-packets', 'Packets Fragmented'),
    ('fragments', 'Fragments'),
    ('generated-packets', 'Packets Output'),
    ('hop-count-exceeded-packets', 'Hop Count Exceeded Packets'),
    ('lisp-decap-errors', 'Lisp Decap errors'),
    ('lisp-encap-errors', 'Lisp Encap errors'),
    ('lisp-v4-decap-packets', 'Lisp IPv4 Decapped packets'),
    ('lisp-v4-encap-packets', 'Lisp IPv4 Encapped packets'),
    ('lisp-v6-decap-packets', 'Lisp IPv6 Decapped packets'),
    ('lisp-v6-encap-packets', 'Lisp IPv6 Encapped packets'),
    ('local-destination-packets', 'Local Destination Packets'),
    ('miscellaneous-drops', 'Misc. drops'),
    ('no-route-packets', 'No Route Packets'),
    ('reassembled-packets', 'Reassembled Packets'),
    ('reassembly-failures', 'Reassembly Failures'),
    ('reassembly-maximum-drops', 'Reassembly Reach Maximum Drop'),
    ('reassembly-timeouts', 'Reassembly Timeouts'),
    ('received-multicast-packets', 'Multicast In'),
    ('sent-multicast-packets', 'Multicast Out'),
    ('source-routed-packets', 'Packets Source Routed'),
    ('too-big-packets', 'Packet Too Big'),
    ('total-packets', 'Total Packets'),
    ('truncated-packets', 'Truncated Packets'),
    ('unknown-option-type-packets', 'Unknown Option Type Packets'),
    ('unknown-protocol-packets', 'Unknown Protocol Packets'),
]

# (YANG leaf name, description) pairs for the Traffic.Icmp counters.
_ICMP_COUNTERS = [
    ('checksum-error-messages', 'ICMP Checksum Errors'),
    ('output-messages', 'ICMP Transmitted'),
    ('received-echo-reply-messages', 'ICMP Echo Reply Received'),
    ('received-echo-request-messages', 'ICMP Echo Request Received'),
    ('received-hop-count-expired-messages', 'ICMP Hop Count Expired Received'),
    ('received-parameter-error-messages', 'ICMP Parameter Error Messages Received'),
    ('received-parameter-header-messages', 'ICMP Parameter Next Header Messages Received'),
    ('received-parameter-option-messages', 'ICMP Parameter Option Problem Received'),
    ('received-parameter-unknown-type-messages', 'ICMP Parameter Unknown Type Messages Received'),
    ('received-reassembly-timeouts', 'ICMP Reassembly Timeouts'),
    ('received-too-big-messages', 'ICMP Too Big Messages Received'),
    ('received-unknown-timeout-messages', 'ICMP Unknown Timeout Messages Received'),
    ('received-unreachable-address-messages', 'ICMP Addr Unreachable Received'),
    ('received-unreachable-admin-messages', 'ICMP Admin Unreachable Received'),
    ('received-unreachable-neighbor-messages', 'ICMP Host Unreachable Received'),
    ('received-unreachable-port-messages', 'ICMP Port Unreachable Received'),
    ('received-unreachable-routing-messages', 'ICMP Route Unreachable Received'),
    ('received-unreachable-unknown-type-messages', 'ICMP Unreachable Unknown Messages Received'),
    ('sent-echo-reply-messages', 'ICMP Echo Reply Sent'),
    ('sent-echo-request-messages', 'ICMP Echo Request Sent'),
    ('sent-hop-count-expired-messages', 'ICMP Hop Count Expired Sent'),
    ('sent-parameter-error-messages', 'ICMP Parameter Error Messages Sent'),
    ('sent-parameter-header-messages', 'ICMP Parameter Next Header Messages Sent'),
    ('sent-parameter-option-messages', 'ICMP Parameter Option Messages Sent'),
    ('sent-parameter-unknown-type-messages', 'ICMP Parameter Unknown Type Messages Sent'),
    ('sent-rate-limited-packets', 'ICMP Sent Packets Ratelimited'),
    ('sent-reassembly-timeouts', 'ICMP Reassembly Timeouts'),
    ('sent-too-big-messages', 'ICMP Too Big Messages Sent'),
    ('sent-unknown-timeout-messages', 'ICMP Unknown Timeout Messages Sent'),
    ('sent-unreachable-address-messages', 'ICMP Addr Unreachable Sent'),
    ('sent-unreachable-admin-messages', 'ICMP Admin Unreachable Sent'),
    ('sent-unreachable-neighbor-messages', 'ICMP Host Unreachable Sent'),
    ('sent-unreachable-port-messages', 'ICMP Port Unreachable Sent'),
    ('sent-unreachable-routing-messages', 'ICMP Route Unreachable Sent'),
    ('sent-unreachable-unknown-type-messages', 'ICMP Unreachable Unknown Messages Sent'),
    ('too-short-error-messages', 'ICMP Too Short Errors'),
    ('total-messages', 'ICMP Received'),
    ('unknown-error-type-messages', 'ICMP Unknown Error'),
]

# (YANG leaf name, description) pairs for Traffic.Ipv6NodeDiscovery counters.
_ND_COUNTERS = [
    ('received-neighbor-advertisement-messages', 'ICMP Neighbor Advertisements Received'),
    ('received-neighbor-solicitation-messages', 'ICMP Neighbor Solicitations Received'),
    ('received-redirect-messages', 'ICMP Redirect Received'),
    ('received-router-advertisement-messages', 'ICMP Router Advertisements Received'),
    ('received-router-solicitation-messages', 'ICMP Router Solicitations Received'),
    ('sent-neighbor-advertisement-messages', 'ICMP Neighbor Advertisements Sent'),
    ('sent-neighbor-solicitation-messages', 'ICMP Neighbor Solicitations Sent'),
    ('sent-redirect-messages', 'ICMP Redirect Sent'),
    ('sent-router-advertisement-messages', 'ICMP Router Advertisements Sent'),
    ('sent-router-solicitation-messages', 'ICMP Router Solicitations Sent'),
]

_TRAFFIC = 'Ipv6Io.Nodes.Node.Statistics.Traffic'

_meta_table = {
    _TRAFFIC + '.Ipv6': _table_entry(
        _TRAFFIC + '.Ipv6', 'ipv6',
        [_counter_member(name, doc) for name, doc in _IPV6_COUNTERS]),
    _TRAFFIC + '.Icmp': _table_entry(
        _TRAFFIC + '.Icmp', 'icmp',
        [_counter_member(name, doc) for name, doc in _ICMP_COUNTERS]),
    _TRAFFIC + '.Ipv6NodeDiscovery': _table_entry(
        _TRAFFIC + '.Ipv6NodeDiscovery', 'ipv6-node-discovery',
        [_counter_member(name, doc) for name, doc in _ND_COUNTERS]),
    _TRAFFIC: _table_entry(
        _TRAFFIC, 'traffic',
        [
            _child_member('icmp', REFERENCE_CLASS, 'Icmp',
                          _TRAFFIC + '.Icmp', 'ICMP Statistics'),
            _child_member('ipv6', REFERENCE_CLASS, 'Ipv6',
                          _TRAFFIC + '.Ipv6', 'IPv6 Statistics'),
            _child_member('ipv6-node-discovery', REFERENCE_CLASS, 'Ipv6NodeDiscovery',
                          _TRAFFIC + '.Ipv6NodeDiscovery',
                          'IPv6 Node Discovery Statistics'),
        ]),
    'Ipv6Io.Nodes.Node.Statistics': _table_entry(
        'Ipv6Io.Nodes.Node.Statistics', 'statistics',
        [
            _child_member('traffic', REFERENCE_CLASS, 'Traffic',
                          _TRAFFIC, 'Traffic statistics for a node'),
        ]),
    'Ipv6Io.Nodes.Node': _table_entry(
        'Ipv6Io.Nodes.Node', 'node',
        [
            # 'node-name' is the list key: a string leaf restricted to the
            # node-id pattern, with is_key=True (unlike every other member).
            _MetaInfoClassMember(
                'node-name', ATTRIBUTE, 'str', None, None,
                [], ['([a-zA-Z0-9_]*\\d+/){1,2}([a-zA-Z0-9_]*\\d+)'],
                ''' Node name
                ''',
                'node_name',
                _MODULE_NAME, True),
            _child_member('statistics', REFERENCE_CLASS, 'Statistics',
                          'Ipv6Io.Nodes.Node.Statistics',
                          'Statistical IPv6 network operational data for\n                a node'),
        ]),
    'Ipv6Io.Nodes': _table_entry(
        'Ipv6Io.Nodes', 'nodes',
        [
            _child_member('node', REFERENCE_LIST, 'Node', 'Ipv6Io.Nodes.Node',
                          'IPv6 network operational data for a particular\n                node'),
        ]),
    'Ipv6Io': _table_entry(
        'Ipv6Io', 'ipv6-io',
        [
            _child_member('nodes', REFERENCE_CLASS, 'Nodes', 'Ipv6Io.Nodes',
                          'Node-specific IPv6 IO operational data'),
        ]),
}
# Wire up the parent back-pointers so each generated meta_info knows its
# enclosing container in the Ipv6Io hierarchy (child path -> parent path).
_PARENT_LINKS = (
    ('Ipv6Io.Nodes.Node.Statistics.Traffic.Ipv6', 'Ipv6Io.Nodes.Node.Statistics.Traffic'),
    ('Ipv6Io.Nodes.Node.Statistics.Traffic.Icmp', 'Ipv6Io.Nodes.Node.Statistics.Traffic'),
    ('Ipv6Io.Nodes.Node.Statistics.Traffic.Ipv6NodeDiscovery', 'Ipv6Io.Nodes.Node.Statistics.Traffic'),
    ('Ipv6Io.Nodes.Node.Statistics.Traffic', 'Ipv6Io.Nodes.Node.Statistics'),
    ('Ipv6Io.Nodes.Node.Statistics', 'Ipv6Io.Nodes.Node'),
    ('Ipv6Io.Nodes.Node', 'Ipv6Io.Nodes'),
    ('Ipv6Io.Nodes', 'Ipv6Io'),
)
for _child_path, _parent_path in _PARENT_LINKS:
    _meta_table[_child_path]['meta_info'].parent = _meta_table[_parent_path]['meta_info']
| 51.105181
| 206
| 0.487467
| 2,717
| 32,554
| 5.65403
| 0.057416
| 0.069262
| 0.086577
| 0.107538
| 0.830361
| 0.825023
| 0.807838
| 0.762856
| 0.703294
| 0.599206
| 0
| 0.052069
| 0.371106
| 32,554
| 636
| 207
| 51.185535
| 0.698286
| 0
| 0
| 0.42963
| 0
| 0.001852
| 0.378756
| 0.291225
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014815
| 0
| 0.014815
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f01dfae74967a7044f736b5031c8a8f318d63de2
| 30
|
py
|
Python
|
data_processor/__init__.py
|
ruoygao/autoloadtest
|
270d0b952200c597d0ef5a953a6088b6c529cb71
|
[
"MIT"
] | 1
|
2017-06-08T06:16:51.000Z
|
2017-06-08T06:16:51.000Z
|
data_processor/__init__.py
|
ruoygao/autoloadtest
|
270d0b952200c597d0ef5a953a6088b6c529cb71
|
[
"MIT"
] | null | null | null |
data_processor/__init__.py
|
ruoygao/autoloadtest
|
270d0b952200c597d0ef5a953a6088b6c529cb71
|
[
"MIT"
] | null | null | null |
#__all__ = ['data_downloader']
| 30
| 30
| 0.733333
| 3
| 30
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 30
| 1
| 30
| 30
| 0.607143
| 0.966667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f02782d0a634dc632753c1d1e54be5358c8b5e52
| 88
|
py
|
Python
|
lux/core/commands/project_template/manage.py
|
quantmind/lux
|
7318fcd86c77616aada41d8182a04339680a554c
|
[
"BSD-3-Clause"
] | 21
|
2015-03-28T23:27:43.000Z
|
2020-11-23T13:24:10.000Z
|
lux/core/commands/project_template/manage.py
|
quantmind/lux
|
7318fcd86c77616aada41d8182a04339680a554c
|
[
"BSD-3-Clause"
] | 195
|
2015-02-18T17:22:28.000Z
|
2017-12-01T23:01:16.000Z
|
lux/core/commands/project_template/manage.py
|
quantmind/lux
|
7318fcd86c77616aada41d8182a04339680a554c
|
[
"BSD-3-Clause"
] | 16
|
2015-03-31T23:15:38.000Z
|
2017-04-18T11:59:43.000Z
|
# Lux project template, not directly runnable Python: the `{{ project_name }}`
# placeholders are substituted with the real package name when lux renders
# this file while scaffolding a new project.
if __name__ == '__main__':
    # Delegate straight to the generated package's main() entry point.
    import {{ project_name }}
    {{ project_name }}.main()
| 17.6
| 29
| 0.590909
| 9
| 88
| 4.666667
| 0.555556
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 88
| 4
| 30
| 22
| 0.617647
| 0
| 0
| 0
| 0
| 0
| 0.091954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f07a6b4c96ec8c0ae301fe17c18a0ac550cfac31
| 67
|
py
|
Python
|
CodeWars/Python/8 kyu/Even or Odd/main.py
|
opastushkov/codewars-solutions
|
0132a24259a4e87f926048318332dcb4d94858ca
|
[
"MIT"
] | null | null | null |
CodeWars/Python/8 kyu/Even or Odd/main.py
|
opastushkov/codewars-solutions
|
0132a24259a4e87f926048318332dcb4d94858ca
|
[
"MIT"
] | null | null | null |
CodeWars/Python/8 kyu/Even or Odd/main.py
|
opastushkov/codewars-solutions
|
0132a24259a4e87f926048318332dcb4d94858ca
|
[
"MIT"
] | null | null | null |
def even_or_odd(number):
    """Return "Odd" when *number* is odd, "Even" otherwise."""
    # number % 2 is 0 for even integers (including negatives) and 1 for odd.
    if number % 2:
        return "Odd"
    return "Even"
| 33.5
| 42
| 0.686567
| 12
| 67
| 3.666667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018519
| 0.19403
| 67
| 2
| 42
| 33.5
| 0.796296
| 0
| 0
| 0
| 0
| 0
| 0.102941
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
f07c14e7abae3b4a63ff72463f590692ccbc538d
| 3,078
|
py
|
Python
|
tests/jtr/nn/kbp/test_base.py
|
mitchelljeff/SUMMAD4.3
|
33bb3a74cff16a7aa699660a08d98ddcd662cad5
|
[
"MIT"
] | 1
|
2017-09-15T14:06:07.000Z
|
2017-09-15T14:06:07.000Z
|
tests/jtr/nn/kbp/test_base.py
|
mitchelljeff/SUMMAD4.3
|
33bb3a74cff16a7aa699660a08d98ddcd662cad5
|
[
"MIT"
] | null | null | null |
tests/jtr/nn/kbp/test_base.py
|
mitchelljeff/SUMMAD4.3
|
33bb3a74cff16a7aa699660a08d98ddcd662cad5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
import tensorflow as tf
from jtr.nn.kbp.base import TranslatingModel, BilinearDiagonalModel, BilinearModel
from jtr.nn.kbp import similarities
def test_translating_embeddings_score():
    """TransE check: score(s, p, o) = -||s + p - o||_1 vs a NumPy reference."""
    n_examples, emb_size = 5, 10
    rng = np.random.RandomState(0)
    ent = rng.rand(n_examples, 2, emb_size)
    rel = rng.rand(n_examples, 1, emb_size)
    v_ent = tf.Variable(ent, name='E')
    v_rel = tf.Variable(rel, name='R')
    model = TranslatingModel(subject_embeddings=v_ent[:, 0, :],
                             predicate_embeddings=v_rel[:, 0, :],
                             object_embeddings=v_ent[:, 1, :],
                             similarity_function=similarities.negative_l1_distance)
    scores = model()
    init_op = tf.initialize_all_variables()
    with tf.Session() as session:
        session.run(init_op)
        scores_value = session.run(scores)
    # One score per example; values must match the NumPy re-computation.
    assert scores_value.shape[0] == n_examples
    expected = - np.sum(np.abs(ent[:, 0, :] + rel[:, 0, :] - ent[:, 1, :]), axis=1)
    assert np.isclose(scores_value, expected).all()
def test_bilinear_diagonal_score():
    """DistMult check: score(s, p, o) = -||s * p - o||_1 vs a NumPy reference."""
    n_examples, emb_size = 5, 10
    rng = np.random.RandomState(0)
    ent = rng.rand(n_examples, 2, emb_size)
    rel = rng.rand(n_examples, 1, emb_size)
    v_ent = tf.Variable(ent, name='E')
    v_rel = tf.Variable(rel, name='R')
    model = BilinearDiagonalModel(subject_embeddings=v_ent[:, 0, :],
                                  predicate_embeddings=v_rel[:, 0, :],
                                  object_embeddings=v_ent[:, 1, :],
                                  similarity_function=similarities.negative_l1_distance)
    scores = model()
    init_op = tf.initialize_all_variables()
    with tf.Session() as session:
        session.run(init_op)
        scores_value = session.run(scores)
    # One score per example; element-wise product replaces TransE's sum.
    assert scores_value.shape[0] == n_examples
    expected = - np.sum(np.abs(ent[:, 0, :] * rel[:, 0, :] - ent[:, 1, :]), axis=1)
    assert np.isclose(scores_value, expected).all()
def test_bilinear_score():
    """RESCAL-style bilinear scoring: s^T W_p o must match a numpy reference."""
    nb_examples = 5
    entity_dim, predicate_dim = 2, 4
    rng = np.random.RandomState(0)
    entity_emb = rng.rand(nb_examples, 2, entity_dim)
    pred_emb = rng.rand(nb_examples, 1, predicate_dim)
    vE = tf.Variable(entity_emb, name='E')
    vR = tf.Variable(pred_emb, name='R')
    model = BilinearModel(subject_embeddings=vE[:, 0, :],
                          predicate_embeddings=vR[:, 0, :],
                          object_embeddings=vE[:, 1, :],
                          similarity_function=similarities.dot_product)
    scores = model()
    init_op = tf.initialize_all_variables()
    with tf.Session() as session:
        session.run(init_op)
        actual = session.run(scores)
    assert actual.shape[0] == nb_examples
    # each predicate embedding reshapes into an [entity_dim x entity_dim] matrix
    for idx in range(nb_examples):
        subj, obj = entity_emb[idx, 0, :], entity_emb[idx, 1, :]
        relation = pred_emb[idx, 0, :].reshape(entity_dim, entity_dim)
        np.testing.assert_allclose(actual[idx], subj.dot(relation).dot(obj))
| 30.475248
| 88
| 0.591618
| 390
| 3,078
| 4.469231
| 0.212821
| 0.067126
| 0.037866
| 0.051635
| 0.742398
| 0.742398
| 0.742398
| 0.742398
| 0.725186
| 0.725186
| 0
| 0.019678
| 0.273554
| 3,078
| 100
| 89
| 30.78
| 0.759839
| 0.006823
| 0
| 0.686567
| 0
| 0
| 0.001964
| 0
| 0
| 0
| 0
| 0
| 0.089552
| 1
| 0.044776
| false
| 0
| 0.059701
| 0
| 0.104478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f07dc5297df6fc2fd2314161004fda408bd72284
| 57
|
py
|
Python
|
scattertext/smoothing/__init__.py
|
jairoruizsaenz/scattertext
|
5d96f62434057cc26ed90a1d0b314984e4ef90f8
|
[
"Apache-2.0"
] | 1,823
|
2016-07-28T00:25:56.000Z
|
2022-03-30T12:33:57.000Z
|
scattertext/smoothing/__init__.py
|
jairoruizsaenz/scattertext
|
5d96f62434057cc26ed90a1d0b314984e4ef90f8
|
[
"Apache-2.0"
] | 92
|
2016-07-28T23:13:20.000Z
|
2022-01-24T03:53:38.000Z
|
scattertext/smoothing/__init__.py
|
jairoruizsaenz/scattertext
|
5d96f62434057cc26ed90a1d0b314984e4ef90f8
|
[
"Apache-2.0"
] | 271
|
2016-12-26T12:56:08.000Z
|
2022-03-24T19:35:13.000Z
|
from . import lowess, mean_isotonic, sigmoidal, power_law
| 57
| 57
| 0.824561
| 8
| 57
| 5.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 57
| 1
| 57
| 57
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b2c485b9333b119261ed7708f89fb94ba9494402
| 52
|
py
|
Python
|
test.py
|
killgill/SPDD
|
93af015dc2ee60836d1d76d70b0a038b11052de9
|
[
"MIT"
] | null | null | null |
test.py
|
killgill/SPDD
|
93af015dc2ee60836d1d76d70b0a038b11052de9
|
[
"MIT"
] | 1
|
2018-11-14T19:43:04.000Z
|
2018-11-14T19:43:04.000Z
|
test.py
|
killgill/SPDD
|
93af015dc2ee60836d1d76d70b0a038b11052de9
|
[
"MIT"
] | null | null | null |
# Smoke-test driver for the project-local google_local module.
from google_local import *
# NOTE(review): presumably pours/fetches data for the given place id with a
# limit of 10 — gs_pour is defined in google_local; confirm its contract there.
gs_pour('3453909285',10)
| 17.333333
| 26
| 0.788462
| 8
| 52
| 4.875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.255319
| 0.096154
| 52
| 3
| 27
| 17.333333
| 0.574468
| 0
| 0
| 0
| 0
| 0
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
b2e6d60f1f624b57965436b5725dcf67ed804cb3
| 60
|
py
|
Python
|
scrapers/modules/__init__.py
|
skytalemcc/OffshoreNewsHub
|
56a12fba8bf740084f988f88134238ab297bb23d
|
[
"MIT"
] | null | null | null |
scrapers/modules/__init__.py
|
skytalemcc/OffshoreNewsHub
|
56a12fba8bf740084f988f88134238ab297bb23d
|
[
"MIT"
] | null | null | null |
scrapers/modules/__init__.py
|
skytalemcc/OffshoreNewsHub
|
56a12fba8bf740084f988f88134238ab297bb23d
|
[
"MIT"
] | null | null | null |
from .tw_logger import logger
from .dingding import DingDing
| 30
| 30
| 0.85
| 9
| 60
| 5.555556
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 60
| 2
| 30
| 30
| 0.943396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
650802e21a080ed48094d65884b47624bbe1d6b2
| 136
|
py
|
Python
|
python/8kyu/Grasshoppper_function_syntax_debugging.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 3
|
2021-06-08T01:57:13.000Z
|
2021-06-26T10:52:47.000Z
|
python/8kyu/Grasshoppper_function_syntax_debugging.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | null | null | null |
python/8kyu/Grasshoppper_function_syntax_debugging.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 2
|
2021-06-10T21:20:13.000Z
|
2021-06-30T10:13:26.000Z
|
"""Kata url: https://www.codewars.com/kata/56dae9dc54c0acd29d00109a."""
def main(verb: str, noun: str) -> str:
    """Concatenate *verb* and *noun* into a single string."""
    return "".join((verb, noun))
| 22.666667
| 71
| 0.676471
| 18
| 136
| 5.111111
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112069
| 0.147059
| 136
| 5
| 72
| 27.2
| 0.681034
| 0.477941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
650c4287fb76a4ca456b4444ea35da9754c6eba3
| 37,884
|
py
|
Python
|
svca_limix/limix/deprecated/archive/qtl_old.py
|
DenisSch/svca
|
bd029c120ca8310f43311253e4d7ce19bc08350c
|
[
"Apache-2.0"
] | 65
|
2015-01-20T20:46:26.000Z
|
2021-06-27T14:40:35.000Z
|
svca_limix/limix/deprecated/archive/qtl_old.py
|
DenisSch/svca
|
bd029c120ca8310f43311253e4d7ce19bc08350c
|
[
"Apache-2.0"
] | 29
|
2015-02-01T22:35:17.000Z
|
2017-08-07T08:18:23.000Z
|
svca_limix/limix/deprecated/archive/qtl_old.py
|
DenisSch/svca
|
bd029c120ca8310f43311253e4d7ce19bc08350c
|
[
"Apache-2.0"
] | 35
|
2015-02-01T17:26:50.000Z
|
2019-09-13T07:06:16.000Z
|
"""
qtl.py contains wrappers around C++ Limix objects to streamline common tasks in GWAS.
"""
import scipy as SP
import scipy.stats as ST
import limix
import limix.utils.preprocess as preprocess
import limix.deprecated.modules.varianceDecomposition as VAR
import limix.utils.fdr as FDR
import time
#TODO: externally visible function?
#I propose to make this internal using _
def estimateKronCovariances(phenos,K1r=None,K1c=None,K2r=None,K2c=None,covs=None,Acovs=None,covar_type='lowrank_diag',rank=1):
    """
    estimates the background covariance model before testing

    Args:
        phenos: [N x P] SP.array of P phenotypes for N individuals
        K1r:    [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
                If not provided, then linear regression analysis is performed
        K1c:    [P x P] SP.array of LMM-covariance/kinship koefficients (optional)
        K2r:    [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
        K2c:    [P x P] SP.array of LMM-covariance/kinship koefficients (optional)
        covs:   list of SP.arrays holding covariates. Each covs[i] has one
                corresponding Acovs[i]
        Acovs:  list of SP.arrays holding the phenotype design matrices for
                covariates. Each covs[i] has one corresponding Acovs[i].
        covar_type: type of covariance to use. Possible values are
                'freeform', 'fixed', 'diag', 'lowrank', 'lowrank_id',
                'lowrank_diag', 'block', 'block_id', 'block_diag'; the rank of
                the lowrank part is specified in the variable rank
        rank:   rank of a possible lowrank component (default 1)

    Returns:
        CVarianceDecomposition object (trained)

    Raises:
        AssertionError: if the variance decomposition does not converge
    """
    print(".. Training the background covariance with a GP model")
    vc = VAR.CVarianceDecomposition(phenos)
    if K1r is not None:
        vc.addRandomEffect(K1r,covar_type=covar_type,rank=rank)
    if K2r is not None:
        #TODO: fix this; forces second term to be the noise covariance
        vc.addRandomEffect(is_noise=True,K=K2r,covar_type=covar_type,rank=rank)
    # bugfix: the documented defaults (covs=None, Acovs=None) used to crash on
    # len(None) here; skip the fixed effects when none are supplied
    if covs is not None and Acovs is not None:
        for ic in range(len(Acovs)):
            vc.addFixedEffect(covs[ic],Acovs[ic])
    start = time.time()
    conv = vc.findLocalOptimum(fast=True)
    assert conv, "CVariance Decomposition has not converged"
    time_el = time.time()-start
    print(("Background model trained in %.2f s" % time_el))
    return vc
#TODO: externally visible function?
#what does this do?
def updateKronCovs(covs,Acovs,N,P):
    """
    make sure that covs and Acovs are matching lists of design matrices

    Args:
        covs:  covariate matrix, list of matrices, or None
        Acovs: phenotype design matrix (one per covs entry), list, or None
        N:     number of individuals
        P:     number of phenotypes

    Returns:
        (covs, Acovs) as equal-length lists; when both inputs are None a
        shared intercept term (ones covariate, identity design) is returned

    Raises:
        Exception: if exactly one of covs/Acovs is None, or if the two lists
            mismatch in length
    """
    if (covs is None) and (Acovs is None):
        # default: intercept covariate applied to all P phenotypes
        covs = [SP.ones([N,1])]
        Acovs = [SP.eye(P)]
    if Acovs is None or covs is None:
        raise Exception("Either Acovs or covs is None, while the other isn't")
    # idiom fix: isinstance instead of type(...) != list comparisons
    if not isinstance(Acovs, list) and not isinstance(covs, list):
        Acovs = [Acovs]
        covs = [covs]
    if not isinstance(covs, list) or not isinstance(Acovs, list) or len(covs) != len(Acovs):
        raise Exception("Either Acovs or covs is not a list or they mismatch in length")
    return covs, Acovs
def simple_interaction_kronecker_deprecated(snps,phenos,covs=None,Acovs=None,Asnps1=None,Asnps0=None,K1r=None,K1c=None,K2r=None,K2c=None,covar_type='lowrank_diag',rank=1,searchDelta=False):
    """
    I-variate fixed effects interaction test for phenotype specific SNP effects.
    (Runs multiple likelihood ratio tests and computes the P-values in python from the likelihood ratios)

    Args:
        snps:   [N x S] SP.array of S SNPs for N individuals (test SNPs)
        phenos: [N x P] SP.array of P phenotypes for N individuals
        covs:   list of SP.arrays holding covariates. Each covs[i] has one corresponding Acovs[i]
        Acovs:  list of SP.arrays holding the phenotype design matrices for covariates.
                Each covs[i] has one corresponding Acovs[i].
        Asnps1: list of SP.arrays of I interaction variables to be tested for N
                individuals. Note that it is assumed that Asnps0 is already included.
                If not provided, the alternative model will be the independent model
        Asnps0: single SP.array of I0 interaction variables to be included in the
                background model when testing for interaction with Inters
        K1r:    [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
                If not provided, then linear regression analysis is performed
        K1c:    [P x P] SP.array of trait-trait covariances (optional)
        K2r:    [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
        K2c:    [P x P] SP.array of trait-trait covariances (optional)
        covar_type: covariance parametrization used when K1c/K2c are estimated:
                'freeform', 'fixed', 'diag', 'lowrank', 'lowrank_id',
                'lowrank_diag', 'block', 'block_id' or 'block_diag'; the rank
                of the lowrank part is specified in the variable rank
        rank:   rank of a possible lowrank component (default 1)
        searchDelta: Boolean indicator if delta is optimized during SNP testing (default False)

    Returns:
        pv:     P-values of the interaction test
        lrt0:   log likelihood ratio statistics of the null model
        pv0:    P-values of the null model
        lrt:    log likelihood ratio statistics of the interaction test
        lrtAlt: log likelihood ratio statistics of the alternative model
        pvAlt:  P-values of the alternative model
    """
    #0. checks
    N = phenos.shape[0]
    P = phenos.shape[1]
    # bugfix: 'K1r==None' is an elementwise comparison on arrays; use 'is None'
    if K1r is None:
        K1r = SP.dot(snps,snps.T)
    else:
        assert K1r.shape[0]==N, 'K1r: dimensions dismatch'
        assert K1r.shape[1]==N, 'K1r: dimensions dismatch'
    if K2r is None:
        K2r = SP.eye(N)
    else:
        assert K2r.shape[0]==N, 'K2r: dimensions dismatch'
        assert K2r.shape[1]==N, 'K2r: dimensions dismatch'
    covs,Acovs = updateKronCovs(covs,Acovs,N,P)
    #Asnps can be several designs
    if (Asnps0 is None):
        Asnps0 = [SP.ones([1,P])]
    if Asnps1 is None:
        # bugfix: SP.eye takes an int; SP.eye([P]) raised a TypeError
        Asnps1 = [SP.eye(P)]
    if (type(Asnps0)!=list):
        Asnps0 = [Asnps0]
    if (type(Asnps1)!=list):
        Asnps1 = [Asnps1]
    assert (len(Asnps0)==1) and (len(Asnps1)>0), "need at least one Snp design matrix for null and alt model"
    #one row per column design matrix
    pv = SP.zeros((len(Asnps1),snps.shape[1]))
    lrt = SP.zeros((len(Asnps1),snps.shape[1]))
    pvAlt = SP.zeros((len(Asnps1),snps.shape[1]))
    lrtAlt = SP.zeros((len(Asnps1),snps.shape[1]))
    #1. run GP model to infer suitable covariance structure
    if K1c is None or K2c is None:
        vc = estimateKronCovariances(phenos=phenos, K1r=K1r, K2r=K2r, K1c=K1c, K2c=K2c, covs=covs, Acovs=Acovs, covar_type=covar_type, rank=rank)
        K1c = vc.getEstTraitCovar(0)
        K2c = vc.getEstTraitCovar(1)
    else:
        assert K1c.shape[0]==P, 'K1c: dimensions dismatch'
        assert K1c.shape[1]==P, 'K1c: dimensions dismatch'
        assert K2c.shape[0]==P, 'K2c: dimensions dismatch'
        assert K2c.shape[1]==P, 'K2c: dimensions dismatch'
    #2. run kroneckerLMM for null model
    lmm = limix.CKroneckerLMM()
    lmm.setK1r(K1r)
    lmm.setK1c(K1c)
    lmm.setK2r(K2r)
    lmm.setK2c(K2c)
    lmm.setSNPs(snps)
    #add covariates
    for ic in range(len(Acovs)):
        lmm.addCovariates(covs[ic],Acovs[ic])
    lmm.setPheno(phenos)
    if searchDelta: lmm.setNumIntervalsAlt(100)
    else: lmm.setNumIntervalsAlt(0)
    lmm.setNumIntervals0(100)
    #add SNP design
    lmm.setSNPcoldesign(Asnps0[0])
    lmm.process()
    dof0 = Asnps0[0].shape[0]
    pv0 = lmm.getPv()
    lrt0 = ST.chi2.isf(pv0,dof0)
    for iA in range(len(Asnps1)):
        dof1 = Asnps1[iA].shape[0]
        dof = dof1-dof0
        lmm.setSNPcoldesign(Asnps1[iA])
        lmm.process()
        pvAlt[iA,:] = lmm.getPv()[0]
        lrtAlt[iA,:] = ST.chi2.isf(pvAlt[iA,:],dof1)
        # The null model is shared between the two tests, so the interaction
        # LR statistic is the difference of the alt and null statistics.
        lrt[iA,:] = lrtAlt[iA,:] - lrt0[0]
        pv[iA,:] = ST.chi2.sf(lrt[iA,:],dof)
    return pv,lrt0,pv0,lrt,lrtAlt,pvAlt
#TODO: (O.S), I have changed the parametrization of delta optimization steps. Happy with that?
#TODO: Do we really want to keep these "simple_XXX" names? Which functions are simple, which ones are not? I don't like it.
def simple_interaction_kronecker(snps,phenos,covs=None,Acovs=None,Asnps1=None,Asnps0=None,K1r=None,K1c=None,K2r=None,K2c=None,covar_type='lowrank_diag',rank=1,NumIntervalsDelta0=100,NumIntervalsDeltaAlt=0,searchDelta=False):
    """
    I-variate fixed effects interaction test for phenotype specific SNP effects

    Args:
        snps:   [N x S] SP.array of S SNPs for N individuals (test SNPs)
        phenos: [N x P] SP.array of P phenotypes for N individuals
        covs:   list of SP.arrays holding covariates. Each covs[i] has one corresponding Acovs[i]
        Acovs:  list of SP.arrays holding the phenotype design matrices for covariates.
                Each covs[i] has one corresponding Acovs[i].
        Asnps1: list of SP.arrays of I interaction variables to be tested for N
                individuals. Note that it is assumed that Asnps0 is already included.
                If not provided, the alternative model will be the independent model
        Asnps0: single SP.array of I0 interaction variables to be included in the
                background model when testing for interaction with Inters
        K1r:    [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
                If not provided, then linear regression analysis is performed
        K1c:    [P x P] SP.array of trait-trait covariances (optional)
        K2r:    [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
        K2c:    [P x P] SP.array of trait-trait covariances (optional)
        covar_type: covariance parametrization used when K1c/K2c are estimated:
                'freeform', 'fixed', 'diag', 'lowrank', 'lowrank_id',
                'lowrank_diag', 'block', 'block_id' or 'block_diag'; the rank
                of the lowrank part is specified in the variable rank
        rank:   rank of a possible lowrank component (default 1)
        NumIntervalsDelta0: number of steps for delta optimization on the null model (100)
        NumIntervalsDeltaAlt: number of steps for delta optimization on the alt. model (0 - no optimization)
        searchDelta: Carry out delta optimization on the alternative model? if yes We use NumIntervalsDeltaAlt steps

    Returns:
        pv:     P-values of the interaction test
        pv0:    P-values of the null model
        pvAlt:  P-values of the alternative model
    """
    #0. checks
    N = phenos.shape[0]
    P = phenos.shape[1]
    # bugfix: 'K1r==None' is an elementwise comparison on arrays; use 'is None'
    if K1r is None:
        K1r = SP.dot(snps,snps.T)
    else:
        assert K1r.shape[0]==N, 'K1r: dimensions dismatch'
        assert K1r.shape[1]==N, 'K1r: dimensions dismatch'
    if K2r is None:
        K2r = SP.eye(N)
    else:
        assert K2r.shape[0]==N, 'K2r: dimensions dismatch'
        assert K2r.shape[1]==N, 'K2r: dimensions dismatch'
    covs,Acovs = updateKronCovs(covs,Acovs,N,P)
    #Asnps can be several designs
    if (Asnps0 is None):
        Asnps0 = [SP.ones([1,P])]
    if Asnps1 is None:
        # bugfix: SP.eye takes an int; SP.eye([P]) raised a TypeError
        Asnps1 = [SP.eye(P)]
    if (type(Asnps0)!=list):
        Asnps0 = [Asnps0]
    if (type(Asnps1)!=list):
        Asnps1 = [Asnps1]
    assert (len(Asnps0)==1) and (len(Asnps1)>0), "need at least one Snp design matrix for null and alt model"
    #one row per column design matrix (lrt/lrtAlt removed: they were never used)
    pv = SP.zeros((len(Asnps1),snps.shape[1]))
    pvAlt = SP.zeros((len(Asnps1),snps.shape[1]))
    #1. run GP model to infer suitable covariance structure
    if K1c is None or K2c is None:
        vc = estimateKronCovariances(phenos=phenos, K1r=K1r, K2r=K2r, K1c=K1c, K2c=K2c, covs=covs, Acovs=Acovs, covar_type=covar_type, rank=rank)
        K1c = vc.getEstTraitCovar(0)
        K2c = vc.getEstTraitCovar(1)
    else:
        assert K1c.shape[0]==P, 'K1c: dimensions dismatch'
        assert K1c.shape[1]==P, 'K1c: dimensions dismatch'
        assert K2c.shape[0]==P, 'K2c: dimensions dismatch'
        assert K2c.shape[1]==P, 'K2c: dimensions dismatch'
    #2. run kroneckerLMM for null model
    lmm = limix.CKroneckerLMM()
    lmm.setK1r(K1r)
    lmm.setK1c(K1c)
    lmm.setK2r(K2r)
    lmm.setK2c(K2c)
    lmm.setSNPs(snps)
    #add covariates
    for ic in range(len(Acovs)):
        lmm.addCovariates(covs[ic],Acovs[ic])
    lmm.setPheno(phenos)
    #delta serch on alt. model?
    if searchDelta:
        lmm.setNumIntervalsAlt(NumIntervalsDeltaAlt)
        lmm.setNumIntervals0_inter(NumIntervalsDeltaAlt)
    else:
        lmm.setNumIntervalsAlt(0)
        lmm.setNumIntervals0_inter(0)
    lmm.setNumIntervals0(NumIntervalsDelta0)
    #add SNP design
    lmm.setSNPcoldesign0_inter(Asnps0[0])
    for iA in range(len(Asnps1)):
        lmm.setSNPcoldesign(Asnps1[iA])
        lmm.process()
        # getPv() rows: [0]=alternative, [1]=interaction, [2]=null
        pvAlt[iA,:] = lmm.getPv()[0]
        pv[iA,:] = lmm.getPv()[1]
        pv0 = lmm.getPv()[2]
    return pv,pv0,pvAlt
## KroneckerLMM functions
def kronecker_lmm(snps,phenos,covs=None,Acovs=None,Asnps=None,K1r=None,K1c=None,K2r=None,K2c=None,covar_type='lowrank_diag',rank=1,NumIntervalsDelta0=100,NumIntervalsDeltaAlt=0,searchDelta=False):
    """
    simple wrapper for kroneckerLMM code

    Args:
        snps:   [N x S] SP.array of S SNPs for N individuals (test SNPs)
        phenos: [N x P] SP.array of P phenotypes for N individuals
        covs:   list of SP.arrays holding covariates. Each covs[i] has one corresponding Acovs[i]
        Acovs:  list of SP.arrays holding the phenotype design matrices for covariates.
                Each covs[i] has one corresponding Acovs[i].
        Asnps:  single SP.array of I0 interaction variables to be included in the
                background model when testing for interaction with Inters
                If not provided, the alternative model will be the independent model
        K1r:    [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
                If not provided, then linear regression analysis is performed
        K1c:    [P x P] SP.array of trait-trait covariances (optional)
        K2r:    [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
        K2c:    [P x P] SP.array of trait-trait covariances (optional)
        covar_type: covariance parametrization used when K1c/K2c are estimated:
                'freeform', 'fixed', 'diag', 'lowrank', 'lowrank_id',
                'lowrank_diag', 'block', 'block_id' or 'block_diag'; the rank
                of the lowrank part is specified in the variable rank
        rank:   rank of a possible lowrank component (default 1)
        NumIntervalsDelta0: number of steps for delta optimization on the null model (100)
        NumIntervalsDeltaAlt: number of steps for delta optimization on the alt. model (0 - no optimization)
        searchDelta: Boolean indicator if delta is optimized during SNP testing (default False)

    Returns:
        CKroneckerLMM object
        P-values for all SNPs from liklelihood ratio test
    """
    #0. checks
    N = phenos.shape[0]
    P = phenos.shape[1]
    # bugfix: 'K1r==None' is an elementwise comparison on arrays; use 'is None'
    if K1r is None:
        K1r = SP.dot(snps,snps.T)
    else:
        assert K1r.shape[0]==N, 'K1r: dimensions dismatch'
        assert K1r.shape[1]==N, 'K1r: dimensions dismatch'
    if K2r is None:
        K2r = SP.eye(N)
    else:
        assert K2r.shape[0]==N, 'K2r: dimensions dismatch'
        assert K2r.shape[1]==N, 'K2r: dimensions dismatch'
    covs,Acovs = updateKronCovs(covs,Acovs,N,P)
    #Asnps can be several designs
    if Asnps is None:
        Asnps = [SP.ones([1,P])]
    if (type(Asnps)!=list):
        Asnps = [Asnps]
    assert len(Asnps)>0, "need at least one Snp design matrix"
    #one row per column design matrix
    pv = SP.zeros((len(Asnps),snps.shape[1]))
    #1. run GP model to infer suitable covariance structure
    if K1c is None or K2c is None:
        vc = estimateKronCovariances(phenos=phenos, K1r=K1r, K2r=K2r, K1c=K1c, K2c=K2c, covs=covs, Acovs=Acovs, covar_type=covar_type, rank=rank)
        K1c = vc.getEstTraitCovar(0)
        K2c = vc.getEstTraitCovar(1)
    else:
        assert K1c.shape[0]==P, 'K1c: dimensions dismatch'
        assert K1c.shape[1]==P, 'K1c: dimensions dismatch'
        assert K2c.shape[0]==P, 'K2c: dimensions dismatch'
        assert K2c.shape[1]==P, 'K2c: dimensions dismatch'
    #2. run kroneckerLMM
    lmm = limix.CKroneckerLMM()
    lmm.setK1r(K1r)
    lmm.setK1c(K1c)
    lmm.setK2r(K2r)
    lmm.setK2c(K2c)
    lmm.setSNPs(snps)
    #add covariates
    for ic in range(len(Acovs)):
        lmm.addCovariates(covs[ic],Acovs[ic])
    lmm.setPheno(phenos)
    #delta serch on alt. model?
    if searchDelta:
        lmm.setNumIntervalsAlt(NumIntervalsDeltaAlt)
    else:
        lmm.setNumIntervalsAlt(0)
    lmm.setNumIntervals0(NumIntervalsDelta0)
    for iA in range(len(Asnps)):
        #add SNP design
        lmm.setSNPcoldesign(Asnps[iA])
        lmm.process()
        pv[iA,:] = lmm.getPv()[0]
    return lmm,pv
def simple_lmm(snps,pheno,K=None,covs=None, test='lrt',NumIntervalsDelta0=100,NumIntervalsDeltaAlt=0,searchDelta=False):
    """
    Univariate fixed effects linear mixed model test for all SNPs

    Args:
        snps:  [N x S] SP.array of S SNPs for N individuals
        pheno: [N x 1] SP.array of 1 phenotype for N individuals
        K:     [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
               If not provided, then linear regression analysis is performed
        covs:  [N x D] SP.array of D covariates for N individuals
        test:  'lrt' for likelihood ratio test (default) or 'f' for F-test
        NumIntervalsDelta0: number of steps for delta optimization on the null model (100)
        NumIntervalsDeltaAlt: number of steps for delta optimization on the alt. model (0 - no optimization)
        searchDelta: Carry out delta optimization on the alternative model? if yes We use NumIntervalsDeltaAlt steps

    Returns:
        limix LMM object
    """
    start = time.time()
    if K is None:
        K = SP.eye(snps.shape[0])
    if covs is None:
        covs = SP.ones((snps.shape[0], 1))
    lm = limix.CLMM()
    lm.setK(K)
    lm.setSNPs(snps)
    lm.setPheno(pheno)
    lm.setCovs(covs)
    # map the test name onto limix's statistic code (0 = LRT, 1 = F-test)
    statistic_by_name = {'lrt': 0, 'f': 1}
    if test not in statistic_by_name:
        print(test)
        raise NotImplementedError("only f or lrt are implemented")
    lm.setTestStatistics(statistic_by_name[test])
    #set number of delta grid optimizations?
    lm.setNumIntervals0(NumIntervalsDelta0)
    lm.setNumIntervalsAlt(NumIntervalsDeltaAlt if searchDelta else 0)
    lm.process()
    print(("finished GWAS testing in %.2f seconds" % (time.time() - start)))
    return lm
#TODO: we need to fix. THis does not work as interact_GxE is not existing
#I vote we also use **kw_args to forward parameters to interact_Gxe?
def interact_GxG(pheno,snps1,snps2=None,K=None,covs=None):
    """
    Epistasis test between two sets of SNPs

    Args:
        pheno: [N x 1] SP.array of 1 phenotype for N individuals
        snps1: [N x S1] SP.array of S1 SNPs for N individuals
        snps2: [N x S2] SP.array of S2 SNPs for N individuals
        K:     [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
               If not provided, then linear regression analysis is performed
        covs:  [N x D] SP.array of D covariates for N individuals

    Returns:
        pv: [S2 x S1] SP.array of P values for epistasis tests beten all SNPs in
            snps1 and snps2
    """
    # bugfix: N was used in SP.eye(N) before being assigned
    N = snps1.shape[0]
    if K is None:
        K = SP.eye(N)
    if snps2 is None:
        snps2 = snps1
    #NOTE(review): interact_GxE is not defined in this module (see TODO in the
    #original file); this call raises NameError until that function exists.
    return interact_GxE(snps=snps1,pheno=pheno,env=snps2,covs=covs,K=K)
def interact_GxE_1dof(snps,pheno,env,K=None,covs=None, test='lrt'):
    """
    Univariate GxE fixed effects interaction linear mixed model test for all
    pairs of SNPs and environmental variables.

    Args:
        snps:  [N x S] SP.array of S SNPs for N individuals
        pheno: [N x 1] SP.array of 1 phenotype for N individuals
        env:   [N x E] SP.array of E environmental variables for N individuals
        K:     [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
               If not provided, then linear regression analysis is performed
        covs:  [N x D] SP.array of D covariates for N individuals
        test:  'lrt' for likelihood ratio test (default) or 'f' for F-test

    Returns:
        pv: [E x S] SP.array of P values for interaction tests between all
            E environmental variables and all S SNPs
    """
    num_samples = snps.shape[0]
    if K is None:
        K = SP.eye(num_samples)
    if covs is None:
        covs = SP.ones((num_samples, 1))
    shapes_ok = (env.shape[0] == num_samples and pheno.shape[0] == num_samples
                 and K.shape[0] == num_samples and K.shape[1] == num_samples
                 and covs.shape[0] == num_samples)
    assert shapes_ok, "shapes missmatch"
    baseline_inter = SP.ones((num_samples, 1))
    pv = SP.zeros((env.shape[1], snps.shape[1]))
    print(("starting %i interaction scans for %i SNPs each." % (env.shape[1], snps.shape[1])))
    t_start = time.time()
    for env_idx in range(env.shape[1]):
        scan_start = time.time()
        env_col = env[:, env_idx:(env_idx + 1)]
        # the tested environment also joins the covariates of the null model
        extended_covs = SP.concatenate((covs, env_col), 1)
        scan = simple_interaction(snps=snps, pheno=pheno, covs=extended_covs,
                                  Inter=env_col, Inter0=baseline_inter, test=test)
        pv[env_idx, :] = scan.getPv()[0, :]
        scan_end = time.time()
        print(("Finished %i out of %i interaction scans in %.2f seconds."%((env_idx+1),env.shape[1],(scan_end-scan_start))))
    t_end = time.time()
    print(("-----------------------------------------------------------\nFinished all %i interaction scans in %.2f seconds."%(env.shape[1],(t_end-t_start))))
    return pv
def phenSpecificEffects(snps,pheno1,pheno2,K=None,covs=None,test='lrt'):
    """
    Univariate fixed effects interaction test for phenotype specific SNP effects

    Args:
        snps:   [N x S] SP.array of S SNPs for N individuals (test SNPs)
        pheno1: [N x 1] SP.array of 1 phenotype for N individuals
        pheno2: [N x 1] SP.array of 1 phenotype for N individuals
        K:      [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
                If not provided, then linear regression analysis is performed
        covs:   [N x D] SP.array of D covariates for N individuals
        test:   'lrt' for likelihood ratio test (default) or 'f' for F-test

    Returns:
        limix LMM object
    """
    N=snps.shape[0]
    if K is None:
        K=SP.eye(N)
    assert (pheno1.shape[1]==pheno2.shape[1]), "Only consider equal number of phenotype dimensions"
    if covs is None:
        # bugfix: SP.ones(N,1) passed 1 as the dtype argument; shape must be a tuple
        covs = SP.ones((N,1))
    assert (pheno1.shape[1]==1 and pheno2.shape[1]==1 and pheno1.shape[0]==N and pheno2.shape[0]==N and K.shape[0]==N and K.shape[1]==N and covs.shape[0]==N), "shapes missmatch"
    # stack the two phenotypes; Inter marks which half each row belongs to
    Inter = SP.zeros((N*2,1))
    Inter[0:N,0]=1
    Inter0 = SP.ones((N*2,1))
    Yinter=SP.concatenate((pheno1,pheno2),0)
    Xinter = SP.tile(snps,(2,1))
    # bugfix: original read 'Covitner= SP.tile(covs(2,1))' -- misspelled name
    # and called the covariate array instead of tiling it
    Covinter = SP.tile(covs,(2,1))
    # NOTE(review): K is built above but not forwarded to simple_interaction,
    # which defaults to an identity of size 2N -- confirm whether intended.
    lm = simple_interaction(snps=Xinter,pheno=Yinter,covs=Covinter,Inter=Inter,Inter0=Inter0,test=test)
    return lm
def simple_interaction(snps,pheno,Inter,Inter0=None,covs = None,K=None,test='lrt'):
    """
    I-variate fixed effects interaction test for phenotype specific SNP effects

    Args:
        snps:   [N x S] SP.array of S SNPs for N individuals (test SNPs)
        pheno:  [N x 1] SP.array of 1 phenotype for N individuals
        Inter:  [N x I] SP.array of I interaction variables to be tested for N
                individuals (optional). If not provided, only the SNP is
                included in the null model.
        Inter0: [N x I0] SP.array of I0 interaction variables to be included in the
                background model when testing for interaction with Inter
        covs:   [N x D] SP.array of D covariates for N individuals
        K:      [N x N] SP.array of LMM-covariance/kinship koefficients (optional)
                If not provided, then linear regression analysis is performed
        test:   'lrt' for likelihood ratio test (default) or 'f' for F-test

    Returns:
        limix LMM object
    """
    num_samples = snps.shape[0]
    if covs is None:
        covs = SP.ones((num_samples, 1))
    if K is None:
        K = SP.eye(num_samples)
    if Inter0 is None:
        Inter0 = SP.ones([num_samples, 1])
    consistent = (pheno.shape[0] == num_samples and K.shape[0] == num_samples
                  and K.shape[1] == num_samples and covs.shape[0] == num_samples
                  and Inter0.shape[0] == num_samples and Inter.shape[0] == num_samples)
    assert consistent, "shapes missmatch"
    tester = limix.CInteractLMM()
    tester.setK(K)
    tester.setSNPs(snps)
    tester.setPheno(pheno)
    tester.setCovs(covs)
    tester.setInter0(Inter0)
    tester.setInter(Inter)
    # map the test name onto limix's statistic code (0 = LRT, 1 = F-test)
    statistic_by_name = {'lrt': 0, 'f': 1}
    if test not in statistic_by_name:
        print(test)
        raise NotImplementedError("only f or lrt are implemented")
    tester.setTestStatistics(statistic_by_name[test])
    tester.process()
    return tester
#TOOD: use **kw_args to forward params.. see below
def forward_lmm_kronecker(snps,phenos,Asnps=None,Acond=None,K1r=None,K1c=None,K2r=None,K2c=None,covs=None,Acovs=None,threshold = 5e-8, maxiter = 2,qvalues=False, update_covariances = False,**kw_args):
    """
    Kronecker fixed effects test with forward selection.

    Args:
        snps:       [N x S] SP.array of S SNPs for N individuals (test SNPs)
        phenos:     [N x P] SP.array of P phenotypes for N individuals
        Asnps:      SNP trait design matrix (or list thereof); defaults to a
                    common effect across all P traits (SP.ones([1,P]))
        Acond:      trait design matrix (or list thereof) used when conditioning
                    on an included SNP; defaults to Asnps
        K1r:        [N x N] row covariance of the first random effect
                    (defaults to SP.dot(snps,snps.T))
        K1c:        [P x P] column covariance of the first random effect
                    (estimated if not provided)
        K2r:        [N x N] row covariance of the second random effect
                    (defaults to SP.eye(N))
        K2c:        [P x P] column covariance of the second random effect
                    (estimated if not provided)
        covs:       fixed-effect covariates (completed by updateKronCovs)
        Acovs:      trait design matrices for the covariates
        threshold:  (float) P-value threshold for inclusion in forward
                    selection (default 5e-8)
        maxiter:    (int) maximum number of scans. First scan is without
                    inclusion, so maxiter-1 inclusions can be performed.
                    (default 2)
        qvalues:    use a q-value threshold and return q-values in addition
                    (default False)
        update_covariances: re-estimate covariances after each forward step
                    (default False)

    Returns:
        lm:         limix kronecker LMM object
        RV:         dict with elements:
            iadded:   indices of SNPs included, in order of inclusion
            pvadded:  P-values of the included SNPs in the iteration before
                      inclusion
            pvall:    [maxiter*len(Asnps) x S] SP.array of P-values for all
                      iterations
            time_el:  elapsed wall-clock time per iteration
            Optional (qvalues=True): qvall, qvadded
    """
    #0. checks
    N = phenos.shape[0]
    P = phenos.shape[1]
    # BUGFIX: use "is None" rather than "== None" — with numpy arrays the
    # latter performs an elementwise comparison and does not yield a bool.
    if K1r is None:
        K1r = SP.dot(snps,snps.T)
    else:
        assert K1r.shape[0]==N, 'K1r: dimensions dismatch'
        assert K1r.shape[1]==N, 'K1r: dimensions dismatch'
    if K2r is None:
        K2r = SP.eye(N)
    else:
        assert K2r.shape[0]==N, 'K2r: dimensions dismatch'
        assert K2r.shape[1]==N, 'K2r: dimensions dismatch'
    covs,Acovs = updateKronCovs(covs,Acovs,N,P)
    # default SNP design: one common effect across all traits
    if Asnps is None:
        Asnps = [SP.ones([1,P])]
    if (type(Asnps)!=list):
        Asnps = [Asnps]
    assert len(Asnps)>0, "need at least one Snp design matrix"
    if Acond is None:
        Acond = Asnps
    if (type(Acond)!=list):
        Acond = [Acond]
    assert len(Acond)>0, "need at least one Snp design matrix"
    #1. run GP model to infer suitable covariance structure
    if K1c is None or K2c is None:
        vc = estimateKronCovariances(phenos=phenos, K1r=K1r, K2r=K2r, K1c=K1c, K2c=K2c, covs=covs, Acovs=Acovs, **kw_args)
        K1c = vc.getEstTraitCovar(0)
        K2c = vc.getEstTraitCovar(1)
    else:
        vc = None
        assert K1c.shape[0]==P, 'K1c: dimensions dismatch'
        assert K1c.shape[1]==P, 'K1c: dimensions dismatch'
        assert K2c.shape[0]==P, 'K2c: dimensions dismatch'
        assert K2c.shape[1]==P, 'K2c: dimensions dismatch'
    t0 = time.time()
    # initial genome-wide scan with no SNP included yet
    lm,pv = kronecker_lmm(snps=snps,phenos=phenos,Asnps=Asnps,K1r=K1r,K2r=K2r,K1c=K1c,K2c=K2c,covs=covs,Acovs=Acovs)
    #get pv
    #start stuff
    iadded = []
    pvadded = []
    qvadded = []
    time_el = []
    pvall = SP.zeros((pv.shape[0]*maxiter,pv.shape[1]))
    qvall = None
    t1=time.time()
    print(("finished GWAS testing in %.2f seconds" %(t1-t0)))
    time_el.append(t1-t0)
    pvall[0:pv.shape[0],:]=pv
    imin= SP.unravel_index(pv.argmin(),pv.shape)
    score=pv[imin].min()
    niter = 1
    if qvalues:
        assert pv.shape[0]==1, "This is untested with the fdr package. pv.shape[0]==1 failed"
        qvall = SP.zeros((maxiter,snps.shape[1]))
        qv = FDR.qvalues(pv)
        qvall[0:1,:] = qv
        score=qv[imin]
    #loop: include the best SNP and rescan until no hit passes the threshold
    while (score<threshold) and niter<maxiter:
        t0=time.time()
        pvadded.append(pv[imin])
        iadded.append(imin)
        if qvalues:
            qvadded.append(qv[imin])
        if update_covariances and vc is not None:
            vc.addFixedTerm(snps[:,imin[1]:(imin[1]+1)],Acond[imin[0]])
            vc.setScales()  # CL: don't know what this does, but findLocalOptima crashes because vc.noisPos=None
            vc.findLocalOptima(fast=True)
            K1c = vc.getEstTraitCovar(0)
            K2c = vc.getEstTraitCovar(1)
            lm.setK1c(K1c)
            lm.setK2c(K2c)
        # condition on the selected SNP with its conditioning design
        lm.addCovariates(snps[:,imin[1]:(imin[1]+1)],Acond[imin[0]])
        for i in range(len(Asnps)):
            #add SNP design
            lm.setSNPcoldesign(Asnps[i])
            lm.process()
            pv[i,:] = lm.getPv()[0]
        pvall[niter*pv.shape[0]:(niter+1)*pv.shape[0]]=pv
        imin= SP.unravel_index(pv.argmin(),pv.shape)
        if qvalues:
            qv = FDR.qvalues(pv)
            qvall[niter:niter+1,:] = qv
            score = qv[imin].min()
        else:
            score = pv[imin].min()
        t1=time.time()
        print(("finished GWAS testing in %.2f seconds" %(t1-t0)))
        time_el.append(t1-t0)
        niter=niter+1
    RV = {}
    RV['iadded'] = iadded
    RV['pvadded'] = pvadded
    RV['pvall'] = pvall
    RV['time_el'] = time_el
    if qvalues:
        RV['qvall'] = qvall
        RV['qvadded'] = qvadded
    return lm,RV
def forward_lmm(snps,pheno,K=None,covs=None,qvalues=False,threshold = 5e-8, maxiter = 2,test='lrt',**kw_args):
    """
    Univariate fixed effects test with forward selection.

    Args:
        snps:   [N x S] SP.array of S SNPs for N individuals (test SNPs)
        pheno:  [N x 1] SP.array of 1 phenotype for N individuals
        K:      [N x N] SP.array of LMM-covariance/kinship coefficients (optional)
                If not provided, then linear regression analysis is performed
                (identity covariance)
        covs:   [N x D] SP.array of D covariates for N individuals
        qvalues: use a q-value threshold and return q-values in addition (default False)
        threshold: (float) P-value threshold for inclusion in forward selection (default 5e-8)
        maxiter: (int) maximum number of scans. First scan is
                without inclusion, so maxiter-1 inclusions can be performed. (default 2)
        test:   'lrt' for likelihood ratio test (default) or 'f' for F-test

    Returns:
        lm:     limix LMM object (after the last scan)
        RV:     dict with elements:
            iadded:  indices of SNPs included, in order of inclusion
            pvadded: P-values of the included SNPs in the iteration before inclusion
            pvall:   [maxiter x S] SP.array of P-values for all iterations
                     (rows beyond the last performed iteration stay zero)
            Optional (qvalues=True): qvall, qvadded
    """
    if K is None:
        K=SP.eye(snps.shape[0])
    if covs is None:
        covs = SP.ones((snps.shape[0],1))
    # initial genome-wide scan with the base covariates only
    lm = simple_lmm(snps,pheno,K=K,covs=covs,test=test,**kw_args)
    pvall = SP.zeros((maxiter,snps.shape[1]))
    pv = lm.getPv()
    pvall[0:1,:]=pv
    imin= pv.argmin()
    niter = 1
    #start stuff
    iadded = []
    pvadded = []
    qvadded = []
    if qvalues:
        assert pv.shape[0]==1, "This is untested with the fdr package. pv.shape[0]==1 failed"
        qvall = SP.zeros((maxiter,snps.shape[1]))
        qv = FDR.qvalues(pv)
        qvall[0:1,:] = qv
        score=qv.min()
    else:
        score=pv.min()
    # include the top SNP as a covariate and rescan until nothing passes the
    # threshold or maxiter scans have been performed
    while (score<threshold) and niter<maxiter:
        t0=time.time()
        iadded.append(imin)
        pvadded.append(pv[0,imin])
        if qvalues:
            qvadded.append(qv[0,imin])
        # absorb the selected SNP into the covariate matrix
        covs=SP.concatenate((covs,snps[:,imin:(imin+1)]),1)
        lm.setCovs(covs)
        lm.process()
        pv = lm.getPv()
        pvall[niter:niter+1,:]=pv
        imin= pv.argmin()
        if qvalues:
            qv = FDR.qvalues(pv)
            qvall[niter:niter+1,:] = qv
            score = qv.min()
        else:
            score = pv.min()
        t1=time.time()
        print(("finished GWAS testing in %.2f seconds" %(t1-t0)))
        niter=niter+1
    RV = {}
    RV['iadded'] = iadded
    RV['pvadded'] = pvadded
    RV['pvall'] = pvall
    if qvalues:
        RV['qvall'] = qvall
        RV['qvadded'] = qvadded
    return lm,RV
| 44.886256
| 224
| 0.623799
| 5,327
| 37,884
| 4.418434
| 0.083724
| 0.019926
| 0.02409
| 0.021413
| 0.782895
| 0.765731
| 0.739601
| 0.721502
| 0.715214
| 0.704975
| 0
| 0.02396
| 0.277294
| 37,884
| 843
| 225
| 44.939502
| 0.835714
| 0.504514
| 0
| 0.66147
| 0
| 0
| 0.113823
| 0.003917
| 0
| 0
| 0
| 0.004745
| 0.097996
| 1
| 0.026726
| false
| 0
| 0.01559
| 0
| 0.069042
| 0.024499
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
651c18211c85649b67779728d21f66ba389fadb0
| 196
|
py
|
Python
|
tests/start.py
|
Lambda-School-Labs/cryptolytic-ds
|
b58b6eb2b82a404f9d2d468e706d49d9c5999f21
|
[
"MIT"
] | 13
|
2019-10-10T21:01:23.000Z
|
2020-06-05T11:18:31.000Z
|
tests/start.py
|
ross-fisher/cryptolytic-ds
|
1539ae7311a622035d631058ebe47e7c697e3c11
|
[
"MIT"
] | 3
|
2019-12-18T16:46:48.000Z
|
2020-01-09T21:47:48.000Z
|
tests/start.py
|
ross-fisher/cryptolytic-ds
|
1539ae7311a622035d631058ebe47e7c697e3c11
|
[
"MIT"
] | 10
|
2019-10-15T15:30:25.000Z
|
2020-05-11T22:07:52.000Z
|
import json
import os
from dotenv import load_dotenv
def init(dotenv_path='tests/test.env'):
    """Load environment variables for the test environment and echo the DB name.

    Args:
        dotenv_path: path of the dotenv file to load (default 'tests/test.env',
            generalized from the previously hard-coded path; existing callers
            are unaffected).

    Raises:
        KeyError: if POSTGRES_DBNAME is not defined after loading the file.
    """
    # using test environment
    load_dotenv(verbose=True, dotenv_path=dotenv_path)
    # sanity-echo one expected variable so a misconfigured env fails loudly
    print(os.environ['POSTGRES_DBNAME'])
| 21.777778
| 59
| 0.744898
| 28
| 196
| 5.071429
| 0.714286
| 0.140845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153061
| 196
| 8
| 60
| 24.5
| 0.855422
| 0.112245
| 0
| 0
| 0
| 0
| 0.168605
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6548d3a72a518a1eed45436163953c077af0ae16
| 4,576
|
py
|
Python
|
integration_tests/test_suites/celery-k8s-integration-test-suite/test_monitoring.py
|
kbd/dagster
|
14affaf1372fcb5169e6c2d5d53621eeed954767
|
[
"Apache-2.0"
] | null | null | null |
integration_tests/test_suites/celery-k8s-integration-test-suite/test_monitoring.py
|
kbd/dagster
|
14affaf1372fcb5169e6c2d5d53621eeed954767
|
[
"Apache-2.0"
] | null | null | null |
integration_tests/test_suites/celery-k8s-integration-test-suite/test_monitoring.py
|
kbd/dagster
|
14affaf1372fcb5169e6c2d5d53621eeed954767
|
[
"Apache-2.0"
] | 1
|
2021-11-25T11:06:39.000Z
|
2021-11-25T11:06:39.000Z
|
# pylint doesn't know about pytest fixtures
# pylint: disable=unused-argument
import os
import time
from dagster.core.storage.pipeline_run import PipelineRunStatus
from dagster.core.test_utils import poll_for_finished_run
from dagster.utils import merge_dicts
from dagster.utils.yaml_utils import merge_yamls
from dagster_k8s.job import get_job_name_from_run_id
from dagster_k8s.utils import delete_job
from dagster_k8s_test_infra.integration_utils import image_pull_policy, launch_run_over_graphql
from dagster_test.test_project import get_test_project_environments_path
# True when the BUILDKITE environment variable is set (i.e. running in Buildkite CI).
IS_BUILDKITE = os.getenv("BUILDKITE") is not None
def log_run_events(instance, run_id):
    """Print every event log entry of *run_id* to stdout, blank-line separated."""
    entries = instance.all_logs(run_id)
    for entry in entries:
        # pylint: disable=print-call
        print("%s\n" % entry)
def get_celery_job_engine_config(dagster_docker_image, job_namespace):
    """Build the celery-k8s execution config for a run.

    Pins ``job_image`` only when an image was explicitly supplied; always sets
    the target namespace and the test image pull policy.
    """
    # only pin an image when one was provided; otherwise let the default apply
    image_part = {"job_image": dagster_docker_image} if dagster_docker_image else {}
    common_part = {
        "job_namespace": job_namespace,
        "image_pull_policy": image_pull_policy(),
    }
    return {"execution": {"config": merge_dicts(image_part, common_part)}}
def get_failing_celery_job_engine_config(dagster_docker_image, job_namespace):
    """Build a celery-k8s execution config that is guaranteed to fail at startup.

    Identical to :func:`get_celery_job_engine_config` except that it references
    a config map that does not exist, so the launched k8s job cannot start.
    """
    image_part = {"job_image": dagster_docker_image} if dagster_docker_image else {}
    common_part = {
        "job_namespace": job_namespace,
        "image_pull_policy": image_pull_policy(),
        # referencing a missing config map makes the pod fail to start
        "env_config_maps": ["non-existent-config-map"],
    }
    return {"execution": {"config": merge_dicts(image_part, common_part)}}
def test_run_monitoring_fails_on_interrupt(  # pylint: disable=redefined-outer-name
    dagster_docker_image, dagster_instance, helm_namespace, dagit_url
):
    """Launch a celery run, delete its k8s job mid-flight, and assert that run
    monitoring marks the run FAILURE.

    :param dagster_docker_image: image for the launched job (fixture)
    :param dagster_instance: DagsterInstance under test (fixture)
    :param helm_namespace: k8s namespace of the helm deployment (fixture)
    :param dagit_url: GraphQL endpoint used to launch the run (fixture)
    """
    run_config = merge_dicts(
        merge_yamls(
            [
                os.path.join(get_test_project_environments_path(), "env.yaml"),
                os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
            ]
        ),
        get_celery_job_engine_config(
            dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
        ),
    )
    pipeline_name = "demo_job_celery"
    # BUGFIX: pre-bind run_id so the finally block cannot raise NameError
    # (masking the real error) if launch_run_over_graphql itself fails.
    run_id = None
    try:
        run_id = launch_run_over_graphql(
            dagit_url, run_config=run_config, pipeline_name=pipeline_name
        )
        # wait (up to 60s) for the run to leave STARTING and reach STARTED
        start_time = time.time()
        while time.time() - start_time < 60:
            run = dagster_instance.get_run_by_id(run_id)
            if run.status == PipelineRunStatus.STARTED:
                break
            assert run.status == PipelineRunStatus.STARTING
            time.sleep(1)
        # kill the underlying k8s job; monitoring should notice and fail the run
        assert delete_job(get_job_name_from_run_id(run_id), helm_namespace)
        poll_for_finished_run(dagster_instance, run.run_id, timeout=120)
        assert dagster_instance.get_run_by_id(run_id).status == PipelineRunStatus.FAILURE
    finally:
        if run_id is not None:
            log_run_events(dagster_instance, run_id)
def test_run_monitoring_startup_fail(  # pylint: disable=redefined-outer-name
    dagster_docker_image, dagster_instance, helm_namespace, dagit_url
):
    """Launch a celery run whose engine config references a missing config map
    and assert that run monitoring marks the run FAILURE.

    :param dagster_docker_image: image for the launched job (fixture)
    :param dagster_instance: DagsterInstance under test (fixture)
    :param helm_namespace: k8s namespace of the helm deployment (fixture)
    :param dagit_url: GraphQL endpoint used to launch the run (fixture)
    """
    run_config = merge_dicts(
        merge_yamls(
            [
                os.path.join(get_test_project_environments_path(), "env.yaml"),
                os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
            ]
        ),
        get_failing_celery_job_engine_config(
            dagster_docker_image=dagster_docker_image, job_namespace=helm_namespace
        ),
    )
    pipeline_name = "demo_job_celery"
    # BUGFIX: pre-bind run_id so the finally block cannot raise NameError
    # (masking the real error) if launch_run_over_graphql itself fails.
    run_id = None
    try:
        run_id = launch_run_over_graphql(
            dagit_url, run_config=run_config, pipeline_name=pipeline_name
        )
        # wait (up to 60s) for the run to leave STARTING and reach STARTED
        start_time = time.time()
        while time.time() - start_time < 60:
            run = dagster_instance.get_run_by_id(run_id)
            if run.status == PipelineRunStatus.STARTED:
                break
            assert run.status == PipelineRunStatus.STARTING
            time.sleep(1)
        assert delete_job(get_job_name_from_run_id(run_id), helm_namespace)
        poll_for_finished_run(dagster_instance, run.run_id, timeout=120)
        assert dagster_instance.get_run_by_id(run_id).status == PipelineRunStatus.FAILURE
    finally:
        if run_id is not None:
            log_run_events(dagster_instance, run_id)
| 33.896296
| 95
| 0.625874
| 526
| 4,576
| 5.019011
| 0.203422
| 0.032197
| 0.081818
| 0.049242
| 0.750379
| 0.739015
| 0.731818
| 0.731818
| 0.730682
| 0.724242
| 0
| 0.005289
| 0.29764
| 4,576
| 134
| 96
| 34.149254
| 0.816117
| 0.038024
| 0
| 0.60177
| 0
| 0
| 0.051183
| 0.005232
| 0
| 0
| 0
| 0
| 0.053097
| 1
| 0.044248
| false
| 0
| 0.088496
| 0.017699
| 0.150442
| 0.00885
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e8daf9f98268ce4989e55b3928309a3e9ca0e904
| 170
|
py
|
Python
|
tests/data_app/admin.py
|
ahoazure/khro-data_wizard
|
4925113ffeea54057a062fc8a0cdab7c23a8e18a
|
[
"MIT"
] | 279
|
2015-09-16T18:57:37.000Z
|
2022-03-28T13:37:39.000Z
|
tests/data_app/admin.py
|
ahoazure/khro-data_wizard
|
4925113ffeea54057a062fc8a0cdab7c23a8e18a
|
[
"MIT"
] | 32
|
2015-09-16T18:30:19.000Z
|
2021-11-19T07:19:33.000Z
|
tests/data_app/admin.py
|
ahoazure/khro-data_wizard
|
4925113ffeea54057a062fc8a0cdab7c23a8e18a
|
[
"MIT"
] | 53
|
2016-07-01T12:24:49.000Z
|
2022-02-14T16:19:45.000Z
|
from django.contrib import admin
from .models import SimpleModel, Type, FKModel
# Expose the test-app models in the Django admin with the default ModelAdmin.
admin.site.register(SimpleModel)
admin.site.register(Type)
admin.site.register(FKModel)
| 21.25
| 46
| 0.817647
| 23
| 170
| 6.043478
| 0.478261
| 0.194245
| 0.366906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 170
| 7
| 47
| 24.285714
| 0.896774
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e8fc946d07188bc0fe3df98d4e3badfb3b053b55
| 168
|
py
|
Python
|
hcap_accounts/models/__init__.py
|
fabiommendes/capacidade_hospitalar
|
4f675b574573eb3f51e6be8a927ea230bf2712c7
|
[
"MIT"
] | null | null | null |
hcap_accounts/models/__init__.py
|
fabiommendes/capacidade_hospitalar
|
4f675b574573eb3f51e6be8a927ea230bf2712c7
|
[
"MIT"
] | 31
|
2020-04-11T13:38:17.000Z
|
2021-09-22T18:51:11.000Z
|
hcap_accounts/models/__init__.py
|
fabiommendes/capacidade_hospitalar
|
4f675b574573eb3f51e6be8a927ea230bf2712c7
|
[
"MIT"
] | 1
|
2020-04-12T17:51:20.000Z
|
2020-04-12T17:51:20.000Z
|
from .anonymous_user import AnonymousUser
from .healthcare_unit_notifier import HealthcareUnitNotifier
from .region_manager import RegionManager
from .user import User
| 33.6
| 60
| 0.880952
| 20
| 168
| 7.2
| 0.6
| 0.138889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 168
| 4
| 61
| 42
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
336fc9bcf19676c5906faefa302f1cf8b07ff499
| 104
|
py
|
Python
|
soap_incident_client/utils/__init__.py
|
zommiommy/soap_incident_client
|
dec8dc90996bea9bdd3e45f4a87a49fac8b78ee4
|
[
"MIT"
] | null | null | null |
soap_incident_client/utils/__init__.py
|
zommiommy/soap_incident_client
|
dec8dc90996bea9bdd3e45f4a87a49fac8b78ee4
|
[
"MIT"
] | null | null | null |
soap_incident_client/utils/__init__.py
|
zommiommy/soap_incident_client
|
dec8dc90996bea9bdd3e45f4a87a49fac8b78ee4
|
[
"MIT"
] | null | null | null |
from .get_file import get_file
from .soap_call import soap_call
from .logger import logger, setup_logger
| 34.666667
| 40
| 0.846154
| 18
| 104
| 4.611111
| 0.444444
| 0.168675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 104
| 3
| 40
| 34.666667
| 0.902174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
681ff3d103500fb8382ce9ca93c08fadfa775e2b
| 579
|
py
|
Python
|
modules/feeds/__init__.py
|
elliotwutingfeng/Google-Safe-Browsing-DNSBL-Generator
|
1ed8d49047081dd4f6d929f3f9d4d97d21c366e4
|
[
"BSD-3-Clause"
] | null | null | null |
modules/feeds/__init__.py
|
elliotwutingfeng/Google-Safe-Browsing-DNSBL-Generator
|
1ed8d49047081dd4f6d929f3f9d4d97d21c366e4
|
[
"BSD-3-Clause"
] | null | null | null |
modules/feeds/__init__.py
|
elliotwutingfeng/Google-Safe-Browsing-DNSBL-Generator
|
1ed8d49047081dd4f6d929f3f9d4d97d21c366e4
|
[
"BSD-3-Clause"
] | null | null | null |
from modules.feeds.afnic import AFNIC
from modules.feeds.aws_ec2 import AmazonWebServicesEC2
from modules.feeds.cubdomain import CubDomain
from modules.feeds.domainsproject import DomainsProject
from modules.feeds.icann import ICANN
from modules.feeds.internet_ee import InternetEE
from modules.feeds.ipv4 import Ipv4
from modules.feeds.openintel import OpenINTEL
from modules.feeds.registrar_r01 import RegistrarR01
from modules.feeds.sk_nic import SKNIC
from modules.feeds.switch_ch import SwitchCH
from modules.feeds.top1m import Top1M
from modules.feeds.top10m import Top10M
| 41.357143
| 55
| 0.865285
| 83
| 579
| 5.975904
| 0.325301
| 0.288306
| 0.419355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026565
| 0.08981
| 579
| 13
| 56
| 44.538462
| 0.914611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
684c6f02a6285c3d827c3aa564e9d8df8d1c99a0
| 64
|
py
|
Python
|
data_processing/__init__.py
|
liorgefen86/disaster_response
|
727d5b7e2ff7561866e4f2d560be99fee8317ef6
|
[
"MIT"
] | null | null | null |
data_processing/__init__.py
|
liorgefen86/disaster_response
|
727d5b7e2ff7561866e4f2d560be99fee8317ef6
|
[
"MIT"
] | null | null | null |
data_processing/__init__.py
|
liorgefen86/disaster_response
|
727d5b7e2ff7561866e4f2d560be99fee8317ef6
|
[
"MIT"
] | null | null | null |
from .classifier_functions import *
from .process_data import *
| 21.333333
| 35
| 0.8125
| 8
| 64
| 6.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 64
| 2
| 36
| 32
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
685249213ad0122703f2708f5cea6ad067e666d2
| 14,179
|
py
|
Python
|
misc/acrn-config/scenario_config/vm_configurations_h.py
|
yfliuu/acrn-hypervisor
|
6289124e7c894323e2a5342bf201856d76512a60
|
[
"BSD-3-Clause"
] | null | null | null |
misc/acrn-config/scenario_config/vm_configurations_h.py
|
yfliuu/acrn-hypervisor
|
6289124e7c894323e2a5342bf201856d76512a60
|
[
"BSD-3-Clause"
] | null | null | null |
misc/acrn-config/scenario_config/vm_configurations_h.py
|
yfliuu/acrn-hypervisor
|
6289124e7c894323e2a5342bf201856d76512a60
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (C) 2019 Intel Corporation. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
import scenario_cfg_lib
# Text emitted at the top of every generated vm_configurations.h:
# the standard license banner followed by the opening of the include guard.
VM_HEADER_DEFINE = scenario_cfg_lib.HEADER_LICENSE + r"""
#ifndef VM_CONFIGURATIONS_H
#define VM_CONFIGURATIONS_H
"""
# Closing line of the include guard, emitted at the end of the file.
VM_END_DEFINE = r"""#endif /* VM_CONFIGURATIONS_H */"""
def gen_common_header(config):
    """Write the shared license banner and opening include guard.

    :param config: writable file object for the generated vm_configurations.h
    :return: None
    """
    banner = "{0}".format(VM_HEADER_DEFINE)
    print(banner, file=config)
def cpu_affinity_output(vm_info, i, config):
    """Emit the VCPU affinity macro for VM *i*; the SOS VM gets no macro.

    :param vm_info: data structure holding all the xml item values
    :param i: index of the vm id
    :param config: writable file object for the generated header
    """
    if vm_info.load_order[i] != "SOS_VM":
        bitmap = vm_info.get_cpu_bitmap(i)
        macro = "#define VM{0}_CONFIG_VCPU_AFFINITY\t{1}".format(i, bitmap['cpu_map'])
        print(macro, file=config)
def gen_sdc_header(vm_info, config):
    """
    Generate vm_configuration.h for the sdc scenario.

    :param vm_info: the data structure holding all the xml item values
    :param config: it is the pointer which file write to
    :return: None
    """
    gen_common_header(config)
    print("#include <misc_cfg.h>\n", file=config)
    # SDC: SOS VM + one post-launched VM, plus optional Kata VMs
    print("#define CONFIG_MAX_VM_NUM\t\t(2U + CONFIG_MAX_KATA_VM_NUM)", file=config)
    print("", file=config)
    print("/* Bits mask of guest flags that can be programmed by device model." +
        " Other bits are set by hypervisor only */", file=config)
    print("#define DM_OWNED_GUEST_FLAG_MASK\t" +
        "(GUEST_FLAG_SECURE_WORLD_ENABLED | GUEST_FLAG_LAPIC_PASSTHROUGH | \\\n" +
        "\t\t\t\t\t\tGUEST_FLAG_RT | GUEST_FLAG_IO_COMPLETION_POLLING)", file=config)
    print("", file=config)
    # kernel boot arguments for the service OS
    print("#define SOS_VM_BOOTARGS\t\t\tSOS_ROOTFS\t\\", file=config)
    print('\t\t\t\t\t"rw rootwait "\t\\', file=config)
    print('\t\t\t\t\t"console=tty0 " \\', file=config)
    print("\t\t\t\t\tSOS_CONSOLE\t\\", file=config)
    print('\t\t\t\t\t"consoleblank=0 "\t\\', file=config)
    print('\t\t\t\t\t"no_timer_check "\t\\', file=config)
    print('\t\t\t\t\t"quiet loglevel=3 "\t\\', file=config)
    print('\t\t\t\t\t"i915.nuclear_pageflip=1 " \\', file=config)
    print('\t\t\t\t\t"i915.avail_planes_per_pipe=0x01010F "\t\\', file=config)
    print('\t\t\t\t\t"i915.domain_plane_owners=0x011111110000 " \\', file=config)
    print('\t\t\t\t\t"i915.enable_gvt=1 "\t\\', file=config)
    print("\t\t\t\t\tSOS_BOOTARGS_DIFF", file=config)
    print("", file=config)
    # POST LAUNCHED VM
    if scenario_cfg_lib.KATA_VM_COUNT == 1:
        # emit both branches of a C preprocessor conditional on Kata support
        print("#if CONFIG_MAX_KATA_VM_NUM > 0", file=config)
        # Set VM1 vcpu
        cpu_affinity_output(vm_info, 1, config)
        # KATA VM
        cpu_affinity_output(vm_info, 2, config)
        #else:
        print("#else", file=config)
        # Only two VMs in SDC config, setup vcpu affinity for VM1
        cpu_affinity_output(vm_info, 1, config)
        print("#endif", file=config)
    else:
        cpu_affinity_output(vm_info, 1, config)
    print("", file=config)
    print("{0}".format(VM_END_DEFINE), file=config)
def gen_sdc2_header(vm_info, config):
    """
    Generate vm_configuration.h for the sdc2 scenario.

    :param vm_info: the data structure holding all the xml item values
    :param config: it is the pointer which file write to
    :return: None
    """
    gen_common_header(config)
    print("#include <misc_cfg.h>\n", file=config)
    # unlike sdc, the VM count comes from the scenario configuration
    print("#define CONFIG_MAX_VM_NUM\t\t({0}U + CONFIG_MAX_KATA_VM_NUM)".format(
        scenario_cfg_lib.VM_COUNT), file=config)
    print("", file=config)
    print("/* Bits mask of guest flags that can be programmed by device model." +
        " Other bits are set by hypervisor only */", file=config)
    print("#define DM_OWNED_GUEST_FLAG_MASK\t" +
        "(GUEST_FLAG_SECURE_WORLD_ENABLED | GUEST_FLAG_LAPIC_PASSTHROUGH | \\\n" +
        "\t\t\t\t\t\tGUEST_FLAG_RT | GUEST_FLAG_IO_COMPLETION_POLLING)", file=config)
    print("", file=config)
    # kernel boot arguments for the service OS
    print("#define SOS_VM_BOOTARGS\t\t\tSOS_ROOTFS\t\\", file=config)
    print('\t\t\t\t\t"rw rootwait "\t\\', file=config)
    print('\t\t\t\t\t"console=tty0 " \\', file=config)
    print("\t\t\t\t\tSOS_CONSOLE\t\\", file=config)
    print('\t\t\t\t\t"consoleblank=0 "\t\\', file=config)
    print('\t\t\t\t\t"no_timer_check "\t\\', file=config)
    print('\t\t\t\t\t"quiet loglevel=3 "\t\\', file=config)
    print('\t\t\t\t\t"i915.nuclear_pageflip=1 " \\', file=config)
    print('\t\t\t\t\t"i915.avail_planes_per_pipe=0x01010F "\t\\', file=config)
    print('\t\t\t\t\t"i915.domain_plane_owners=0x011111110000 " \\', file=config)
    print('\t\t\t\t\t"i915.enable_gvt=1 "\t\\', file=config)
    print("\t\t\t\t\tSOS_BOOTARGS_DIFF", file=config)
    print("", file=config)
    # one affinity macro per configured VM (SOS VMs are skipped internally)
    for i in range(scenario_cfg_lib.VM_COUNT):
        cpu_affinity_output(vm_info, i, config)
    print("", file=config)
    print("{0}".format(VM_END_DEFINE), file=config)
def logic_max_vm_num(config):
    """
    Emit the CONFIG_MAX_VM_NUM macro plus a comment block listing the
    per-VM macros the user may need to adapt to their board.

    :param config: it is the pointer which file write to
    :return: None
    """
    print("", file=config)
    print("#define CONFIG_MAX_VM_NUM\t{0}U".format(scenario_cfg_lib.VM_COUNT), file=config)
    print("", file=config)
    print("/* The VM CONFIGs like:", file=config)
    print(" *\tVMX_CONFIG_VCPU_AFFINITY", file=config)
    print(" *\tVMX_CONFIG_MEM_START_HPA", file=config)
    print(" *\tVMX_CONFIG_MEM_SIZE", file=config)
    print(" *\tVMX_CONFIG_MEM_START_HPA2", file=config)
    print(" *\tVMX_CONFIG_MEM_SIZE_HPA2", file=config)
    print(" *\tVMX_CONFIG_OS_BOOTARG_ROOT", file=config)
    print(" *\tVMX_CONFIG_OS_BOOTARG_MAX_CPUS", file=config)
    print(" *\tVMX_CONFIG_OS_BOOTARG_CONSOLE", file=config)
    print(" * might be different on your board, please modify them per your needs.", file=config)
    print(" */", file=config)
    print("", file=config)
def gen_logical_partition_header(vm_info, config):
    """
    Generate vm_configuration.h for the logical_partition scenario.

    :param vm_info: the data structure holding all the xml item values
    :param config: it is the pointer which file write to
    :return: None
    """
    scenario_cfg_lib.vms_count = scenario_cfg_lib.VM_COUNT
    gen_common_header(config)
    # map all the needed pci sub class
    print("#include <pci_devices.h>", file=config)
    print("#include <misc_cfg.h>", file=config)
    print("", file=config)
    print("/* Bits mask of guest flags that can be programmed by device model." +
        " Other bits are set by hypervisor only */", file=config)
    print("#define DM_OWNED_GUEST_FLAG_MASK\t0UL", file=config)
    logic_max_vm_num(config)
    # per-VM memory layout and kernel bootarg macros
    for i in range(scenario_cfg_lib.VM_COUNT):
        cpu_bits = vm_info.get_cpu_bitmap(i)
        cpu_affinity_output(vm_info, i, config)
        print("#define VM{0}_CONFIG_MEM_START_HPA\t\t{1}UL".format(
            i, vm_info.mem_info.mem_start_hpa[i]), file=config)
        print("#define VM{0}_CONFIG_MEM_SIZE\t\t\t{1}UL".format(
            i, vm_info.mem_info.mem_size[i]), file=config)
        print("#define VM{0}_CONFIG_MEM_START_HPA2\t\t{1}UL".format(
            i, vm_info.mem_info.mem_start_hpa2[i]), file=config)
        print("#define VM{0}_CONFIG_MEM_SIZE_HPA2\t\t{1}UL".format(
            i, vm_info.mem_info.mem_size_hpa2[i]), file=config)
        print('#define VM{0}_CONFIG_OS_BOOTARG_ROOT\t\t"root={1} "'.format(
            i, vm_info.os_cfg.kern_root_dev[i]), file=config)
        print('#define VM{0}_CONFIG_OS_BOOTARG_MAXCPUS\t\t"maxcpus={1} "'.format(
            i, cpu_bits['cpu_num']), file=config)
        print('#define VM{0}_CONFIG_OS_BOOTARG_CONSOLE\t\t"console={1} "'.format(
            i, vm_info.os_cfg.kern_console[i]), file=config)
        print("", file=config)
    # PCI pass-through device assignment policy for VM0/VM1
    print('/* VM pass-through devices assign policy:', file=config)
    print(' * VM0: one Mass Storage controller, one Network controller;', file=config)
    print(' * VM1: one Mass Storage controller, one Network controller' +
        '(if a secondary Network controller class device exist);', file=config)
    print(' */', file=config)
    print('#define VM0_STORAGE_CONTROLLER\t\t\tSATA_CONTROLLER_0', file=config)
    print('#define VM0_NETWORK_CONTROLLER\t\t\tETHERNET_CONTROLLER_0', file=config)
    print('#define VM0_CONFIG_PCI_DEV_NUM\t\t\t3U', file=config)
    print('', file=config)
    print('#define VM1_STORAGE_CONTROLLER\t\t\tUSB_CONTROLLER_0', file=config)
    print('#if defined(ETHERNET_CONTROLLER_1)', file=config)
    print('/* if a secondary Ethernet controller subclass exist, assign to VM1 */', file=config)
    print('#define VM1_NETWORK_CONTROLLER\t\t\tETHERNET_CONTROLLER_1', file=config)
    print('#elif defined(NETWORK_CONTROLLER_0)', file=config)
    print('/* if a Network controller subclass exist' +
        '(usually it is a wireless network card), assign to VM1 */', file=config)
    print('#define VM1_NETWORK_CONTROLLER\t\t\tNETWORK_CONTROLLER_0', file=config)
    print('#endif', file=config)
    print('', file=config)
    print('#if defined(VM1_NETWORK_CONTROLLER)', file=config)
    print('#define VM1_CONFIG_PCI_DEV_NUM\t\t\t3U', file=config)
    print('#else', file=config)
    print('/* no network controller could be assigned to VM1 */', file=config)
    print('#define VM1_CONFIG_PCI_DEV_NUM\t\t\t2U', file=config)
    print('#endif', file=config)
    print("", file=config)
    print("{0}".format(VM_END_DEFINE), file=config)
def gen_industry_header(vm_info, config):
    """
    Generate vm_configuration.h for the industry scenario.

    :param vm_info: the data structure holding all the xml item values
    :param config: it is the pointer which file write to
    :return: None
    """
    gen_common_header(config)
    print("#include <misc_cfg.h>", file=config)
    print("", file=config)
    print("#define CONFIG_MAX_VM_NUM\t\t({0}U + CONFIG_MAX_KATA_VM_NUM)".format(
        scenario_cfg_lib.VM_COUNT), file=config)
    print("", file=config)
    print("/* Bits mask of guest flags that can be programmed by device model." +
        " Other bits are set by hypervisor only */", file=config)
    print("#define DM_OWNED_GUEST_FLAG_MASK\t(GUEST_FLAG_SECURE_WORLD_ENABLED | " +
        "GUEST_FLAG_LAPIC_PASSTHROUGH | \\", file=config)
    print("\t\t\t\t\t\tGUEST_FLAG_RT | GUEST_FLAG_IO_COMPLETION_POLLING)", file=config)
    print("", file=config)
    # kernel boot arguments for the service OS
    print("#define SOS_VM_BOOTARGS\t\t\tSOS_ROOTFS\t\\", file=config)
    print('\t\t\t\t\t"rw rootwait "\t\\', file=config)
    print('\t\t\t\t\t"console=tty0 "\t\\', file=config)
    print("\t\t\t\t\tSOS_CONSOLE\t\\", file=config)
    print('\t\t\t\t\t"consoleblank=0\t"\t\\', file=config)
    print('\t\t\t\t\t"no_timer_check "\t\\', file=config)
    print('\t\t\t\t\t"quiet loglevel=3 "\t\\', file=config)
    print('\t\t\t\t\t"i915.nuclear_pageflip=1 " \\', file=config)
    print('\t\t\t\t\t"i915.avail_planes_per_pipe=0x01010F "\t\\', file=config)
    print('\t\t\t\t\t"i915.domain_plane_owners=0x011111110000 " \\', file=config)
    print('\t\t\t\t\t"i915.enable_gvt=1 "\t\\', file=config)
    print("\t\t\t\t\tSOS_BOOTARGS_DIFF", file=config)
    print("", file=config)
    # one affinity macro per configured VM (SOS VMs are skipped internally)
    for i in range(scenario_cfg_lib.VM_COUNT):
        cpu_affinity_output(vm_info, i, config)
    print("", file=config)
    print("{0}".format(VM_END_DEFINE), file=config)
def gen_hybrid_header(vm_info, config):
    """
    Generate vm_configuration.h for the hybrid scenario.

    :param vm_info: it is the class which contain all user setting information
    :param config: it is the pointer which file write to
    :return: None
    """
    gen_common_header(config)
    print("#include <misc_cfg.h>\n", file=config)
    print("/* Bits mask of guest flags that can be programmed by device model." +
        " Other bits are set by hypervisor only */", file=config)
    print("#define DM_OWNED_GUEST_FLAG_MASK\t" +
        "(GUEST_FLAG_SECURE_WORLD_ENABLED | GUEST_FLAG_LAPIC_PASSTHROUGH | \\\n" +
        "\t\t\t\t\t\tGUEST_FLAG_RT | GUEST_FLAG_IO_COMPLETION_POLLING)", file=config)
    print("", file=config)
    print("#define CONFIG_MAX_VM_NUM\t\t({0}U + CONFIG_MAX_KATA_VM_NUM)".format(
        scenario_cfg_lib.VM_COUNT), file=config)
    print("", file=config)
    # one affinity macro per configured VM (SOS VMs are skipped internally)
    for i in range(scenario_cfg_lib.VM_COUNT):
        cpu_affinity_output(vm_info, i, config)
    # memory layout macros for VM0 (the pre-launched VM in hybrid)
    print("#define VM0_CONFIG_MEM_START_HPA\t{0}UL".format(
        vm_info.mem_info.mem_start_hpa[0]), file=config)
    print("#define VM0_CONFIG_MEM_SIZE\t\t{0}UL".format(vm_info.mem_info.mem_size[0]), file=config)
    print("#define VM0_CONFIG_MEM_START_HPA2\t{0}UL".format(
        vm_info.mem_info.mem_start_hpa2[0]), file=config)
    print("#define VM0_CONFIG_MEM_SIZE_HPA2\t{0}UL".format(vm_info.mem_info.mem_size_hpa2[0]), file=config)
    print("", file=config)
    # kernel boot arguments for the service OS
    print("#define SOS_VM_BOOTARGS\t\t\tSOS_ROOTFS\t\\", file=config)
    print('\t\t\t\t\t"rw rootwait "\t\\', file=config)
    print('\t\t\t\t\t"console=tty0 " \\', file=config)
    print("\t\t\t\t\tSOS_CONSOLE\t\\", file=config)
    print('\t\t\t\t\t"consoleblank=0 "\t\\', file=config)
    print('\t\t\t\t\t"no_timer_check "\t\\', file=config)
    print('\t\t\t\t\t"quiet loglevel=3 "\t\\', file=config)
    print('\t\t\t\t\t"i915.nuclear_pageflip=1 " \\', file=config)
    print('\t\t\t\t\t"i915.avail_planes_per_pipe=0x01010F "\t\\', file=config)
    print('\t\t\t\t\t"i915.domain_plane_owners=0x011111110000 " \\', file=config)
    print('\t\t\t\t\t"i915.enable_gvt=1 "\t\\', file=config)
    print("\t\t\t\t\tSOS_BOOTARGS_DIFF", file=config)
    print("", file=config)
    print("{0}".format(VM_END_DEFINE), file=config)
def generate_file(scenario, vm_info, config):
    """
    Start to generate vm_configurations.h.

    :param scenario: it is scenario name
    :param vm_info: it is the class which contain all user setting information
    :param config: it is a file pointer of board information for writing to
    """
    generators = {
        'sdc': gen_sdc_header,
        'sdc2': gen_sdc2_header,
        'logical_partition': gen_logical_partition_header,
        'industry': gen_industry_header,
    }
    # any other scenario name falls through to 'hybrid', matching the
    # final else branch of the original if/elif chain
    generators.get(scenario, gen_hybrid_header)(vm_info, config)
| 44.448276
| 107
| 0.672403
| 2,234
| 14,179
| 4.047896
| 0.101164
| 0.046445
| 0.04545
| 0.038925
| 0.825943
| 0.792547
| 0.75141
| 0.684618
| 0.659737
| 0.612739
| 0
| 0.020053
| 0.173496
| 14,179
| 318
| 108
| 44.58805
| 0.7516
| 0.107201
| 0
| 0.568889
| 0
| 0
| 0.430525
| 0.256577
| 0
| 0
| 0.00708
| 0
| 0
| 1
| 0.04
| false
| 0.022222
| 0.004444
| 0
| 0.048889
| 0.657778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
686a67e8dfcec5c5ab7840a95b1bf06eb185100f
| 175
|
py
|
Python
|
00 - Hello Search/python/00_tree_example.py
|
melodrivemusic/CodeOfAI
|
7f8f6f13e0f2193c43f8fc900ea52f57398251b6
|
[
"MIT"
] | 6
|
2019-03-07T19:31:09.000Z
|
2020-03-12T11:17:14.000Z
|
00 - Hello Search/python/00_tree_example.py
|
melodrivemusic/CodeOfAI
|
7f8f6f13e0f2193c43f8fc900ea52f57398251b6
|
[
"MIT"
] | null | null | null |
00 - Hello Search/python/00_tree_example.py
|
melodrivemusic/CodeOfAI
|
7f8f6f13e0f2193c43f8fc900ea52f57398251b6
|
[
"MIT"
] | 5
|
2019-02-14T06:51:22.000Z
|
2021-04-21T08:40:21.000Z
|
# Adjacency list of an undirected graph used by the search example:
# tree[i] holds the ids of the nodes adjacent to node i.
# Nodes 4-6 have no neighbours (empty lists).
tree = [
    # 0
    [1, 7, 8],
    # 1
    [0, 2, 7, 8, 9],
    # 2
    [1, 3, 8, 9, 10],
    # 3
    [2, 9, 10],
    # 4
    [],
    # 5
    [],
    # 6
    [],
    # ...
]
| 9.722222
| 21
| 0.165714
| 24
| 175
| 1.208333
| 0.5
| 0.137931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.328947
| 0.565714
| 175
| 17
| 22
| 10.294118
| 0.052632
| 0.097143
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
686cd9233b74d4522ccde6f1dac783c22949b841
| 2,354
|
py
|
Python
|
python_framework/__init__.py
|
SamuelJansen/python_framework
|
a3e57def47c13edd67319f9bbca32be2bbb00f43
|
[
"MIT"
] | 5
|
2020-09-02T20:05:44.000Z
|
2022-03-04T21:02:13.000Z
|
python_framework/__init__.py
|
SamuelJansen/python_framework
|
a3e57def47c13edd67319f9bbca32be2bbb00f43
|
[
"MIT"
] | 1
|
2021-05-23T22:55:58.000Z
|
2021-05-24T15:33:50.000Z
|
python_framework/__init__.py
|
SamuelJansen/python_framework
|
a3e57def47c13edd67319f9bbca32be2bbb00f43
|
[
"MIT"
] | 3
|
2020-11-01T01:13:09.000Z
|
2022-02-22T15:01:19.000Z
|
from python_framework.api.src.annotation import EnumAnnotation
from python_framework.api.src.helper import Serializer
from python_framework.api.src.service import ExceptionHandler
from python_framework.api.src.service.ExceptionHandler import GlobalException
from python_framework.api.src.service import Security
from python_framework.api.src.service import SchedulerManager
from python_framework.api.src.service import SqlAlchemyProxy
from python_framework.api.src.service import WebBrowser
from python_framework.api.src.service.openapi import OpenApiManager
from python_framework.api.src.service.openapi import OpenApiDocumentationFile
from python_framework.api.src.service.flask import FlaskManager
from python_framework.api.src.service.flask import ResourceManager
from python_framework.api.src.enumeration.HttpStatus import HttpStatus
from python_framework.api.src.enumeration.ActuatorHealthStatus import ActuatorHealthStatus
from python_framework.api.src.enumeration.SchedulerType import SchedulerType
from python_framework.api.src.converter.static import ConverterStatic
from python_framework.api.src.model import FrameworkModel
from python_framework.api.src.model import ErrorLog
from python_framework.api.src.model import ActuatorHealth
from python_framework.api.src.dto import ActuatorHealthDto
from python_framework.api.src.controller import ActuatorHealthController
from python_framework.api.src.converter import ActuatorHealthConverter
from python_framework.api.src.service import ActuatorHealthService
from python_framework.api.src.repository import ActuatorHealthRepository
from python_framework.api.src.annotation.EnumAnnotation import *
from python_framework.api.src.service.flask.FlaskManager import *
from python_framework.api.src.annotation.SchedulerAnnotation import *
from python_framework.api.src.annotation.ServiceAnnotation import *
from python_framework.api.src.annotation.ClientAnnotation import *
from python_framework.api.src.annotation.RepositoryAnnotation import *
from python_framework.api.src.annotation.ValidatorAnnotation import *
from python_framework.api.src.annotation.MapperAnnotation import *
from python_framework.api.src.annotation.ConverterAnnotation import *
from python_framework.api.src.annotation.HelperAnnotation import *
from python_framework.api.src.annotation.GlobalExceptionAnnotation import *
| 52.311111
| 90
| 0.878505
| 290
| 2,354
| 7.010345
| 0.175862
| 0.172159
| 0.327103
| 0.378751
| 0.640433
| 0.591244
| 0.451549
| 0.086572
| 0
| 0
| 0
| 0
| 0.063297
| 2,354
| 44
| 91
| 53.5
| 0.921995
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d79ebd257bc5341781fb16b63ce6fc6baf3c52d3
| 48
|
py
|
Python
|
ghao/errors.py
|
pkubik/ghao
|
45f83f77f706cb0f599b5b0d490a6b4b24fa0199
|
[
"MIT"
] | null | null | null |
ghao/errors.py
|
pkubik/ghao
|
45f83f77f706cb0f599b5b0d490a6b4b24fa0199
|
[
"MIT"
] | null | null | null |
ghao/errors.py
|
pkubik/ghao
|
45f83f77f706cb0f599b5b0d490a6b4b24fa0199
|
[
"MIT"
] | null | null | null |
class GhaoRuntimeError(RuntimeError):
    """Package-specific runtime error; behaves exactly like RuntimeError."""
| 12
| 37
| 0.770833
| 4
| 48
| 9.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 48
| 3
| 38
| 16
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
d7a7c9b405df435dd6f61a3734856d2ec01cffae
| 268
|
py
|
Python
|
sprinter/exceptions.py
|
toumorokoshi/sprinter
|
20c01bf210e1e24dbfcae7416cdf266b0f936c4b
|
[
"MIT"
] | 3
|
2015-01-30T09:01:26.000Z
|
2018-08-23T03:33:52.000Z
|
sprinter/exceptions.py
|
toumorokoshi/sprinter
|
20c01bf210e1e24dbfcae7416cdf266b0f936c4b
|
[
"MIT"
] | 26
|
2015-08-12T01:01:03.000Z
|
2019-01-29T05:18:02.000Z
|
sprinter/exceptions.py
|
toumorokoshi/sprinter
|
20c01bf210e1e24dbfcae7416cdf266b0f936c4b
|
[
"MIT"
] | 3
|
2016-01-18T21:23:53.000Z
|
2017-02-01T18:14:23.000Z
|
"""
This lists all the exceptions in sprinter
"""
from __future__ import unicode_literals
class SprinterException(Exception):
    """Base class for generic sprinter exceptions."""
class FormulaException(SprinterException):
    """Raised for a generic error while handling a formula."""
| 20.615385
| 50
| 0.746269
| 29
| 268
| 6.724138
| 0.724138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164179
| 268
| 12
| 51
| 22.333333
| 0.870536
| 0.425373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
d7a9c27412d874142cb375bec44c13d0e12f3e9a
| 123
|
py
|
Python
|
terra/msg/auth/__init__.py
|
jooddang/terra-py
|
c048ffd53dad13cdfb0c516ccef3d06b1b968cb2
|
[
"MIT"
] | null | null | null |
terra/msg/auth/__init__.py
|
jooddang/terra-py
|
c048ffd53dad13cdfb0c516ccef3d06b1b968cb2
|
[
"MIT"
] | null | null | null |
terra/msg/auth/__init__.py
|
jooddang/terra-py
|
c048ffd53dad13cdfb0c516ccef3d06b1b968cb2
|
[
"MIT"
] | null | null | null |
from terra.msg.auth.stdsignmsg import StdSignMsg
from terra.msg.auth.stdtx import StdTx
__all__ = ["StdSignMsg", "StdTx"]
| 24.6
| 48
| 0.780488
| 17
| 123
| 5.411765
| 0.470588
| 0.195652
| 0.26087
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105691
| 123
| 4
| 49
| 30.75
| 0.836364
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d7cd55ccba12553a78679261f6089ac1d17b4af5
| 46,983
|
py
|
Python
|
source/api/controlplane/plugin/runtime/app.py
|
awslabs/aws-media-replay-engine
|
2c217eff42f8e2c56b43e2ecf593f5aaa92c5451
|
[
"Apache-2.0"
] | 22
|
2021-11-24T01:23:07.000Z
|
2022-03-26T23:24:46.000Z
|
source/api/controlplane/plugin/runtime/app.py
|
awslabs/aws-media-replay-engine
|
2c217eff42f8e2c56b43e2ecf593f5aaa92c5451
|
[
"Apache-2.0"
] | null | null | null |
source/api/controlplane/plugin/runtime/app.py
|
awslabs/aws-media-replay-engine
|
2c217eff42f8e2c56b43e2ecf593f5aaa92c5451
|
[
"Apache-2.0"
] | 3
|
2021-12-10T09:42:51.000Z
|
2022-02-16T02:22:50.000Z
|
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import os
import json
import uuid
import urllib.parse
import boto3
from decimal import Decimal
from datetime import datetime
from chalice import Chalice
from chalice import IAMAuthorizer
from chalice import ChaliceViewError, BadRequestError, NotFoundError
from boto3.dynamodb.types import TypeSerializer
from boto3.dynamodb.conditions import Key, Attr
from botocore.client import ClientError
from jsonschema import validate, ValidationError
from chalicelib import DecimalEncoder
from chalicelib import load_api_schema, replace_decimals, generate_plugin_state_definition
# Chalice application that serves the MRE control-plane plugin API.
app = Chalice(app_name='aws-mre-controlplane-plugin-api')
API_VERSION = '1.0.0'
# IAM authorizer attached to every route below.
authorizer = IAMAuthorizer()
# Converts plain Python values into DynamoDB attribute-value dicts
# (used when building transact_write_items payloads).
serializer = TypeSerializer()
ddb_resource = boto3.resource("dynamodb")
ddb_client = boto3.client("dynamodb")
# Table and index names are injected via the Lambda environment;
# a missing variable raises KeyError at import time (fail fast).
MODEL_TABLE_NAME = os.environ['MODEL_TABLE_NAME']
PLUGIN_TABLE_NAME = os.environ['PLUGIN_TABLE_NAME']
FRAMEWORK_VERSION = os.environ['FRAMEWORK_VERSION']
PLUGIN_VERSION_INDEX = os.environ['PLUGIN_VERSION_INDEX']
PLUGIN_NAME_INDEX = os.environ['PLUGIN_NAME_INDEX']
# JSON schemas used to validate request bodies (see register_plugin).
API_SCHEMA = load_api_schema()
@app.route('/plugin', cors=True, methods=['POST'], authorizer=authorizer)
def register_plugin():
    """
    Register a new plugin or publish a new version of an existing plugin with updated
    attribute values.
    Plugins can be one of the following types:
    - Sync: Contains all the required processing logic within the plugin to achieve the end result
    - SyncModel: Depends on a Machine Learning model to help with achieving the end result
    Body:
    .. code-block:: python
        {
            "Name": string,
            "Description": string,
            "Class": ["Classifier"|"Optimizer"|"Featurer"|"Labeler"]
            "ExecutionType": ["Sync"|"SyncModel"],
            "SupportedMediaType": ["Video"|"Audio"],
            "ContentGroups": list,
            "ExecuteLambdaQualifiedARN": arn,
            "ModelEndpoints": [
                {
                    "Name": string,
                    "Version": string
                },
                ...
            ],
            "Configuration" : {
                "configuration1": "value1",
                ...
            },
            "OutputAttributes" : {
                "attribute1": {
                    "Description": string
                },
                ...
            },
            "DependentPlugins": list
        }
    Parameters:
        - Name: Name of the Plugin
        - Description: Description of the Plugin
        - Class: One of "Classifier"|"Optimizer"|"Featurer"|"Labeler"
        - ExecutionType: One of "Sync"|"SyncModel". SyncModel indicates that the Plugin has a ML Model dependency.
        - SupportedMediaType: One of "Video"|"Audio". Whether Plugin operates on Video or Audio source
        - ContentGroups: List of Content Group supported by the Plugin
        - ExecuteLambdaQualifiedARN: ARN of the Lambda function that encapsulates the Plugin implementation
        - ModelEndpoints: List of Dicts which contains the MRE Models used by the Plugin. Required only when the ExecutionType is SyncModel.
        - Configuration: Configuration values which impact the Plugin behavior. For example, MlModelConfidenceScore: 60
        - OutputAttributes: List of dict that have the name of the attributes the Plugin Outputs. These attributes can be configured to create Replays within MRE.
        - DependentPlugins: A list of Plugin names on which this Plugin depends on. MRE executes the dependent plugins before executing this plugin.
    Returns:
        A dict containing the Id and Version of the registered plugin
        .. code-block:: python
            {
                "Id": string,
                "Version": string
            }
    Raises:
        400 - BadRequestError
        404 - NotFoundError
        500 - ChaliceViewError
    """
    try:
        # Parse floats as Decimal so numbers can be written to DynamoDB unchanged.
        plugin = json.loads(app.current_request.raw_body.decode(), parse_float=Decimal)
        validate(instance=plugin, schema=API_SCHEMA["register_plugin"])
        print("Got a valid plugin schema")
        name = plugin["Name"]
        execution_type = plugin["ExecutionType"]
        if execution_type == "SyncModel":
            # SyncModel plugins must reference model endpoints that are already
            # registered and enabled in the Model table.
            if "ModelEndpoints" not in plugin:
                raise BadRequestError("Missing required key 'ModelEndpoints' in the input")
            else:
                model_table = ddb_resource.Table(MODEL_TABLE_NAME)
                for model_endpoint in plugin["ModelEndpoints"]:
                    model_name = model_endpoint["Name"]
                    model_version = model_endpoint["Version"]
                    response = model_table.get_item(
                        Key={
                            "Name": model_name,
                            "Version": model_version
                        },
                        ConsistentRead=True
                    )
                    if "Item" not in response:
                        raise NotFoundError(f"Model endpoint '{model_name}' with version '{model_version}' not found")
                    elif not response["Item"]["Enabled"]:
                        raise BadRequestError(
                            f"Model endpoint '{model_name}' with version '{model_version}' is disabled in the system")
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # Check if all the DependentPlugins are already registered and enabled in the system
        if "DependentPlugins" in plugin:
            dependent_plugins = plugin["DependentPlugins"]
            for d_plugin in dependent_plugins:
                if d_plugin == name:
                    raise BadRequestError(f"Plugin '{d_plugin}' cannot be a dependent of itself")
                response = plugin_table.get_item(
                    Key={
                        "Name": d_plugin,
                        "Version": "v0"
                    },
                    ConsistentRead=True
                )
                if "Item" not in response:
                    raise NotFoundError(f"Dependent plugin '{d_plugin}' not found")
                elif not response["Item"]["Enabled"]:
                    raise BadRequestError(f"Dependent plugin '{d_plugin}' is disabled in the system")
        else:
            dependent_plugins = []
        output_attributes = plugin["OutputAttributes"] if "OutputAttributes" in plugin else {}
        # Version "v0" mirrors the latest revision of a plugin; its presence
        # decides whether this call registers a new plugin or publishes a new
        # version of an existing one.
        response = plugin_table.get_item(
            Key={
                "Name": name,
                "Version": "v0"
            },
            ConsistentRead=True
        )
        if "Item" not in response:
            print(f"Registering a new plugin '{name}'")
            plugin["Id"] = str(uuid.uuid4())
            latest_version = 0
            higher_version = 1
        else:
            print(f"Publishing a new version of the plugin '{name}'")
            plugin["Id"] = response["Item"]["Id"]
            latest_version = response["Item"]["Latest"]
            higher_version = int(latest_version) + 1
        plugin["Created"] = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
        plugin["Enabled"] = True
        plugin["FrameworkVersion"] = FRAMEWORK_VERSION
        # Fill in the plugin-specific placeholders of the generated state
        # definition template (serialized to a JSON string for storage).
        state_definition = generate_plugin_state_definition(execution_type)
        state_definition_str = json.dumps(state_definition)
        state_definition_str = state_definition_str.replace("%%PLUGIN_NAME%%", name)
        state_definition_str = state_definition_str.replace("%%PLUGIN_CLASS%%", plugin["Class"])
        state_definition_str = state_definition_str.replace("%%PLUGIN_EXECUTION_TYPE%%", execution_type)
        state_definition_str = state_definition_str.replace("%%PLUGIN_EXECUTE_LAMBDA_ARN%%",
                                                            plugin["ExecuteLambdaQualifiedARN"])
        state_definition_str = state_definition_str.replace("\"%%PLUGIN_DEPENDENT_PLUGINS%%\"",
                                                            json.dumps(dependent_plugins))
        state_definition_str = state_definition_str.replace("\"%%PLUGIN_OUTPUT_ATTRIBUTES%%\"",
                                                            json.dumps(output_attributes))
        plugin["StateDefinition"] = state_definition_str
        print(f"Plugin State Definition: {state_definition_str}")
        # Serialize Python object to DynamoDB object
        serialized_plugin = {k: serializer.serialize(v) for k, v in plugin.items()}
        # Atomically update the "v0" (latest) row and insert the new immutable
        # version row. The ConditionExpression on #Latest guards against a
        # concurrent publish bumping the version between our read and write.
        ddb_client.transact_write_items(
            TransactItems=[
                {
                    "Update": {
                        "TableName": PLUGIN_TABLE_NAME,
                        "Key": {
                            "Name": {"S": name},
                            "Version": {"S": "v0"}
                        },
                        "ConditionExpression": "attribute_not_exists(#Latest) OR #Latest = :Latest",
                        "UpdateExpression": "SET #Latest = :Higher_version, #Id = :Id, #Class = :Class, #Description = :Description, #ContentGroups = :ContentGroups, #ExecutionType = :ExecutionType, #SupportedMediaType = :SupportedMediaType, #ExecuteLambda = :ExecuteLambda, #StateDefinition = :StateDefinition, #ModelEndpoints = :ModelEndpoints, #Configuration = :Configuration, #OutputAttributes = :OutputAttributes, #DependentPlugins = :DependentPlugins, #Created = :Created, #Enabled = :Enabled, #FrameworkVersion = :FrameworkVersion",
                        "ExpressionAttributeNames": {
                            "#Latest": "Latest",
                            "#Id": "Id",
                            "#Class": "Class",
                            "#Description": "Description",
                            "#ContentGroups": "ContentGroups",
                            "#ExecutionType": "ExecutionType",
                            "#SupportedMediaType": "SupportedMediaType",
                            "#ExecuteLambda": "ExecuteLambdaQualifiedARN",
                            "#StateDefinition": "StateDefinition",
                            "#ModelEndpoints": "ModelEndpoints",
                            "#Configuration": "Configuration",
                            "#OutputAttributes": "OutputAttributes",
                            "#DependentPlugins": "DependentPlugins",
                            "#Created": "Created",
                            "#Enabled": "Enabled",
                            "#FrameworkVersion": "FrameworkVersion"
                        },
                        "ExpressionAttributeValues": {
                            ":Latest": {"N": str(latest_version)},
                            ":Higher_version": {"N": str(higher_version)},
                            ":Id": serialized_plugin["Id"],
                            ":Class": serialized_plugin["Class"],
                            ":Description": serialized_plugin[
                                "Description"] if "Description" in serialized_plugin else {"S": ""},
                            ":ContentGroups": serialized_plugin["ContentGroups"],
                            ":ExecutionType": serialized_plugin["ExecutionType"],
                            ":SupportedMediaType": serialized_plugin["SupportedMediaType"],
                            ":ExecuteLambda": serialized_plugin["ExecuteLambdaQualifiedARN"],
                            ":StateDefinition": serialized_plugin["StateDefinition"],
                            ":ModelEndpoints": serialized_plugin[
                                "ModelEndpoints"] if execution_type == "SyncModel" else {"L": []},
                            ":Configuration": serialized_plugin[
                                "Configuration"] if "Configuration" in serialized_plugin else {"M": {}},
                            ":OutputAttributes": serialized_plugin[
                                "OutputAttributes"] if "OutputAttributes" in serialized_plugin else {"M": {}},
                            ":DependentPlugins": serialized_plugin[
                                "DependentPlugins"] if "DependentPlugins" in serialized_plugin else {"L": []},
                            ":Created": serialized_plugin["Created"],
                            ":Enabled": serialized_plugin["Enabled"],
                            ":FrameworkVersion": serialized_plugin["FrameworkVersion"]
                        }
                    }
                },
                {
                    "Put": {
                        "TableName": PLUGIN_TABLE_NAME,
                        "Item": {
                            "Name": {"S": name},
                            "Version": {"S": "v" + str(higher_version)},
                            "Id": serialized_plugin["Id"],
                            "Class": serialized_plugin["Class"],
                            "Description": serialized_plugin["Description"] if "Description" in serialized_plugin else {
                                "S": ""},
                            "ContentGroups": serialized_plugin["ContentGroups"],
                            "ExecutionType": serialized_plugin["ExecutionType"],
                            "SupportedMediaType": serialized_plugin["SupportedMediaType"],
                            "ExecuteLambdaQualifiedARN": serialized_plugin["ExecuteLambdaQualifiedARN"],
                            "StateDefinition": serialized_plugin["StateDefinition"],
                            "ModelEndpoints": serialized_plugin[
                                "ModelEndpoints"] if execution_type == "SyncModel" else {"L": []},
                            "Configuration": serialized_plugin[
                                "Configuration"] if "Configuration" in serialized_plugin else {"M": {}},
                            "OutputAttributes": serialized_plugin[
                                "OutputAttributes"] if "OutputAttributes" in serialized_plugin else {"M": {}},
                            "Created": serialized_plugin["Created"],
                            "Enabled": serialized_plugin["Enabled"],
                            "DependentPlugins": serialized_plugin[
                                "DependentPlugins"] if "DependentPlugins" in serialized_plugin else {"L": []},
                            "FrameworkVersion": serialized_plugin["FrameworkVersion"]
                        }
                    }
                }
            ]
        )
    except BadRequestError as e:
        print(f"Got chalice BadRequestError: {str(e)}")
        raise
    except ValidationError as e:
        # Schema violations surface to the caller as 400s.
        print(f"Got jsonschema ValidationError: {str(e)}")
        raise BadRequestError(e.message)
    except NotFoundError as e:
        print(f"Got chalice NotFoundError: {str(e)}")
        raise
    except Exception as e:
        print(f"Unable to register or publish a new version of the plugin: {str(e)}")
        raise ChaliceViewError(f"Unable to register or publish a new version of the plugin: {str(e)}")
    else:
        print(
            f"Successfully registered or published a new version of the plugin: {json.dumps(plugin, cls=DecimalEncoder)}")
        return {
            "Id": plugin["Id"],
            "Version": "v" + str(higher_version)
        }
@app.route('/plugin/all', cors=True, methods=['GET'], authorizer=authorizer)
def list_plugins():
    """
    List the latest version of all the registered plugins.
    Each plugin has version "v0" which holds a copy of the latest plugin revision.
    By default, return only the plugins that are "Enabled" in the system. In order
    to also return the "Disabled" plugins, include the query parameter "include_disabled=true".
    Returns:
    .. code-block:: python
        [
            {
                "Name": string,
                "Id": string,
                "Class": ["Classifier"|"Optimizer"|"Featurer"|"Labeler"],
                "Description": string,
                "ContentGroups": list,
                "ExecutionType": ["Sync"|"SyncModel"],
                "SupportedMediaType": ["Video"|"Audio"],
                "ExecuteLambdaQualifiedARN": arn,
                "StateDefinition": string,
                "ModelEndpoints": [
                    {
                        "Name": string,
                        "Version": string
                    },
                    ...
                ],
                "Configuration" : {
                    "configuration1": "value1",
                    ...
                },
                "OutputAttributes" : {
                    "attribute1": {
                        "Description": string
                    },
                    ...
                },
                "DependentPlugins": list,
                "Version": string,
                "Created": timestamp,
                "Latest": number,
                "Enabled": boolean,
                "FrameworkVersion": "x.x.x"
            },
            ...
        ]
    Raises:
        500 - ChaliceViewError
    """
    try:
        print("Listing the latest version of all the registered plugins")
        query_params = app.current_request.query_params
        # "include_disabled=true" widens the filter to disabled plugins as well.
        if query_params and query_params.get("include_disabled") == "true":
            filter_expression = Attr("Enabled").is_in([True, False])
        else:
            filter_expression = Attr("Enabled").eq(True)
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # Query the "v0" rows (latest revision of each plugin) via the version index.
        response = plugin_table.query(
            IndexName=PLUGIN_VERSION_INDEX,
            KeyConditionExpression=Key("Version").eq("v0"),
            FilterExpression=filter_expression
        )
        plugins = response["Items"]
        # Page through the rest of the result set via LastEvaluatedKey.
        while "LastEvaluatedKey" in response:
            response = plugin_table.query(
                IndexName=PLUGIN_VERSION_INDEX,
                ExclusiveStartKey=response["LastEvaluatedKey"],
                KeyConditionExpression=Key("Version").eq("v0"),
                FilterExpression=filter_expression
            )
            plugins.extend(response["Items"])
    except Exception as e:
        print(f"Unable to list the latest version of all the registered plugins: {str(e)}")
        raise ChaliceViewError(f"Unable to list the latest version of all the registered plugins: {str(e)}")
    else:
        # Convert DynamoDB Decimals back to native numbers before returning.
        return replace_decimals(plugins)
@app.route('/plugin/class/{plugin_class}/all', cors=True, methods=['GET'], authorizer=authorizer)
def list_plugins_by_class(plugin_class):
    """
    List the latest version of all the registered plugins by class.
    Each plugin has version "v0" which holds a copy of the latest plugin revision.
    By default, return only the plugins that are "Enabled" in the system. In order
    to also return the "Disabled" plugins, include the query parameter "include_disabled=true".
    Returns:
    .. code-block:: python
        [
            {
                "Name": string,
                "Id": string,
                "Class": ["Classifier"|"Optimizer"|"Featurer"|"Labeler"],
                "Description": string,
                "ContentGroups": list,
                "ExecutionType": ["Sync"|"SyncModel"],
                "SupportedMediaType": ["Video"|"Audio"],
                "ExecuteLambdaQualifiedARN": arn,
                "StateDefinition": string,
                "ModelEndpoints": [
                    {
                        "Name": string,
                        "Version": string
                    },
                    ...
                ],
                "Configuration" : {
                    "configuration1": "value1",
                    ...
                },
                "OutputAttributes" : {
                    "attribute1": {
                        "Description": string
                    },
                    ...
                },
                "DependentPlugins": list,
                "Version": string,
                "Created": timestamp,
                "Latest": number,
                "Enabled": boolean,
                "FrameworkVersion": "x.x.x"
            },
            ...
        ]
    Raises:
        500 - ChaliceViewError
    """
    try:
        # Path parameters arrive URL-encoded; decode before using as a key value.
        plugin_class = urllib.parse.unquote(plugin_class)
        print(f"Listing the latest version of all the registered plugins for class '{plugin_class}'")
        query_params = app.current_request.query_params
        # "include_disabled=true" widens the filter to disabled plugins as well.
        if query_params and query_params.get("include_disabled") == "true":
            filter_expression = Attr("Enabled").is_in([True, False])
        else:
            filter_expression = Attr("Enabled").eq(True)
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # Query the "v0" rows via the version index, further filtered by Class.
        response = plugin_table.query(
            IndexName=PLUGIN_VERSION_INDEX,
            KeyConditionExpression=Key("Version").eq("v0"),
            FilterExpression=Attr("Class").eq(plugin_class) & filter_expression
        )
        plugins = response["Items"]
        # Page through the rest of the result set via LastEvaluatedKey.
        while "LastEvaluatedKey" in response:
            response = plugin_table.query(
                IndexName=PLUGIN_VERSION_INDEX,
                ExclusiveStartKey=response["LastEvaluatedKey"],
                KeyConditionExpression=Key("Version").eq("v0"),
                FilterExpression=Attr("Class").eq(plugin_class) & filter_expression
            )
            plugins.extend(response["Items"])
    except Exception as e:
        print(f"Unable to list the latest version of all the registered plugins for class '{plugin_class}': {str(e)}")
        raise ChaliceViewError(
            f"Unable to list the latest version of all the registered plugins for class '{plugin_class}': {str(e)}")
    else:
        # Convert DynamoDB Decimals back to native numbers before returning.
        return replace_decimals(plugins)
@app.route('/plugin/contentgroup/{content_group}/all', cors=True, methods=['GET'], authorizer=authorizer)
def list_plugins_by_contentgroup(content_group):
    """
    List the latest version of all the registered plugins by content group.
    Each plugin has version "v0" which holds a copy of the latest plugin revision.
    By default, return only the plugins that are "Enabled" in the system. In order
    to also return the "Disabled" plugins, include the query parameter "include_disabled=true".
    Returns:
    .. code-block:: python
        [
            {
                "Name": string,
                "Id": string,
                "Class": ["Classifier"|"Optimizer"|"Featurer"|"Labeler"],
                "Description": string,
                "ContentGroups": list,
                "ExecutionType": ["Sync"|"SyncModel"],
                "SupportedMediaType": ["Video"|"Audio"],
                "ExecuteLambdaQualifiedARN": arn,
                "StateDefinition": string,
                "ModelEndpoints": [
                    {
                        "Name": string,
                        "Version": string
                    },
                    ...
                ],
                "Configuration" : {
                    "configuration1": "value1",
                    ...
                },
                "OutputAttributes" : {
                    "attribute1": {
                        "Description": string
                    },
                    ...
                },
                "DependentPlugins": list,
                "Version": string,
                "Created": timestamp,
                "Latest": number,
                "Enabled": boolean,
                "FrameworkVersion": "x.x.x"
            },
            ...
        ]
    Raises:
        500 - ChaliceViewError
    """
    try:
        # Path parameters arrive URL-encoded; decode before using as a filter value.
        content_group = urllib.parse.unquote(content_group)
        print(f"Listing the latest version of all the registered plugins for content group '{content_group}'")
        query_params = app.current_request.query_params
        # "include_disabled=true" widens the filter to disabled plugins as well.
        if query_params and query_params.get("include_disabled") == "true":
            filter_expression = Attr("Enabled").is_in([True, False])
        else:
            filter_expression = Attr("Enabled").eq(True)
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # ContentGroups is a list attribute, so use contains() rather than eq().
        response = plugin_table.query(
            IndexName=PLUGIN_VERSION_INDEX,
            KeyConditionExpression=Key("Version").eq("v0"),
            FilterExpression=Attr("ContentGroups").contains(content_group) & filter_expression
        )
        plugins = response["Items"]
        # Page through the rest of the result set via LastEvaluatedKey.
        while "LastEvaluatedKey" in response:
            response = plugin_table.query(
                IndexName=PLUGIN_VERSION_INDEX,
                ExclusiveStartKey=response["LastEvaluatedKey"],
                KeyConditionExpression=Key("Version").eq("v0"),
                FilterExpression=Attr("ContentGroups").contains(content_group) & filter_expression
            )
            plugins.extend(response["Items"])
    except Exception as e:
        print(
            f"Unable to list the latest version of all the registered plugins for content group '{content_group}': {str(e)}")
        raise ChaliceViewError(
            f"Unable to list the latest version of all the registered plugins for content group '{content_group}': {str(e)}")
    else:
        # Convert DynamoDB Decimals back to native numbers before returning.
        return replace_decimals(plugins)
@app.route('/plugin/class/{plugin_class}/contentgroup/{content_group}/all', cors=True, methods=['GET'],
           authorizer=authorizer)
def list_plugins_by_class_and_contentgroup(plugin_class, content_group):
    """
    List the latest version of all the registered plugins by class and content group.
    Each plugin has version "v0" which holds a copy of the latest plugin revision.
    By default, return only the plugins that are "Enabled" in the system. In order
    to also return the "Disabled" plugins, include the query parameter "include_disabled=true".
    Returns:
    .. code-block:: python
        [
            {
                "Name": string,
                "Id": string,
                "Class": ["Classifier"|"Optimizer"|"Featurer"|"Labeler"],
                "Description": string,
                "ContentGroups": list,
                "ExecutionType": ["Sync"|"SyncModel"],
                "SupportedMediaType": ["Video"|"Audio"],
                "ExecuteLambdaQualifiedARN": arn,
                "StateDefinition": string,
                "ModelEndpoints": [
                    {
                        "Name": string,
                        "Version": string
                    },
                    ...
                ],
                "Configuration" : {
                    "configuration1": "value1",
                    ...
                },
                "OutputAttributes" : {
                    "attribute1": {
                        "Description": string
                    },
                    ...
                },
                "DependentPlugins": list,
                "Version": string,
                "Created": timestamp,
                "Latest": number,
                "Enabled": boolean,
                "FrameworkVersion": "x.x.x"
            },
            ...
        ]
    Raises:
        500 - ChaliceViewError
    """
    try:
        # Path parameters arrive URL-encoded; decode before using as filter values.
        plugin_class = urllib.parse.unquote(plugin_class)
        content_group = urllib.parse.unquote(content_group)
        print(
            f"Listing the latest version of all the registered plugins for class '{plugin_class}' and content group '{content_group}'")
        query_params = app.current_request.query_params
        # "include_disabled=true" widens the filter to disabled plugins as well.
        if query_params and query_params.get("include_disabled") == "true":
            filter_expression = Attr("Enabled").is_in([True, False])
        else:
            filter_expression = Attr("Enabled").eq(True)
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # Combine both filters: exact Class match AND ContentGroups membership.
        response = plugin_table.query(
            IndexName=PLUGIN_VERSION_INDEX,
            KeyConditionExpression=Key("Version").eq("v0"),
            FilterExpression=Attr("Class").eq(plugin_class) & Attr("ContentGroups").contains(
                content_group) & filter_expression
        )
        plugins = response["Items"]
        # Page through the rest of the result set via LastEvaluatedKey.
        while "LastEvaluatedKey" in response:
            response = plugin_table.query(
                IndexName=PLUGIN_VERSION_INDEX,
                ExclusiveStartKey=response["LastEvaluatedKey"],
                KeyConditionExpression=Key("Version").eq("v0"),
                FilterExpression=Attr("Class").eq(plugin_class) & Attr("ContentGroups").contains(
                    content_group) & filter_expression
            )
            plugins.extend(response["Items"])
    except Exception as e:
        print(
            f"Unable to list the latest version of all the registered plugins for class '{plugin_class}' and content group '{content_group}': {str(e)}")
        raise ChaliceViewError(
            f"Unable to list the latest version of all the registered plugins for class '{plugin_class}' and content group '{content_group}': {str(e)}")
    else:
        # Convert DynamoDB Decimals back to native numbers before returning.
        return replace_decimals(plugins)
@app.route('/plugin/{name}', cors=True, methods=['GET'], authorizer=authorizer)
def get_plugin_by_name(name):
    """
    Get the latest version of a plugin by name.
    Each plugin has version "v0" which holds a copy of the latest plugin revision.
    Returns:
    .. code-block:: python
        {
            "Name": string,
            "Id": string,
            "Class": ["Classifier"|"Optimizer"|"Featurer"|"Labeler"],
            "Description": string,
            "ContentGroups": list,
            "ExecutionType": ["Sync"|"SyncModel"],
            "SupportedMediaType": ["Video"|"Audio"],
            "ExecuteLambdaQualifiedARN": arn,
            "StateDefinition": string,
            "ModelEndpoints": [
                {
                    "Name": string,
                    "Version": string
                },
                ...
            ],
            "Configuration" : {
                "configuration1": "value1",
                ...
            },
            "OutputAttributes" : {
                "attribute1": {
                    "Description": string
                },
                ...
            },
            "DependentPlugins": list,
            "Version": string,
            "Created": timestamp,
            "Latest": number,
            "Enabled": boolean,
            "FrameworkVersion": "x.x.x"
        }
    Raises:
        404 - NotFoundError
        500 - ChaliceViewError
    """
    try:
        # Path parameters arrive URL-encoded; decode before using as the key.
        name = urllib.parse.unquote(name)
        print(f"Getting the latest version of the plugin '{name}'")
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # "v0" always holds the latest revision of the plugin.
        response = plugin_table.get_item(
            Key={
                "Name": name,
                "Version": "v0"
            },
            ConsistentRead=True
        )
        if "Item" not in response:
            raise NotFoundError(f"Plugin '{name}' not found")
    except NotFoundError as e:
        print(f"Got chalice NotFoundError: {str(e)}")
        raise
    except Exception as e:
        print(f"Unable to get the latest version of the plugin '{name}': {str(e)}")
        raise ChaliceViewError(f"Unable to get the latest version of the plugin '{name}': {str(e)}")
    else:
        # Convert DynamoDB Decimals back to native numbers before returning.
        return replace_decimals(response["Item"])
@app.route('/plugin/{name}/version/{version}', cors=True, methods=['GET'], authorizer=authorizer)
def get_plugin_by_name_and_version(name, version):
    """
    Get a plugin by name and version.
    Returns:
    .. code-block:: python
        {
            "Name": string,
            "Id": string,
            "Class": ["Classifier"|"Optimizer"|"Featurer"|"Labeler"],
            "Description": string,
            "ContentGroups": list,
            "ExecutionType": ["Sync"|"SyncModel"],
            "SupportedMediaType": ["Video"|"Audio"],
            "ExecuteLambdaQualifiedARN": arn,
            "StateDefinition": string,
            "ModelEndpoints": [
                {
                    "Name": string,
                    "Version": string
                },
                ...
            ],
            "Configuration" : {
                "configuration1": "value1",
                ...
            },
            "OutputAttributes" : {
                "attribute1": {
                    "Description": string
                },
                ...
            },
            "DependentPlugins": list,
            "Version": string,
            "Created": timestamp,
            ["Latest": number],
            ["Enabled": boolean],
            "FrameworkVersion": "x.x.x"
        }
    Raises:
        404 - NotFoundError
        500 - ChaliceViewError
    """
    try:
        # Path parameters arrive URL-encoded; decode before using as key values.
        name = urllib.parse.unquote(name)
        version = urllib.parse.unquote(version)
        print(f"Getting the plugin '{name}' with version '{version}'")
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        response = plugin_table.get_item(
            Key={
                "Name": name,
                "Version": version
            },
            ConsistentRead=True
        )
        if "Item" not in response:
            raise NotFoundError(f"Plugin '{name}' with version '{version}' not found")
    except NotFoundError as e:
        print(f"Got chalice NotFoundError: {str(e)}")
        raise
    except Exception as e:
        print(f"Unable to get the plugin '{name}' with version '{version}': {str(e)}")
        raise ChaliceViewError(f"Unable to get the plugin '{name}' with version '{version}': {str(e)}")
    else:
        # Convert DynamoDB Decimals back to native numbers before returning.
        return replace_decimals(response["Item"])
@app.route('/plugin/{name}/version/all', cors=True, methods=['GET'], authorizer=authorizer)
def list_plugin_versions(name):
    """
    List all the versions of a plugin by name.
    Returns:
    .. code-block:: python
        [
            {
                "Name": string,
                "Id": string,
                "Class": ["Classifier"|"Optimizer"|"Featurer"|"Labeler"],
                "Description": string,
                "ContentGroups": list,
                "ExecutionType": ["Sync"|"SyncModel"],
                "SupportedMediaType": ["Video"|"Audio"],
                "ExecuteLambdaQualifiedARN": arn,
                "StateDefinition": string,
                "ModelEndpoints": [
                    {
                        "Name": string,
                        "Version": string
                    },
                    ...
                ],
                "Configuration" : {
                    "configuration1": "value1",
                    ...
                },
                "OutputAttributes" : {
                    "attribute1": {
                        "Description": string
                    },
                    ...
                },
                "DependentPlugins": list,
                "Version": string,
                "Created": timestamp,
                ["Latest": number],
                ["Enabled": boolean],
                "FrameworkVersion": "x.x.x"
            },
            ...
        ]
    Raises:
        404 - NotFoundError
        500 - ChaliceViewError
    """
    try:
        # Path parameters arrive URL-encoded; decode before using as the key.
        name = urllib.parse.unquote(name)
        print(f"Getting all the versions of the plugin '{name}'")
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # Query every version row of this plugin via the name index.
        response = plugin_table.query(
            IndexName=PLUGIN_NAME_INDEX,
            KeyConditionExpression=Key("Name").eq(name)
        )
        if "Items" not in response or len(response["Items"]) < 1:
            raise NotFoundError(f"Plugin '{name}' not found")
        versions = response["Items"]
        # Page through the rest of the result set via LastEvaluatedKey.
        while "LastEvaluatedKey" in response:
            response = plugin_table.query(
                IndexName=PLUGIN_NAME_INDEX,
                ExclusiveStartKey=response["LastEvaluatedKey"],
                KeyConditionExpression=Key("Name").eq(name)
            )
            versions.extend(response["Items"])
        # Remove version 'v0' from the query result
        # (safe to pop inside the loop: we break immediately after, and there
        # is at most one 'v0' row per plugin).
        for index, version in enumerate(versions):
            if version["Version"] == "v0":
                versions.pop(index)
                break
    except NotFoundError as e:
        print(f"Got chalice NotFoundError: {str(e)}")
        raise
    except Exception as e:
        print(f"Unable to list the versions of the plugin '{name}': {str(e)}")
        raise ChaliceViewError(f"Unable to list the versions of the plugin '{name}': {str(e)}")
    else:
        # Convert DynamoDB Decimals back to native numbers before returning.
        return replace_decimals(versions)
@app.route('/plugin/{name}', cors=True, methods=['DELETE'], authorizer=authorizer)
def delete_plugin(name):
    """
    Delete all the versions of a plugin by name.

    Returns:
        None

    Raises:
        404 - NotFoundError
        500 - ChaliceViewError
    """
    try:
        name = urllib.parse.unquote(name)
        print(f"Deleting plugin '{name}' and all its versions")
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # Base query arguments, reused for every pagination round
        query_kwargs = {
            "IndexName": PLUGIN_NAME_INDEX,
            "KeyConditionExpression": Key("Name").eq(name)
        }
        response = plugin_table.query(**query_kwargs)
        if "Items" not in response or len(response["Items"]) < 1:
            raise NotFoundError(f"Plugin '{name}' not found")
        items_to_delete = response["Items"]
        # Drain any remaining pages of the query result
        while "LastEvaluatedKey" in response:
            response = plugin_table.query(
                ExclusiveStartKey=response["LastEvaluatedKey"],
                **query_kwargs
            )
            items_to_delete.extend(response["Items"])
        # Batch-delete every version item that belongs to this plugin
        with plugin_table.batch_writer() as batch:
            for version_item in items_to_delete:
                batch.delete_item(
                    Key={
                        "Name": version_item["Name"],
                        "Version": version_item["Version"]
                    }
                )
    except NotFoundError as e:
        print(f"Got chalice NotFoundError: {str(e)}")
        raise
    except Exception as e:
        print(f"Unable to delete the plugin '{name}' and its versions: {str(e)}")
        raise ChaliceViewError(f"Unable to delete the plugin '{name}' and its versions: {str(e)}")
    else:
        print(f"Deletion of plugin '{name}' and its versions successful")
        return {}
@app.route('/plugin/{name}/version/{version}', cors=True, methods=['DELETE'], authorizer=authorizer)
def delete_plugin_version(name, version):
    """
    Delete a specific version of a plugin by name and version.

    Deletion can be performed on all the plugin versions except "v0" and the
    latest plugin revision. If the latest plugin version needs to be deleted,
    publish a new version of the plugin and then delete the prior plugin version.

    Returns:
        None

    Raises:
        400 - BadRequestError
        404 - NotFoundError
        500 - ChaliceViewError
    """
    try:
        name = urllib.parse.unquote(name)
        version = urllib.parse.unquote(version)
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # Look up the 'v0' bookkeeping item to learn the latest published revision
        v0_item = plugin_table.get_item(
            Key={"Name": name, "Version": "v0"},
            ConsistentRead=True
        )
        if "Item" not in v0_item:
            raise NotFoundError(f"Plugin '{name}' not found")
        latest_version = "v" + str(v0_item["Item"]["Latest"])
        print(f"Deleting version '{version}' of the plugin '{name}'")
        # Conditional delete: refuse to remove 'v0' or the latest revision
        deleted = plugin_table.delete_item(
            Key={"Name": name, "Version": version},
            ConditionExpression="NOT (#Version IN (:Value1, :Value2))",
            ExpressionAttributeNames={"#Version": "Version"},
            ExpressionAttributeValues={":Value1": "v0", ":Value2": latest_version},
            ReturnValues="ALL_OLD"
        )
        # No old attributes returned means no such item existed
        if "Attributes" not in deleted:
            raise NotFoundError(f"Plugin '{name}' with version '{version}' not found")
    except NotFoundError as e:
        print(f"Got chalice NotFoundError: {str(e)}")
        raise
    except ClientError as e:
        print(f"Got DynamoDB ClientError: {str(e)}")
        if e.response["Error"]["Code"] != "ConditionalCheckFailedException":
            raise
        if version == "v0":
            raise BadRequestError("Deletion of version 'v0' of the plugin is prohibited")
        raise BadRequestError(
            f"Deletion of version '{version}' of the plugin is blocked as it is the latest plugin revision. Publish a new version to unblock the deletion of version '{version}'")
    except Exception as e:
        print(f"Unable to delete version '{version}' of the plugin '{name}': {str(e)}")
        raise ChaliceViewError(f"Unable to delete version '{version}' of the plugin '{name}': {str(e)}")
    else:
        print(f"Deletion of version '{version}' of the plugin '{name}' successful")
        return {}
@app.route('/plugin/{name}/status', cors=True, methods=['PUT'], authorizer=authorizer)
def update_plugin_status(name):
    """
    Enable or Disable the latest version of a plugin by name.

    Body:
    .. code-block:: python
        {
            "Enabled": boolean
        }

    Returns:
        None

    Raises:
        400 - BadRequestError
        404 - NotFoundError
        500 - ChaliceViewError
    """
    try:
        name = urllib.parse.unquote(name)
        status = json.loads(app.current_request.raw_body.decode())
        validate(instance=status, schema=API_SCHEMA["update_status"])
        print("Got a valid status schema")
        print(f"Updating the status of the latest version of plugin '{name}'")
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        response = plugin_table.get_item(
            Key={
                "Name": name,
                "Version": "v0"
            },
            ConsistentRead=True
        )
        if "Item" not in response:
            raise NotFoundError(f"Plugin '{name}' not found")
        latest_version = "v" + str(response["Item"]["Latest"])
        # Update the latest version FIRST. It carries the ConditionExpression, so
        # a failed conditional check aborts before the 'v0' bookkeeping item is
        # touched; the previous order could leave v0's Enabled flag diverged from
        # the latest version when the condition failed.
        plugin_table.update_item(
            Key={
                "Name": name,
                "Version": latest_version
            },
            UpdateExpression="SET #Enabled = :Status",
            ConditionExpression="attribute_exists(#Name) AND attribute_exists(#Version)",
            ExpressionAttributeNames={
                "#Enabled": "Enabled",
                "#Name": "Name",
                "#Version": "Version"
            },
            ExpressionAttributeValues={
                ":Status": status["Enabled"]
            }
        )
        # Mirror the new status onto the 'v0' bookkeeping item
        plugin_table.update_item(
            Key={
                "Name": name,
                "Version": "v0"
            },
            UpdateExpression="SET #Enabled = :Status",
            ExpressionAttributeNames={
                "#Enabled": "Enabled"
            },
            ExpressionAttributeValues={
                ":Status": status["Enabled"]
            }
        )
    except ValidationError as e:
        print(f"Got jsonschema ValidationError: {str(e)}")
        raise BadRequestError(e.message)
    except ClientError as e:
        print(f"Got DynamoDB ClientError: {str(e)}")
        if e.response["Error"]["Code"] == "ConditionalCheckFailedException":
            raise NotFoundError(f"Plugin '{name}' with latest version '{latest_version}' not found")
        else:
            raise
    except Exception as e:
        print(f"Unable to update the status of the latest version of plugin '{name}': {str(e)}")
        raise ChaliceViewError(f"Unable to update the status of the latest version of plugin '{name}': {str(e)}")
    else:
        return {}
@app.route('/plugin/{name}/version/{version}/status', cors=True, methods=['PUT'], authorizer=authorizer)
def update_plugin_version_status(name, version):
    """
    Enable or Disable a plugin by name and version.

    Body:
    .. code-block:: python
        {
            "Enabled": boolean
        }

    Returns:
        None

    Raises:
        400 - BadRequestError
        404 - NotFoundError
        500 - ChaliceViewError
    """
    try:
        name = urllib.parse.unquote(name)
        version = urllib.parse.unquote(version)
        status = json.loads(app.current_request.raw_body.decode())
        validate(instance=status, schema=API_SCHEMA["update_status"])
        print("Got a valid status schema")
        print(f"Updating the status of the plugin '{name}' with version '{version}'")
        plugin_table = ddb_resource.Table(PLUGIN_TABLE_NAME)
        # Conditional update: only flip the flag if the item already exists
        plugin_table.update_item(
            Key={"Name": name, "Version": version},
            UpdateExpression="SET #Enabled = :Status",
            ConditionExpression="attribute_exists(#Name) AND attribute_exists(#Version)",
            ExpressionAttributeNames={
                "#Enabled": "Enabled",
                "#Name": "Name",
                "#Version": "Version"
            },
            ExpressionAttributeValues={":Status": status["Enabled"]}
        )
    except ValidationError as e:
        print(f"Got jsonschema ValidationError: {str(e)}")
        raise BadRequestError(e.message)
    except ClientError as e:
        print(f"Got DynamoDB ClientError: {str(e)}")
        if e.response["Error"]["Code"] != "ConditionalCheckFailedException":
            raise
        raise NotFoundError(f"Plugin '{name}' with version '{version}' not found")
    except Exception as e:
        print(f"Unable to update the status of the plugin '{name}' with version '{version}': {str(e)}")
        raise ChaliceViewError(f"Unable to update the status of the plugin '{name}' with version '{version}': {str(e)}")
    else:
        return {}
| 36.648206
| 539
| 0.527467
| 4,050
| 46,983
| 6.014321
| 0.084444
| 0.023031
| 0.008211
| 0.009237
| 0.778102
| 0.762008
| 0.745792
| 0.737992
| 0.703793
| 0.684826
| 0
| 0.00502
| 0.368218
| 46,983
| 1,281
| 540
| 36.676815
| 0.815585
| 0.301045
| 0
| 0.537459
| 0
| 0.017915
| 0.280017
| 0.025216
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019544
| false
| 0
| 0.026059
| 0
| 0.065147
| 0.07329
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d7fa3ceb34d2a94aca492ad2f0161c28d4faf6dd
| 46
|
py
|
Python
|
src/dependencmake/exceptions.py
|
pzehner/dependencmake
|
00feb4e52d1b35dac9f85937d80eafbb50c8c452
|
[
"MIT"
] | null | null | null |
src/dependencmake/exceptions.py
|
pzehner/dependencmake
|
00feb4e52d1b35dac9f85937d80eafbb50c8c452
|
[
"MIT"
] | null | null | null |
src/dependencmake/exceptions.py
|
pzehner/dependencmake
|
00feb4e52d1b35dac9f85937d80eafbb50c8c452
|
[
"MIT"
] | null | null | null |
class DependenCmakeError(Exception):
    """Base exception for all dependencmake errors."""
    pass
| 15.333333
| 36
| 0.782609
| 4
| 46
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 37
| 23
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
cc1527c50a7b6c9f224cf01ce2f6ec9761be8ecf
| 278
|
py
|
Python
|
cortex/utils/dispatchers/__init__.py
|
chib0/asd-winter2019
|
c7d95305b1e8b99013fd40da1e7ebe01c2d0102a
|
[
"Apache-2.0"
] | null | null | null |
cortex/utils/dispatchers/__init__.py
|
chib0/asd-winter2019
|
c7d95305b1e8b99013fd40da1e7ebe01c2d0102a
|
[
"Apache-2.0"
] | 4
|
2021-02-02T22:38:53.000Z
|
2022-01-13T02:32:33.000Z
|
cortex/utils/dispatchers/__init__.py
|
chib0/asd-winter2019
|
c7d95305b1e8b99013fd40da1e7ebe01c2d0102a
|
[
"Apache-2.0"
] | null | null | null |
#from cortex.utils.dispatchers.repository import get_dispatcher
from cortex.utils.dispatchers.topic_consumer import get_topic_consumer
from cortex.utils.dispatchers.topic_dispatcher import get_topic_dispatcher
from cortex.utils.dispatchers import tee
from . import repository
| 34.75
| 74
| 0.874101
| 37
| 278
| 6.378378
| 0.297297
| 0.169492
| 0.254237
| 0.440678
| 0.457627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079137
| 278
| 7
| 75
| 39.714286
| 0.921875
| 0.223022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cc2327fd2f237ae3b7e5979ca659cc75e22fba5e
| 34
|
py
|
Python
|
zhaquirks/sunricher/__init__.py
|
watercrossing/zha-device-handlers
|
6eef3574b31a7e8f78358b80113e98b571ebd611
|
[
"Apache-2.0"
] | null | null | null |
zhaquirks/sunricher/__init__.py
|
watercrossing/zha-device-handlers
|
6eef3574b31a7e8f78358b80113e98b571ebd611
|
[
"Apache-2.0"
] | null | null | null |
zhaquirks/sunricher/__init__.py
|
watercrossing/zha-device-handlers
|
6eef3574b31a7e8f78358b80113e98b571ebd611
|
[
"Apache-2.0"
] | null | null | null |
"""Module for Sunricher devices"""
| 34
| 34
| 0.735294
| 4
| 34
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.806452
| 0.823529
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cc26fe1e37762d2cb6a2d1af6b05f14c386e267a
| 1,201
|
py
|
Python
|
string_calculator/v1/test_calc.py
|
foobacca/tdd-kata
|
7184ca68ef0b9f234815b349f87b66d7b1ef4a05
|
[
"MIT"
] | null | null | null |
string_calculator/v1/test_calc.py
|
foobacca/tdd-kata
|
7184ca68ef0b9f234815b349f87b66d7b1ef4a05
|
[
"MIT"
] | null | null | null |
string_calculator/v1/test_calc.py
|
foobacca/tdd-kata
|
7184ca68ef0b9f234815b349f87b66d7b1ef4a05
|
[
"MIT"
] | null | null | null |
import pytest
from .calc import calculator
# Behavioural tests for the string-calculator kata: empty input, single
# numbers, comma/newline separators, negative numbers, and >1000 filtering.

def test_calculator_with_empty_string_check_returns_zero():
    assert calculator('') == 0

def test_calculator_with_single_number_check_returns_number():
    assert calculator('53') == 53

def test_calculator_with_two_comma_separated_numbers_check_returns_sum():
    assert calculator('53,5') == 58

def test_calculator_with_newline_separated_numbers_check_returns_sum():
    assert calculator('53\n5') == 58

def test_calculator_with_three_comma_separated_numbers_check_returns_sum():
    assert calculator('53,5,11') == 69

def test_calculator_with_three_newline_separated_numbers_check_returns_sum():
    assert calculator('53\n5\n11') == 69

def test_calculator_with_three_numbers_one_comma_one_newline_check_returns_sum():
    assert calculator('53,5\n11') == 69

def test_calculator_with_negative_numbers_check_raises_error():
    # Negative input must raise rather than be summed
    with pytest.raises(Exception):
        calculator('-2')

def test_calculator_with_three_numbers_one_equals_1000_check_large_number_ignored():
    # 1000 itself is still counted...
    assert calculator('53,1000,5') == 1058

def test_calculator_with_three_numbers_one_gt_1000_check_large_number_ignored():
    # ...but anything strictly greater than 1000 is ignored
    assert calculator('53,5555,5') == 58
| 26.688889
| 84
| 0.800167
| 168
| 1,201
| 5.190476
| 0.261905
| 0.080275
| 0.194954
| 0.240826
| 0.651376
| 0.59289
| 0.525229
| 0.362385
| 0.259174
| 0.259174
| 0
| 0.060918
| 0.111574
| 1,201
| 44
| 85
| 27.295455
| 0.756326
| 0
| 0
| 0
| 0
| 0
| 0.045795
| 0
| 0
| 0
| 0
| 0
| 0.391304
| 1
| 0.434783
| true
| 0
| 0.086957
| 0
| 0.521739
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
cc30a7b429d8237e9407f788401beb2829dd44b8
| 239
|
py
|
Python
|
fasttest/common/__init__.py
|
xinxi1990/fasttest
|
51c807f038e9b03ae31b658815ca1d1b422d41a7
|
[
"MIT"
] | null | null | null |
fasttest/common/__init__.py
|
xinxi1990/fasttest
|
51c807f038e9b03ae31b658815ca1d1b422d41a7
|
[
"MIT"
] | null | null | null |
fasttest/common/__init__.py
|
xinxi1990/fasttest
|
51c807f038e9b03ae31b658815ca1d1b422d41a7
|
[
"MIT"
] | 1
|
2020-12-15T03:42:41.000Z
|
2020-12-15T03:42:41.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from fasttest.common.dict import Dict
from fasttest.common.variable_global import Var
from fasttest.common.logging import log_info, log_error
__all__ = ['log_info','log_error','Var', 'Dict']
| 29.875
| 55
| 0.748954
| 36
| 239
| 4.722222
| 0.555556
| 0.211765
| 0.317647
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009346
| 0.104603
| 239
| 7
| 56
| 34.142857
| 0.785047
| 0.179916
| 0
| 0
| 0
| 0
| 0.123711
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0bc5b0a9e77aaa632a3e59f45ee60a57b130009a
| 153
|
py
|
Python
|
sigdepsem/__init__.py
|
Ricyteach/sigdepsem
|
80aea4f891450010e5949f4ebcbdc5aae7f91ab4
|
[
"MIT"
] | null | null | null |
sigdepsem/__init__.py
|
Ricyteach/sigdepsem
|
80aea4f891450010e5949f4ebcbdc5aae7f91ab4
|
[
"MIT"
] | null | null | null |
sigdepsem/__init__.py
|
Ricyteach/sigdepsem
|
80aea4f891450010e5949f4ebcbdc5aae7f91ab4
|
[
"MIT"
] | null | null | null |
"""Demonstrate signature dependent semantics for item get, set, del"""
__version__ = "0.1"
from .main import SigDepMeta, signature_dependent_semantics
| 25.5
| 70
| 0.784314
| 19
| 153
| 6
| 0.842105
| 0.315789
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014925
| 0.124183
| 153
| 5
| 71
| 30.6
| 0.835821
| 0.418301
| 0
| 0
| 0
| 0
| 0.036145
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0be77d06743e4b334bf10a7ebb3f92b40f6ca210
| 51
|
py
|
Python
|
python/plp_utils/__init__.py
|
pleprince/maya_utils
|
c0e89f5757077f111354cb92888e0cca30060938
|
[
"CC-BY-4.0"
] | null | null | null |
python/plp_utils/__init__.py
|
pleprince/maya_utils
|
c0e89f5757077f111354cb92888e0cca30060938
|
[
"CC-BY-4.0"
] | null | null | null |
python/plp_utils/__init__.py
|
pleprince/maya_utils
|
c0e89f5757077f111354cb92888e0cca30060938
|
[
"CC-BY-4.0"
] | null | null | null |
# philippe leprince
# Fri Nov 17 18:16:16 GMT 2017
| 17
| 30
| 0.72549
| 10
| 51
| 3.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.292683
| 0.196078
| 51
| 2
| 31
| 25.5
| 0.609756
| 0.901961
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0424d070c31874e136fbe0f9ba306d437a31b594
| 702
|
py
|
Python
|
SecretColors/data/palettes/__init__.py
|
secretBiology/SecretColors
|
7c2ef921947bae93321b56a3b01046b7798a344f
|
[
"MIT"
] | 32
|
2019-06-03T08:45:33.000Z
|
2022-02-03T15:06:59.000Z
|
SecretColors/data/palettes/__init__.py
|
secretBiology/SecretColors
|
7c2ef921947bae93321b56a3b01046b7798a344f
|
[
"MIT"
] | 7
|
2019-11-19T08:39:06.000Z
|
2022-03-29T14:04:47.000Z
|
SecretColors/data/palettes/__init__.py
|
secretBiology/SecretColors
|
7c2ef921947bae93321b56a3b01046b7798a344f
|
[
"MIT"
] | 5
|
2019-06-04T09:18:14.000Z
|
2022-03-15T05:30:14.000Z
|
# Copyright (c) SecretBiology 2020.
#
# Library Name: SecretColors
# Author: Rohit Suratekar
# Website: https://github.com/secretBiology/SecretColors
#
# Most of these palettes are derived from various design systems. Few
# examples of such design systems can be found on following URL
# https://designsystemsrepo.com/design-systems
from SecretColors.data.palettes.parent import ParentPalette
from SecretColors.data.palettes.ibm import IBMPalette
from SecretColors.data.palettes.material import MaterialPalette
from SecretColors.data.palettes.clarity import ClarityPalette
from SecretColors.data.palettes.brewer import ColorBrewer
from SecretColors.data.palettes.tableau import TableauPalette
| 41.294118
| 70
| 0.824786
| 85
| 702
| 6.811765
| 0.564706
| 0.165803
| 0.207254
| 0.290155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0064
| 0.109687
| 702
| 16
| 71
| 43.875
| 0.92
| 0.457265
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0b04465ebaa3b9f441caa0e70a60ef4124b5a240
| 90
|
py
|
Python
|
python/triton/language/__init__.py
|
h-vetinari/triton
|
d9dd97492f228020573b39a9cec14ee3b8776957
|
[
"MIT"
] | 146
|
2015-12-29T03:42:45.000Z
|
2020-02-05T14:50:55.000Z
|
python/triton/language/__init__.py
|
h-vetinari/triton
|
d9dd97492f228020573b39a9cec14ee3b8776957
|
[
"MIT"
] | 28
|
2015-12-26T01:38:22.000Z
|
2018-11-18T05:20:26.000Z
|
python/triton/language/__init__.py
|
h-vetinari/triton
|
d9dd97492f228020573b39a9cec14ee3b8776957
|
[
"MIT"
] | 52
|
2016-02-26T17:27:28.000Z
|
2020-01-20T03:13:40.000Z
|
# flake8: noqa: F401
from . import core, random
from .core import *
from .random import *
| 18
| 26
| 0.711111
| 13
| 90
| 4.923077
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054795
| 0.188889
| 90
| 4
| 27
| 22.5
| 0.821918
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0b180f6f703a1483ab0a1f7fc828994dc9090792
| 52
|
py
|
Python
|
__init__.py
|
minhht-0134/redmine_sample
|
187364109ba245e035b304356e156a1b82ec43ad
|
[
"MIT"
] | null | null | null |
__init__.py
|
minhht-0134/redmine_sample
|
187364109ba245e035b304356e156a1b82ec43ad
|
[
"MIT"
] | null | null | null |
__init__.py
|
minhht-0134/redmine_sample
|
187364109ba245e035b304356e156a1b82ec43ad
|
[
"MIT"
] | null | null | null |
from environs import Env
# Module-level environment reader.
# NOTE(review): read_env presumably loads variables from a local .env file --
# confirm against the environs documentation.
env = Env()
env.read_env()
| 13
| 24
| 0.730769
| 9
| 52
| 4.111111
| 0.555556
| 0.486486
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 52
| 4
| 25
| 13
| 0.840909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9be0cf30f2757ca5f52e03ff7e939cb778659618
| 168
|
py
|
Python
|
userprofile/forms.py
|
Xlj100512/myblog
|
f170d621539a118af02a0ee5a2392f9f0c2a6b05
|
[
"MIT"
] | null | null | null |
userprofile/forms.py
|
Xlj100512/myblog
|
f170d621539a118af02a0ee5a2392f9f0c2a6b05
|
[
"MIT"
] | null | null | null |
userprofile/forms.py
|
Xlj100512/myblog
|
f170d621539a118af02a0ee5a2392f9f0c2a6b05
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib.auth.models import User
class UserLoginForm(forms.Form):
    """Login form collecting a username and password pair."""

    username = forms.CharField()
    password = forms.CharField()
| 21
| 43
| 0.767857
| 21
| 168
| 6.142857
| 0.666667
| 0.155039
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 168
| 7
| 44
| 24
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
503dd2da1a1bdc872f4fda5cb0e79b2ca280864b
| 40
|
py
|
Python
|
j4j_handler/api_ux_handler/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
j4j_handler/api_ux_handler/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
j4j_handler/api_ux_handler/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
from .j4j_api_ux import J4J_APIUXHandler
| 40
| 40
| 0.9
| 7
| 40
| 4.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 0.075
| 40
| 1
| 40
| 40
| 0.837838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
504e62de05d67ab741a3e2e94e5f37229348f7b7
| 176
|
py
|
Python
|
src/AuShadha/demographics/demographics/admin.py
|
GosthMan/AuShadha
|
3ab48825a0dba19bf880b6ac6141ab7a6adf1f3e
|
[
"PostgreSQL"
] | 46
|
2015-03-04T14:19:47.000Z
|
2021-12-09T02:58:46.000Z
|
src/AuShadha/demographics/demographics/admin.py
|
aytida23/AuShadha
|
3ab48825a0dba19bf880b6ac6141ab7a6adf1f3e
|
[
"PostgreSQL"
] | 2
|
2015-06-05T10:29:04.000Z
|
2015-12-06T16:54:10.000Z
|
src/AuShadha/demographics/demographics/admin.py
|
aytida23/AuShadha
|
3ab48825a0dba19bf880b6ac6141ab7a6adf1f3e
|
[
"PostgreSQL"
] | 24
|
2015-03-23T01:38:11.000Z
|
2022-01-24T16:23:42.000Z
|
from django.contrib import admin
from .models import Demographics
class DemographicsAdmin(admin.ModelAdmin):
    """Default (unmodified) admin configuration for the Demographics model."""
    pass

# Make Demographics editable through the Django admin site.
admin.site.register(Demographics, DemographicsAdmin)
| 16
| 52
| 0.8125
| 19
| 176
| 7.526316
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 176
| 10
| 53
| 17.6
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
504fbcffb1ff3547cba58dbd27e0ec9d265de07a
| 68,022
|
py
|
Python
|
test/python/test_onnx.py
|
XinChCh/singa
|
93fd9da72694e68bfe3fb29d0183a65263d238a1
|
[
"Apache-2.0"
] | 2,354
|
2015-05-05T03:01:56.000Z
|
2019-10-22T15:08:11.000Z
|
test/python/test_onnx.py
|
Dadaguaibuhaoyisi/singa
|
93fd9da72694e68bfe3fb29d0183a65263d238a1
|
[
"Apache-2.0"
] | 332
|
2019-10-24T15:06:32.000Z
|
2022-03-07T06:22:32.000Z
|
test/python/test_onnx.py
|
zlheui/singa
|
ced9e9d44c200d709db5a2354076390788986b77
|
[
"Apache-2.0"
] | 607
|
2015-05-03T14:09:05.000Z
|
2019-10-21T09:49:21.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import unittest
from builtins import str
from singa import singa_wrap as singa_api
from singa import tensor
from singa import singa_wrap as singa
from singa import autograd
from singa import layer
from singa import sonnx
from singa import opt
import onnx
from onnx import (defs, checker, helper, numpy_helper, mapping, ModelProto,
GraphProto, NodeProto, AttributeProto, TensorProto,
OperatorSetIdProto)
from onnx.helper import make_tensor, make_tensor_value_info, make_node, make_graph
from cuda_helper import gpu_dev, cpu_dev
import numpy as np
# Module-level switch: run singa autograd in training mode for this test file.
autograd.training = True
def _tuple_to_string(t):
lt = [str(x) for x in t]
return '(' + ', '.join(lt) + ')'
class TestPythonOnnx(unittest.TestCase):
def check_shape(self, actual, expect):
    """Assert that the actual shape equals the expected shape.

    The failure message renders both shapes via ``_tuple_to_string``.
    """
    # Fix: the failure message previously misspelled 'expected' as 'exepcted'.
    self.assertEqual(
        actual, expect, 'shape mismatch, actual shape is %s'
        ' expected is %s' %
        (_tuple_to_string(actual), _tuple_to_string(expect)))
def _conv2d_helper(self, dev):
    """Round-trip a Conv2d op through ONNX on *dev* and compare outputs."""
    # Gaussian-initialized input tensor of shape (2, 3, 3, 3)
    x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
    x.gaussian(0.0, 1.0)
    y = layer.Conv2d(1, 2)(x)
    # frontend: export the computation to an ONNX model
    model = sonnx.to_onnx([x], [y])
    # backend: re-import the ONNX model and execute it on the same device
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x])
    # outputs must agree to 5 decimal places
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_conv2d_cpu(self):
    """Conv2d ONNX round-trip on CPU."""
    self._conv2d_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_conv2d_gpu(self):
    """Conv2d ONNX round-trip on GPU (skipped without CUDA)."""
    self._conv2d_helper(gpu_dev)
def _relu_helper(self, dev):
    """Round-trip a ReLU op through ONNX on *dev* and compare outputs."""
    X = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
                  0.5]).reshape(3, 2).astype(np.float32)
    # XT appears unused in this helper (kept from the original)
    XT = np.array([0.8, 0, 3.3, 0, 0, 0.5]).reshape(3, 2).astype(np.float32)
    x = tensor.from_numpy(X)
    x.to_device(dev)
    y = autograd.ReLU()(x)[0]
    # frontend
    model = sonnx.to_onnx([x], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x])
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_relu_cpu(self):
    """ReLU ONNX round-trip on CPU."""
    self._relu_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_relu_gpu(self):
    """ReLU ONNX round-trip on GPU (skipped without CUDA)."""
    self._relu_helper(gpu_dev)
def _avg_pool_helper(self, dev):
    """Round-trip an AvgPool2d op through ONNX on *dev* and compare outputs."""
    x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
    x.gaussian(0.0, 1.0)
    y = layer.AvgPool2d(3, 1, 2)(x)
    # frontend
    model = sonnx.to_onnx([x], [y])
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x])
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_avg_pool_cpu(self):
    """AvgPool2d ONNX round-trip on CPU."""
    self._avg_pool_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_avg_pool_gpu(self):
    """AvgPool2d ONNX round-trip on GPU (skipped without CUDA)."""
    self._avg_pool_helper(gpu_dev)
def _softmax_helper(self, dev):
    """Round-trip a SoftMax op through ONNX on *dev* and compare outputs."""
    X = np.array([[-1, 0, 1]]).astype(np.float32)
    x = tensor.from_numpy(X)
    x.to_device(dev)
    y = autograd.SoftMax()(x)[0]
    # frontend
    model = sonnx.to_onnx([x], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x])
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_softmax_cpu(self):
    """SoftMax ONNX round-trip on CPU."""
    self._softmax_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_softmax_gpu(self):
    """SoftMax ONNX round-trip on GPU (skipped without CUDA)."""
    self._softmax_helper(gpu_dev)
def _sigmoid_helper(self, dev):
    """Round-trip a Sigmoid op through ONNX on *dev* and compare outputs."""
    X = np.array([[-1, 0, 1]]).astype(np.float32)
    x = tensor.from_numpy(X)
    x.to_device(dev)
    y = autograd.Sigmoid()(x)[0]
    # frontend
    model = sonnx.to_onnx([x], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x])
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_sigmoid_cpu(self):
    """Sigmoid ONNX round-trip on CPU."""
    self._sigmoid_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_sigmoid_gpu(self):
    """Sigmoid ONNX round-trip on GPU (skipped without CUDA)."""
    self._sigmoid_helper(gpu_dev)
def _add_helper(self, dev):
    """Round-trip a two-input Add op through ONNX on *dev* and compare outputs."""
    X1 = np.random.randn(3, 4, 5).astype(np.float32)
    X2 = np.random.randn(3, 4, 5).astype(np.float32)
    x1 = tensor.from_numpy(X1)
    x2 = tensor.from_numpy(X2)
    x1.to_device(dev)
    x2.to_device(dev)
    y = autograd.Add()(x1, x2)[0]
    # frontend
    model = sonnx.to_onnx([x1, x2], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x1, x2])
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_add_cpu(self):
    """Add ONNX round-trip on CPU."""
    self._add_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_add_gpu(self):
    """Add ONNX round-trip on GPU (skipped without CUDA)."""
    self._add_helper(gpu_dev)
def _concat_helper(self, dev):
    """Round-trip a Concat op through ONNX on *dev* and compare outputs."""
    X1 = np.random.randn(3, 4, 5).astype(np.float32)
    X2 = np.random.randn(3, 4, 5).astype(np.float32)
    x1 = tensor.from_numpy(X1)
    x2 = tensor.from_numpy(X2)
    x1.to_device(dev)
    x2.to_device(dev)
    y = autograd.Concat()(x1, x2)[0]
    # frontend
    model = sonnx.to_onnx([x1, x2], [y])
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x1, x2])
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_concat_cpu(self):
    """Concat ONNX round-trip on CPU."""
    self._concat_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_concat_gpu(self):
    """Concat ONNX round-trip on GPU (skipped without CUDA)."""
    self._concat_helper(gpu_dev)
def _matmul_helper(self, dev):
    """Round-trip a Matmul op through ONNX on *dev* and compare outputs."""
    # (4, 5) @ (5, 4) -> compatible inner dimensions
    X1 = np.random.randn(4, 5).astype(np.float32)
    X2 = np.random.randn(5, 4).astype(np.float32)
    x1 = tensor.from_numpy(X1)
    x2 = tensor.from_numpy(X2)
    x1.to_device(dev)
    x2.to_device(dev)
    y = autograd.Matmul()(x1, x2)[0]
    # frontend
    model = sonnx.to_onnx([x1, x2], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x1, x2])
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_matmul_cpu(self):
    """Matmul ONNX round-trip on CPU."""
    self._matmul_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_matmul_gpu(self):
    """Matmul ONNX round-trip on GPU (skipped without CUDA)."""
    self._matmul_helper(gpu_dev)
def _max_pool_helper(self, dev):
    """Round-trip a MaxPool2d op through ONNX on *dev* and compare outputs."""
    x = tensor.Tensor(shape=(2, 3, 4, 4), device=dev)
    x.gaussian(0.0, 1.0)
    y = layer.MaxPool2d(2, 2, 0)(x)
    # frontend
    model = sonnx.to_onnx([x], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x])
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_max_pool_cpu(self):
    """MaxPool2d ONNX round-trip on CPU."""
    self._max_pool_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_max_pool_gpu(self):
    """MaxPool2d ONNX round-trip on GPU (skipped without CUDA)."""
    self._max_pool_helper(gpu_dev)
def _batch_norm_helper(self, dev):
    """Round-trip 2D batch-norm through ONNX on *dev* and compare outputs."""
    x = np.array([[[[-1, 0, 1]], [[2, 3, 4]]]]).astype(np.float32)
    s = np.array([1.0, 1.5]).astype(np.float32)
    bias = np.array([0, 1]).astype(np.float32)
    mean = np.array([0, 3]).astype(np.float32)
    var = np.array([1, 1.5]).astype(np.float32)
    x = tensor.from_numpy(x)
    x.to_device(dev)
    s = tensor.from_numpy(s)
    s.to_device(dev)
    bias = tensor.from_numpy(bias)
    mean = tensor.from_numpy(mean)
    var = tensor.from_numpy(var)
    bias.to_device(dev)
    mean.to_device(dev)
    var.to_device(dev)
    # Device-specific handle: cuDNN variant on GPU, plain handle on CPU
    if dev == cpu_dev:
        handle = singa.BatchNormHandle(0.9, x.data)
    else:
        handle = singa.CudnnBatchNormHandle(0.9, x.data)
    y = autograd.batchnorm_2d(handle, x, s, bias, mean, var)
    # frontend
    model = sonnx.to_onnx([x, s, bias, mean, var], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x, s, bias])  # mean and var has been stored in graph
    np.testing.assert_array_almost_equal(tensor.to_numpy(y),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_batch_norm_cpu(self):
    """Batch-norm ONNX round-trip on CPU."""
    self._batch_norm_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_batch_norm_gpu(self):
    """Batch-norm ONNX round-trip on GPU (skipped without CUDA)."""
    self._batch_norm_helper(gpu_dev)
def _linear_helper(self, dev):
x = tensor.Tensor(shape=(2, 20), device=dev)
x.gaussian(0.0, 1.0)
x1 = x.clone()
y = layer.Linear(20, 1, bias=False)(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_linear_cpu(self):
self._linear_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_linear_gpu(self):
self._linear_helper(gpu_dev)
def _gemm_helper(self, dev):
A = np.random.randn(2, 3).astype(np.float32)
B = np.random.rand(3, 4).astype(np.float32)
C = np.random.rand(2, 4).astype(np.float32)
alpha = 1.0
beta = 2.0
tA = tensor.from_numpy(A)
tB = tensor.from_numpy(B)
tC = tensor.from_numpy(C)
tA.to_device(dev)
tB.to_device(dev)
tC.to_device(dev)
y = autograd.Gemm(alpha, beta, 0, 0)(tA, tB, tC)[0]
# frontend
model = sonnx.to_onnx([tA, tB, tC], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([tA, tB, tC])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_gemm_cpu(self):
self._gemm_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_gemm_gpu(self):
self._gemm_helper(gpu_dev)
def _reshape_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd.Reshape((2, 3))(x)[0]
# frontend
model = sonnx.to_onnx([x, (2, 3)], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x]) # shape has been stored in graph
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_reshape_cpu(self):
self._reshape_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_reshape_gpu(self):
self._reshape_helper(gpu_dev)
def _sum_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x1 = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
x1 = tensor.from_numpy(x1)
y = autograd.Sum()(x, x1)[0]
# frontend
model = sonnx.to_onnx([x, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_sum_cpu(self):
self._sum_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_sum_gpu(self):
self._sum_helper(gpu_dev)
def _Cos_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Cos()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Cos_cpu(self):
self._Cos_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Cos_gpu(self):
self._Cos_helper(gpu_dev)
def _Cosh_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Cosh()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Cosh_cpu(self):
self._Cosh_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Cosh_gpu(self):
self._Cosh_helper(gpu_dev)
def _Sin_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Sin()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Sin_cpu(self):
self._Sin_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Sin_gpu(self):
self._Sin_helper(gpu_dev)
def _Sinh_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Sinh()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Sinh_cpu(self):
self._Sinh_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Sinh_gpu(self):
self._Sinh_helper(gpu_dev)
def _Tan_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Tan()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Tan_cpu(self):
self._Tan_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Tan_gpu(self):
self._Tan_helper(gpu_dev)
def _Tanh_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Tanh()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Tanh_cpu(self):
self._Tanh_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Tanh_gpu(self):
self._Tanh_helper(gpu_dev)
def _Acos_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Acos()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Acos_cpu(self):
self._Acos_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Acos_gpu(self):
self._Acos_helper(gpu_dev)
def _Acosh_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Acosh()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Acosh_cpu(self):
self._Acosh_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Acosh_gpu(self):
self._Acosh_helper(gpu_dev)
def _Asin_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Asin()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Asin_cpu(self):
self._Asin_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Asin_gpu(self):
self._Asin_helper(gpu_dev)
def _Asinh_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Asinh()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Asinh_cpu(self):
self._Asinh_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Asinh_gpu(self):
self._Asinh_helper(gpu_dev)
def _Atan_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Atan()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Atan_cpu(self):
self._Atan_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Atan_gpu(self):
self._Atan_helper(gpu_dev)
def _Atanh_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.Atanh()(x)[0]
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Atanh_cpu(self):
self._Atanh_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Atanh_gpu(self):
self._Atanh_helper(gpu_dev)
def _SeLu_helper(self, dev):
x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
#y = gamma * (alpha * e^x - alpha) for x <= 0, y = gamma * x for x > 0
a = 1.67326
g = 1.0507
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd.selu(x, a, g)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_SeLu_cpu(self):
self._SeLu_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_SeLu_gpu(self):
self._SeLu_helper(gpu_dev)
def _ELu_helper(self, dev):
x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
#y = gamma * (alpha * e^x - alpha) for x <= 0, y = gamma * x for x > 0
a = 1.
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd.elu(x, a)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_ELu_cpu(self):
self._ELu_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_ELu_gpu(self):
self._ELu_helper(gpu_dev)
# No Op registered for equal with domain_version of 11
# def _Equal_helper(self, dev):
# x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
# 0.9]).reshape(3, 2).astype(np.float32)
# x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
# 2).astype(np.float32)
# x0 = tensor.from_numpy(x0)
# x1 = tensor.from_numpy(x1)
# x0.to_device(dev)
# x1.to_device(dev)
# y = autograd.equal(x0, x1)
# # frontend
# model = sonnx.to_onnx([x0, x1], [y])
# # print('The model is:\n{}'.format(model))
# # backend
# sg_ir = sonnx.prepare(model, device=dev)
# sg_ir.is_graph = True
# y_t = sg_ir.run([x0, x1])
# np.testing.assert_array_almost_equal(tensor.to_numpy(y),
# tensor.to_numpy(y_t[0]),
# decimal=5)
# def test_Equal_cpu(self):
# self._Equal_helper(cpu_dev)
# @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
# def test_Equal_gpu(self):
# self._Equal_helper(gpu_dev)
def _Less_helper(self, dev):
x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd.less(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Less_cpu(self):
self._Less_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Less_gpu(self):
self._Less_helper(gpu_dev)
def _Sign_helper(self, dev):
x = np.array([0.8, -1.2, 3.3, -3.6, -0.5,
0.5]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
y = autograd.sign(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Sign_cpu(self):
self._Sign_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Sign_gpu(self):
self._Sign_helper(gpu_dev)
def _Div_helper(self, dev):
x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd.div(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Div_cpu(self):
self._Div_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Div_gpu(self):
self._Div_helper(gpu_dev)
def _Sub_helper(self, dev):
x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd.sub(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Sub_cpu(self):
self._Sub_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Sub_gpu(self):
self._Sub_helper(gpu_dev)
def _Sqrt_helper(self, dev):
X = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(X)
x.to_device(dev)
y = autograd.sqrt(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev, init_inputs=X)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Sqrt_cpu(self):
self._Sqrt_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Sqrt_gpu(self):
self._Sqrt_helper(gpu_dev)
def _Greater_helper(self, dev):
x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(cpu_dev)
x1.to_device(cpu_dev)
y = autograd.greater(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_Greater_cpu(self):
self._Greater_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_Greater_gpu(self):
self._Greater_helper(gpu_dev)
def _HardSigmoid_helper(self, dev):
x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
a = 0.2
g = 0.5
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd.hardsigmoid(x, a, g)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_HardSigmoid_cpu(self):
self._HardSigmoid_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_HardSigmoid_gpu(self):
self._HardSigmoid_helper(gpu_dev)
def _identity_helper(self, dev):
x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd.identity(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_identity_cpu(self):
self._identity_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_identity_gpu(self):
self._identity_helper(gpu_dev)
def _softplus_helper(self, dev):
x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd.softplus(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_softplus_cpu(self):
self._softplus_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_softplus_gpu(self):
self._softplus_helper(gpu_dev)
def _softsign_helper(self, dev):
x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd.softsign(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_softsign_cpu(self):
self._softsign_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_softsign_gpu(self):
self._softsign_helper(gpu_dev)
def _mean_helper(self, dev):
x0 = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd.mean(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_mean_cpu(self):
self._mean_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_mean_gpu(self):
self._mean_helper(gpu_dev)
def _pow_helper(self, dev):
x0 = np.array([7, 5, 0.2, 0.1, 0.3, 4]).reshape(3, 2).astype(np.float32)
x1 = np.array([-1.0, 2.0, -1.0, -2.1, 1.0,
-2.0]).reshape(3, 2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd.mean(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_pow_cpu(self):
self._pow_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_pow_gpu(self):
self._pow_helper(gpu_dev)
def _clip_helper(self, dev):
x = np.array([-0.9, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
min = -0.5
max = 0.5
x.to_device(dev)
y = autograd.clip(x, min, max)
# frontend
model = sonnx.to_onnx([x, min, max], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x]) # min, max has been stored in model
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_clip_cpu(self):
self._clip_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_clip_gpu(self):
self._clip_helper(gpu_dev)
def _prelu_helper(self, dev):
x = np.array([0.1, -1.0, -0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
slope = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
slope = tensor.from_numpy(slope)
x.to_device(dev)
slope.to_device(dev)
y = autograd.prelu(x, slope)
# frontend
model = sonnx.to_onnx([x, slope], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x, slope])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_prelu_cpu(self):
self._prelu_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_prelu_gpu(self):
self._prelu_helper(gpu_dev)
def _mul_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x1 = np.array([0.1, 1.0, 0.4, 4.0, 0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
x1 = tensor.from_numpy(x1)
x.to_device(dev)
x1.to_device(dev)
y = autograd.mul(x, x1)
# frontend
model = sonnx.to_onnx([x, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_mul_cpu(self):
self._mul_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_mul_gpu(self):
self._mul_helper(gpu_dev)
def _transpose_helper(self, dev):
x = np.random.randn(3, 2, 1)
y = x.transpose(1, 2, 0)
x = tensor.from_numpy(x)
x.to_device(cpu_dev)
y = autograd.transpose(x, (1, 2, 0))
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_transpose_cpu(self):
self._transpose_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_transpose_gpu(self):
self._transpose_helper(gpu_dev)
def _max_helper(self, dev):
X0 = np.array([0.1, 0.2, 2.0, 0.0, 0.1,
0.2]).reshape(3, 2).astype(np.float32)
X1 = np.array([1.0, 2.0, 1.0, 2.1, 0.0,
2.0]).reshape(3, 2).astype(np.float32)
x0 = tensor.from_numpy(X0)
x1 = tensor.from_numpy(X1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd.max(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_max_cpu(self):
self._max_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_max_gpu(self):
self._max_helper(gpu_dev)
def _min_helper(self, dev):
X0 = np.array([0.1, 0.2, 2.0, 0.0, 0.1,
0.2]).reshape(3, 2).astype(np.float32)
X1 = np.array([1.0, 2.0, 1.0, 2.1, 0.0,
2.0]).reshape(3, 2).astype(np.float32)
x0 = tensor.from_numpy(X0)
x1 = tensor.from_numpy(X1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd.min(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_min_cpu(self):
self._min_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_min_gpu(self):
self._min_helper(gpu_dev)
def _shape_helper(self, dev):
x = np.array([0.1, -1.0, 0.4, 4.0, -0.9,
9.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd.shape(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_shape_cpu(self):
self._shape_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_shape_gpu(self):
self._shape_helper(gpu_dev)
def _and_helper(self, dev):
x0 = np.array([0, -0.3, -0.1, 0.1, 0.5,
0.9]).reshape(3, 2).astype(np.float32)
x1 = np.array([0, -0.3, 0, 0.1, 0.5, 0.9]).reshape(3,
2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd._and(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_and_cpu(self):
self._and_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_and_gpu(self):
self._and_helper(gpu_dev)
def _or_helper(self, dev):
x0 = np.array([1.0, 1.0, 2.0, -3.0, 0,
-7.0]).reshape(3, 2).astype(np.float32)
x1 = np.array([-1.0, 0, 2.0, 4.0, 0,
-7.0]).reshape(3, 2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd._or(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_or_cpu(self):
self._or_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_or_gpu(self):
self._or_helper(gpu_dev)
def _xor_helper(self, dev):
x0 = np.array([0, -0.3, -0.1, 0.1, 0.5,
9.0]).reshape(3, 2).astype(np.float32)
x1 = np.array([0, -0.3, 0, 0.1, 0, 0.9]).reshape(3,
2).astype(np.float32)
x0 = tensor.from_numpy(x0)
x1 = tensor.from_numpy(x1)
x0.to_device(dev)
x1.to_device(dev)
y = autograd._xor(x0, x1)
# frontend
model = sonnx.to_onnx([x0, x1], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x0, x1])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_xor_cpu(self):
self._xor_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_xor_gpu(self):
self._xor_helper(gpu_dev)
def _not_helper(self, dev):
x = np.array([1.0, -1.0, 0, -0.1, 0,
-7.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(x)
x.to_device(dev)
y = autograd._not(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_not_cpu(self):
self._not_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_not_gpu(self):
self._not_helper(gpu_dev)
def _negative_helper(self, dev):
X = np.array([0.1, 0, 0.4, 1. - 4, 0.9,
-2.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(X)
x.to_device(dev)
y = autograd.negative(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_negative_cpu(self):
self._negative_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_negative_gpu(self):
self._negative_helper(gpu_dev)
def _reciprocal_helper(self, dev):
X = np.array([0.1, 0, 0.4, 1. - 4, 0.9,
-2.0]).reshape(3, 2).astype(np.float32)
x = tensor.from_numpy(X)
x.to_device(cpu_dev)
y = autograd.reciprocal(x)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_reciprocal_cpu(self):
self._reciprocal_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_reciprocal_gpu(self):
self._reciprocal_helper(gpu_dev)
def _constantOfShape_helper(self, dev):
X = np.array([4, 3, 2]).astype(np.int64)
x = tensor.from_numpy(X)
x.to_device(cpu_dev)
y = autograd.constant_of_shape(x, 1.)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev, init_inputs=[X])
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(tensor.to_numpy(y),
tensor.to_numpy(y_t[0]),
decimal=5)
def test_constantOfShape_cpu(self):
self._constantOfShape_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_constantOfShape_gpu(self):
self._constantOfShape_helper(gpu_dev)
def _dropout_helper(self, dev):
X = np.random.randn(3, 4, 5).astype(np.float32)
x = tensor.from_numpy(X)
x.to_device(dev)
y = autograd.dropout(x, 0.5)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
self.check_shape(
tensor.to_numpy(y).shape,
tensor.to_numpy(y_t[0]).shape)
def test_dropout_cpu(self):
self._dropout_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_dropout_gpu(self):
self._dropout_helper(gpu_dev)
def _reduceSum_helper(self, dev):
X = np.random.randn(3, 4, 5).astype(np.float32)
x = tensor.from_numpy(X)
x.to_device(dev)
y = autograd.reduce_sum(x, None, 1)
# frontend
model = sonnx.to_onnx([x], [y])
# print('The model is:\n{}'.format(model))
# backend
sg_ir = sonnx.prepare(model, device=dev)
sg_ir.is_graph = True
y_t = sg_ir.run([x])
np.testing.assert_array_almost_equal(
tensor.to_numpy(y).shape,
tensor.to_numpy(y_t[0]).shape)
def test_reduceSum_cpu(self):
self._reduceSum_helper(cpu_dev)
@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_reduceSum_gpu(self):
self._reduceSum_helper(gpu_dev)
def _reduceMean_helper(self, dev):
    """Round-trip a ReduceMean op through ONNX and compare output shapes."""
    data = np.random.randn(3, 4, 5).astype(np.float32)
    inp = tensor.from_numpy(data)
    inp.to_device(dev)
    # axes=None, keepdims=1 — presumably reduce over all axes; confirm
    # against autograd.reduce_mean's signature.
    out = autograd.reduce_mean(inp, None, 1)
    # Frontend: export; backend: rebuild from ONNX and rerun.
    backend = sonnx.prepare(sonnx.to_onnx([inp], [out]), device=dev)
    backend.is_graph = True
    rebuilt = backend.run([inp])
    np.testing.assert_array_almost_equal(
        tensor.to_numpy(out).shape,
        tensor.to_numpy(rebuilt[0]).shape)

def test_reduceMean_cpu(self):
    self._reduceMean_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_reduceMean_gpu(self):
    self._reduceMean_helper(gpu_dev)
def _squeeze_helper(self, dev):
    """Round-trip a Squeeze op through ONNX and compare output shapes."""
    # Cast to float32 for consistency with every other helper in this class:
    # np.random.randn returns float64, which would otherwise flow through
    # the ONNX export as a different dtype from the rest of the suite.
    X = np.random.randn(3, 1, 2, 1, 1).astype(np.float32)
    x = tensor.from_numpy(X)
    x.to_device(dev)
    # Drop the three singleton axes.
    y = autograd.squeeze(x, [1, 3, 4])
    # frontend: export the computation to an ONNX model
    model = sonnx.to_onnx([x], [y])
    # backend: rebuild the graph from ONNX and run it
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x])
    # Only shapes are compared here.
    np.testing.assert_array_almost_equal(
        tensor.to_numpy(y).shape,
        tensor.to_numpy(y_t[0]).shape)

def test_squeeze_cpu(self):
    self._squeeze_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_squeeze_gpu(self):
    self._squeeze_helper(gpu_dev)
def _unsqueeze_helper(self, dev):
    """Round-trip an Unsqueeze op through ONNX and compare output shapes."""
    # Cast to float32 for consistency with every other helper in this class:
    # np.random.randn returns float64 by default.
    X = np.random.randn(3, 2).astype(np.float32)
    x = tensor.from_numpy(X)
    x.to_device(dev)
    # Insert singleton axes at positions 2, 4 and 5.
    y = autograd.unsqueeze(x, [2, 4, 5])
    # frontend: export the computation to an ONNX model
    model = sonnx.to_onnx([x], [y])
    # backend: rebuild the graph from ONNX and run it
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    y_t = sg_ir.run([x])
    # Only shapes are compared here.
    np.testing.assert_array_almost_equal(
        tensor.to_numpy(y).shape,
        tensor.to_numpy(y_t[0]).shape)

def test_unsqueeze_cpu(self):
    self._unsqueeze_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_unsqueeze_gpu(self):
    self._unsqueeze_helper(gpu_dev)
def _slice_helper(self, dev):
    """Round-trip a Slice op through ONNX and compare output shapes."""
    data = np.random.randn(20, 10, 5).astype(np.float32)
    # Rows 0:3 on axis 0 and columns 0:10 on axis 1, unit steps.
    starts, ends, axes, steps = [0, 0], [3, 10], [0, 1], [1, 1]
    inp = tensor.from_numpy(data)
    inp.to_device(dev)
    out = autograd.slice(inp, starts, ends, axes, steps)
    # Frontend: export; backend: rebuild from ONNX and rerun.
    backend = sonnx.prepare(sonnx.to_onnx([inp], [out]), device=dev)
    backend.is_graph = True
    rebuilt = backend.run([inp])
    np.testing.assert_array_almost_equal(
        tensor.to_numpy(out).shape,
        tensor.to_numpy(rebuilt[0]).shape)

def test_slice_cpu(self):
    self._slice_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_slice_gpu(self):
    self._slice_helper(gpu_dev)
# # TODO: we don't support multi outputs
# def _split_helper(self, dev):
# X = np.array([1., 2., 3., 4., 5., 6.]).astype(np.float32)
# x = tensor.from_numpy(X)
# x.to_device(dev)
# y = autograd.split(x, 0, (2, 4))
# # frontend
# model = sonnx.to_onnx([x], [*y])
# # print('The model is:\n{}'.format(model))
# # backend
# sg_ir = sonnx.prepare(model, device=dev)
# sg_ir.is_graph = True
# y_t = sg_ir.run([x])[0]
# np.testing.assert_array_almost_equal(tensor.to_numpy(y).shape, tensor.to_numpy(y_t).shape)
# def test_split_cpu(self):
# self._split_helper(cpu_dev)
# @unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
# def test_split_gpu(self):
# self._split_helper(gpu_dev)
def _gather_helper(self, dev):
    """Round-trip a Gather op through ONNX and compare output shapes."""
    data = np.array([0, 1, 2]).astype(np.float32)
    inp = tensor.from_numpy(data)
    inp.to_device(dev)
    # NOTE(review): index 3 is out of range for a length-3 input —
    # presumably exercises the backend's out-of-bounds handling; confirm.
    out = autograd.gather(inp, 0, [0, 1, 3])
    # Frontend: export; backend: rebuild from ONNX and rerun.
    backend = sonnx.prepare(sonnx.to_onnx([inp], [out]), device=dev)
    backend.is_graph = True
    rebuilt = backend.run([inp])
    np.testing.assert_array_almost_equal(
        tensor.to_numpy(out).shape,
        tensor.to_numpy(rebuilt[0]).shape)

def test_gather_cpu(self):
    self._gather_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_gather_gpu(self):
    self._gather_helper(gpu_dev)
def _tile_helper(self, dev):
    """Round-trip a Tile op through ONNX and compare output shapes."""
    data = np.array([0, 1, 2]).astype(np.float32)
    inp = tensor.from_numpy(data)
    inp.to_device(dev)
    out = autograd.tile(inp, [2, 2])
    # Frontend: export; backend: rebuild from ONNX and rerun.
    backend = sonnx.prepare(sonnx.to_onnx([inp], [out]), device=dev)
    backend.is_graph = True
    rebuilt = backend.run([inp])
    np.testing.assert_array_almost_equal(
        tensor.to_numpy(out).shape,
        tensor.to_numpy(rebuilt[0]).shape)

def test_tile_cpu(self):
    self._tile_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_tile_gpu(self):
    self._tile_helper(gpu_dev)
def _nonzero_helper(self, dev):
    """Round-trip a NonZero op through ONNX and compare output shapes."""
    data = np.array([[1, 0], [1, 1]]).astype(np.float32)
    inp = tensor.from_numpy(data)
    inp.to_device(dev)
    out = autograd.nonzero(inp)
    # Frontend: export; backend: rebuild from ONNX and rerun.
    backend = sonnx.prepare(sonnx.to_onnx([inp], [out]), device=dev)
    backend.is_graph = True
    rebuilt = backend.run([inp])
    np.testing.assert_array_almost_equal(
        tensor.to_numpy(out).shape,
        tensor.to_numpy(rebuilt[0]).shape)

def test_nonzero_cpu(self):
    self._nonzero_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_nonzero_gpu(self):
    self._nonzero_helper(gpu_dev)
def _cast_helper(self, dev):
    """Round-trip a Cast (float32 -> int32) op through ONNX and compare
    output shapes."""
    data = np.array([[1, 0], [1, 1]]).astype(np.float32)
    inp = tensor.from_numpy(data)
    inp.to_device(dev)
    out = autograd.cast(inp, tensor.int32)
    # Frontend: export; backend: rebuild from ONNX and rerun.
    backend = sonnx.prepare(sonnx.to_onnx([inp], [out]), device=dev)
    backend.is_graph = True
    rebuilt = backend.run([inp])
    np.testing.assert_array_almost_equal(
        tensor.to_numpy(out).shape,
        tensor.to_numpy(rebuilt[0]).shape)

def test_cast_cpu(self):
    self._cast_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_cast_gpu(self):
    self._cast_helper(gpu_dev)
def _onehot_helper(self, dev):
    """Round-trip a OneHot op through ONNX and compare output shapes."""
    on_value, off_value = 3, 1
    indices = np.array([[1, 9], [2, 4]], dtype=np.float32)
    depth = np.array([10], dtype=np.float32)
    # values = [off, on] per the ONNX OneHot convention.
    values = np.array([off_value, on_value], dtype=np.float32)
    inp = tensor.from_numpy(indices)
    inp.to_device(dev)
    # Encode along axis 1.
    out = autograd.onehot(1, inp, depth, values)
    # Frontend: export; backend: rebuild from ONNX and rerun.
    backend = sonnx.prepare(sonnx.to_onnx([inp], [out]), device=dev)
    backend.is_graph = True
    rebuilt = backend.run([inp])
    self.check_shape(
        tensor.to_numpy(out).shape,
        tensor.to_numpy(rebuilt[0]).shape)

def test_onehot_cpu(self):
    self._onehot_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_onehot_gpu(self):
    self._onehot_helper(gpu_dev)
def _inference_helper(self, dev):
    # Chain two conv layers, export to ONNX, then run the rebuilt graph with
    # last_layers=-1 (truncate the final layer): the result should match the
    # output of conv1 applied alone.
    x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
    x.gaussian(0.0, 1.0)
    conv1 = layer.Conv2d(1, 2)
    conv2 = layer.Conv2d(1, 2)

    class MyLayer(layer.Layer):
        # Minimal two-layer model: conv1 followed by conv2.

        def __init__(self, conv1, conv2):
            super(MyLayer, self).__init__()
            self.conv1 = conv1
            self.conv2 = conv2

        def forward(self, inputs):
            x = self.conv1(inputs)
            x = self.conv2(x)
            return x

    y = MyLayer(conv1, conv2)(x)
    # Reference: output after the first layer only.
    x1 = conv1(x)
    # frontend
    model = sonnx.to_onnx([x], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    # Run everything except the last layer of the rebuilt graph.
    y_t = sg_ir.run([x], last_layers=-1)
    np.testing.assert_array_almost_equal(tensor.to_numpy(x1),
                                         tensor.to_numpy(y_t[0]),
                                         decimal=5)

def test_inference_cpu(self):
    self._inference_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_inference_gpu(self):
    self._inference_helper(gpu_dev)
def _retraining_helper(self, dev):
    # Train a tiny conv net natively, export it to ONNX, then run and train
    # the rebuilt graph — verifies a prepared ONNX graph stays trainable.
    # The test passes if all steps run without error; no numeric assertion.
    # forward
    x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
    x.gaussian(0.0, 1.0)

    class MyLayer(layer.Layer):
        # conv1 -> conv2 -> flatten.

        def __init__(self):
            super(MyLayer, self).__init__()
            self.conv1 = layer.Conv2d(1, 2)
            self.conv2 = layer.Conv2d(1, 2)

        def forward(self, inputs):
            x = self.conv1(inputs)
            x = self.conv2(x)
            x = autograd.flatten(x)
            return x

    y = MyLayer()(x)
    # Random regression target.
    y_t = tensor.Tensor(shape=(2, 1), device=dev)
    y_t.gaussian(0.0, 1.0)
    loss = autograd.MeanSquareError(y_t)(y)[0]
    # backward: one SGD step on the native graph
    sgd = opt.SGD(lr=0.01)
    for p, gp in autograd.backward(loss):
        sgd.apply(p.name, p, gp)
    sgd.step()
    # frontend
    model = sonnx.to_onnx([x], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    # forward through the rebuilt graph
    y_o = sg_ir.run([x])[0]
    # backward: one SGD step through the rebuilt graph
    loss = autograd.MeanSquareError(y_t)(y_o)[0]
    sgd = opt.SGD(lr=0.01)
    for p, gp in autograd.backward(loss):
        sgd.apply(p.name, p, gp)
    sgd.step()

def test_retraining_cpu(self):
    self._retraining_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_retraining_gpu(self):
    self._retraining_helper(gpu_dev)
def _transfer_learning_helper(self, dev):
    # Intended flow: train a one-conv model, export it to ONNX, then embed
    # the rebuilt graph inside a new model (MyLayer2) with a fresh conv head
    # and train that.  See NOTE(review) comments below — the second phase
    # does not appear to actually use MyLayer2.
    # forward
    x = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
    x.gaussian(0.0, 1.0)

    class MyLayer(layer.Layer):
        # Single conv followed by flatten.

        def __init__(self):
            super(MyLayer, self).__init__()
            self.conv1 = layer.Conv2d(1, 2)

        def forward(self, inputs):
            x = self.conv1(inputs)
            x = autograd.flatten(x)
            return x

    y = MyLayer()(x)
    y_t = tensor.Tensor(shape=(2, 4), device=dev)
    y_t.gaussian(0.0, 1.0)
    loss = autograd.MeanSquareError(y_t)(y)[0]
    # backward: one SGD step on the base model
    sgd = opt.SGD(lr=0.01)
    for p, gp in autograd.backward(loss):
        sgd.apply(p.name, p, gp)
    sgd.step()
    # frontend
    model = sonnx.to_onnx([x], [y])
    # print('The model is:\n{}'.format(model))
    # backend
    sg_ir = sonnx.prepare(model, device=dev)
    sg_ir.is_graph = True
    # forward

    class MyLayer2(layer.Layer):
        # Wraps the imported ONNX graph and appends a new conv head.

        def __init__(self, sg_ir):
            super(MyLayer2, self).__init__()
            self.sg_ir = sg_ir
            # Expose each imported operator as an attribute so its
            # parameters are registered on this layer.
            for node, operator in self.sg_ir.layers:
                self.__dict__[node.name] = operator
            self.conv2 = layer.Conv2d(1, 2)

        def forward(self, inputs):
            x = self.sg_ir.run(inputs, last_layers=-1)[0]
            # NOTE(review): conv2 is applied to `inputs`, not to the
            # imported-graph output `x` — looks like a bug; confirm.
            x = self.conv2(inputs)
            x = autograd.flatten(x)
            return x

    # NOTE(review): this instantiates MyLayer, not MyLayer2(sg_ir), so the
    # transfer-learning path defined above is never exercised — confirm.
    y_o = MyLayer()(x)
    # backward
    y_ot = tensor.Tensor(shape=(2, 1), device=dev)
    y_ot.gaussian(0.0, 1.0)
    loss = autograd.MeanSquareError(y_ot)(y_o)[0]
    sgd = opt.SGD(lr=0.01)
    for p, gp in autograd.backward(loss):
        sgd.apply(p.name, p, gp)
    sgd.step()

def test_transfer_learning_cpu(self):
    self._transfer_learning_helper(cpu_dev)

@unittest.skipIf(not singa_api.USE_CUDA, 'CUDA is not enabled')
def test_transfer_learning_gpu(self):
    self._transfer_learning_helper(gpu_dev)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 31.965226
| 102
| 0.532548
| 9,563
| 68,022
| 3.569277
| 0.033671
| 0.025078
| 0.051797
| 0.055372
| 0.784871
| 0.771746
| 0.763103
| 0.758035
| 0.753054
| 0.745525
| 0
| 0.036564
| 0.334171
| 68,022
| 2,127
| 103
| 31.980254
| 0.717074
| 0.098689
| 0
| 0.607299
| 0
| 0
| 0.022171
| 0
| 0
| 0
| 0
| 0.00047
| 0.047445
| 1
| 0.156204
| false
| 0
| 0.010219
| 0
| 0.173723
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
acd32f3c85dd9f3c68bef23d4418e64a857acce3
| 334
|
py
|
Python
|
tests/test_parse.py
|
elastic-coders/py-graphqlparser
|
c935d2782c224b6a70880eac09773a5d9d905e72
|
[
"BSD-3-Clause"
] | 35
|
2015-09-19T06:04:01.000Z
|
2021-11-04T04:39:17.000Z
|
tests/test_parse.py
|
jhgg/py-graphqlparser
|
b34d0a295a4c009e810380d95c15f2c39d250e1a
|
[
"BSD-3-Clause"
] | 1
|
2016-11-16T08:04:59.000Z
|
2016-11-16T08:04:59.000Z
|
tests/test_parse.py
|
jhgg/py-graphqlparser
|
b34d0a295a4c009e810380d95c15f2c39d250e1a
|
[
"BSD-3-Clause"
] | 5
|
2015-09-21T18:52:40.000Z
|
2021-02-09T10:02:18.000Z
|
import pytest
def test_parse_ok():
    """A well-formed GraphQL document parses to a truthy result."""
    from graphql_parser import GraphQLParser
    parsed = GraphQLParser.graphql_parse_string('{query {id}}')
    assert parsed
def test_parse_bad():
    """An unterminated GraphQL document raises GraphQLParseError."""
    from graphql_parser import GraphQLParser
    parse_error = GraphQLParser.GraphQLParseError
    with pytest.raises(parse_error):
        assert GraphQLParser.graphql_parse_string('{query {id')
| 25.692308
| 63
| 0.769461
| 39
| 334
| 6.333333
| 0.461538
| 0.05668
| 0.097166
| 0.186235
| 0.647773
| 0.356275
| 0.356275
| 0
| 0
| 0
| 0
| 0
| 0.149701
| 334
| 12
| 64
| 27.833333
| 0.869718
| 0
| 0
| 0.25
| 0
| 0
| 0.065868
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.375
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
acd68ddebdf3f9400c2a4b29ce394f47de9aac40
| 153
|
py
|
Python
|
florin/pipelines/balsam.py
|
jeffkinnison/florin
|
94e76812e9fe27c86b2ce39313d07beb21c8b478
|
[
"MIT"
] | 6
|
2019-06-03T19:11:05.000Z
|
2021-01-13T06:35:43.000Z
|
florin/pipelines/balsam.py
|
jeffkinnison/florin
|
94e76812e9fe27c86b2ce39313d07beb21c8b478
|
[
"MIT"
] | 4
|
2019-06-10T14:48:15.000Z
|
2019-10-01T16:48:58.000Z
|
florin/pipelines/balsam.py
|
jeffkinnison/florin
|
94e76812e9fe27c86b2ce39313d07beb21c8b478
|
[
"MIT"
] | 1
|
2019-09-25T17:57:23.000Z
|
2019-09-25T17:57:23.000Z
|
from florin.pipelines.pipeline import Pipeline
class BalsamPipeline(Pipeline):
    # Pipeline variant for the Balsam execution backend (per module path).

    def run(self, data):
        # Lazily applies self.operations to each element of `data` and
        # returns only the first result.
        # NOTE(review): `map(self.operations, data)` treats `operations` as a
        # single callable, and `next(...)` discards all but the first
        # element — confirm this is intended and not a stub.
        return next(map(self.operations, data))
| 25.5
| 47
| 0.738562
| 19
| 153
| 5.947368
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163399
| 153
| 5
| 48
| 30.6
| 0.882813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
acfb6bd89e2bf0508499b4010e890971ec5a8907
| 483
|
py
|
Python
|
tests/function.py
|
btalebali/pysphere
|
cda8bbc480f9942911fb8f9c7f3c5c9a4da8bd43
|
[
"Unlicense"
] | null | null | null |
tests/function.py
|
btalebali/pysphere
|
cda8bbc480f9942911fb8f9c7f3c5c9a4da8bd43
|
[
"Unlicense"
] | null | null | null |
tests/function.py
|
btalebali/pysphere
|
cda8bbc480f9942911fb8f9c7f3c5c9a4da8bd43
|
[
"Unlicense"
] | null | null | null |
import time
import threading
import os
import urllib2
import mmap
import sys, re, getpass, argparse, subprocess
from urlparse import urlparse
from time import sleep
from pysphere import VIServer, MORTypes
from pysphere import VIProperty, VITask,VIException, FaultTypes
from pysphere.vi_virtual_machine import VIVirtualMachine
from pysphere.resources import VimService_services as VI
from pysphere.vi_mor import VIMor
from pysphere import vi_task
from pysphere.ZSI import fault
| 24.15
| 64
| 0.84472
| 67
| 483
| 6.014925
| 0.507463
| 0.208437
| 0.133995
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002381
| 0.130435
| 483
| 19
| 65
| 25.421053
| 0.957143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.066667
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
4a13a8d6f28e4dc8e9d5a37f8b3c3da00c101eb7
| 238
|
py
|
Python
|
factory/SimpleFactory/autos/nullcar.py
|
Tomvictor/python-design-patterns
|
6b99607d721bbe03d26a0a451a10e88cd1c1d112
|
[
"MIT"
] | null | null | null |
factory/SimpleFactory/autos/nullcar.py
|
Tomvictor/python-design-patterns
|
6b99607d721bbe03d26a0a451a10e88cd1c1d112
|
[
"MIT"
] | null | null | null |
factory/SimpleFactory/autos/nullcar.py
|
Tomvictor/python-design-patterns
|
6b99607d721bbe03d26a0a451a10e88cd1c1d112
|
[
"MIT"
] | null | null | null |
from .abs_auto import AbsAuto


class NullCar(AbsAuto):
    """Null-object automobile used when a requested model is not recognized."""

    def __init__(self, carname):
        # Keep the unrecognized model name for the diagnostic message.
        self._carname = carname

    def start(self):
        # Report the unknown model instead of starting anything.
        message = 'Unknown car "%s".' % self._carname
        print(message)

    def stop(self):
        # Nothing to stop on a null car.
        pass
| 19.833333
| 51
| 0.588235
| 28
| 238
| 4.75
| 0.642857
| 0.24812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.302521
| 238
| 11
| 52
| 21.636364
| 0.801205
| 0
| 0
| 0
| 0
| 0
| 0.07489
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0.125
| 0.125
| 0
| 0.625
| 0.125
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
c57f2f6b17555bd27cc5116d33b1532aa71673a1
| 432
|
py
|
Python
|
apps/users/utils.py
|
ivall/IVmonitor
|
8a217cb3cc00a44d7c577ec61a90c77cc7c22959
|
[
"MIT"
] | 190
|
2021-02-06T10:47:54.000Z
|
2022-02-15T23:45:07.000Z
|
apps/users/utils.py
|
ivall/IVmonitor
|
8a217cb3cc00a44d7c577ec61a90c77cc7c22959
|
[
"MIT"
] | null | null | null |
apps/users/utils.py
|
ivall/IVmonitor
|
8a217cb3cc00a44d7c577ec61a90c77cc7c22959
|
[
"MIT"
] | null | null | null |
import bcrypt
def hash_password(password):
    """Hash a plaintext password with bcrypt using a freshly generated salt."""
    encoded = password.encode('utf-8')
    return bcrypt.hashpw(encoded, bcrypt.gensalt())
def verify_password(password, hashed_password) -> bool:
    """Check a plaintext *password* against a bcrypt *hashed_password*.

    Both arguments are str; returns True when the password matches.
    """
    password = password.encode('utf-8')
    hashed_password = hashed_password.encode('utf-8')
    # bcrypt.checkpw re-hashes with the salt embedded in hashed_password and
    # compares in constant time — equivalent to the manual hashpw() + `==`
    # comparison, but without the timing side channel.
    return bcrypt.checkpw(password, hashed_password)
| 28.8
| 67
| 0.729167
| 52
| 432
| 5.884615
| 0.326923
| 0.320261
| 0.287582
| 0.176471
| 0.261438
| 0.261438
| 0.261438
| 0
| 0
| 0
| 0
| 0.008333
| 0.166667
| 432
| 15
| 68
| 28.8
| 0.841667
| 0
| 0
| 0.181818
| 0
| 0
| 0.034642
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0.727273
| 0.090909
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
c5a64dacc825a75e32907ab62c914e397ff88004
| 5,426
|
py
|
Python
|
pytechfin/carol_sync_monitoring.py
|
jnefoussi/pytechfin
|
4d5bc44410b7161ab3acd65b2474468a84e576af
|
[
"MIT"
] | 4
|
2021-03-23T14:44:34.000Z
|
2021-04-22T19:21:52.000Z
|
pytechfin/carol_sync_monitoring.py
|
jnefoussi/pytechfin
|
4d5bc44410b7161ab3acd65b2474468a84e576af
|
[
"MIT"
] | 9
|
2021-03-24T14:45:31.000Z
|
2021-08-04T18:19:04.000Z
|
pytechfin/carol_sync_monitoring.py
|
jnefoussi/pytechfin
|
4d5bc44410b7161ab3acd65b2474468a84e576af
|
[
"MIT"
] | null | null | null |
from .misc import get_tenant_techfin
from .enums import EnumApps
class CarolSyncMonitoring:
    """Helpers around Techfin's carol-sync-monitoring provisioner endpoints."""

    def __init__(self, techfin):
        # `techfin` is the API client; all HTTP calls go through its call_api.
        self.techfin = techfin

    def get_pks(self, dm_name, techfin_app, techfin_tenant=None, carol_tenant=None, page_size=1000,
                page=1, debug=False, max_hits=None):
        """Get PKs from a data model

        Args:
            dm_name (str): Data model name
            techfin_app (str): techfin app name.
            techfin_tenant (str, optional): techfin tenant id. Defaults to None.
            carol_tenant (str, optional): carol tenant name. Defaults to None.
            page_size (int, optional): number of records to get in each iteration. Defaults to 1000.
            page (int, optional): initial page to start to fetch the records. Defaults to 1.
            debug (bool, optional): show debug logs.
            max_hits (int): Number of records to return.

        Returns:
            list: List of PKs
        """
        # No cap by default: page until the API returns an empty page.
        max_hits = max_hits or float('inf')
        if (techfin_tenant is None and carol_tenant is None):
            techfin_tenant = self.techfin.techfin_tenant
        if not EnumApps.exists_value(techfin_app):
            raise ValueError(
                f'techfin_app invalid. Value used" {techfin_app}. Check pytechfin.enums.EnumApps')
        techfin_tenant_id = get_tenant_techfin(
            techfin_tenant=techfin_tenant, carol_tenant=carol_tenant)
        total_data = []
        params = {
            "dataModel": dm_name,
            "page": page,
            "pageSize": page_size
        }
        count = 0
        while True:
            data = self.techfin.call_api(path=f"provisioner/api/v1/carol-sync-monitoring/{techfin_tenant_id}/ids",
                                         techfin_app=techfin_app, method='GET', params=params, )
            # Stop on an empty page or once max_hits records were fetched.
            if(len(data) == 0) or count>=max_hits:
                break
            count += len(data)
            total_data.extend(data)
            params['page'] += 1
            if debug:
                # TODO: use loggers?
                # NOTE(review): prints the *initial* `page`, not the page just
                # fetched (`params['page']`) — looks like a bug; confirm.
                print("total loaded: ", len(total_data), " &page=" +
                      str(page) + " &pageSize=" + str(page_size))
        # The API returns dashed ids; strip the dashes before returning.
        total_data = [d.replace('-', '') for d in total_data]
        return total_data

    def get_table_record_count(self, techfin_app, techfin_tenant=None, carol_tenant=None):
        """Get number of records per table in techfin

        Args:
            techfin_app (str): techfin app name.
            techfin_tenant (str, optional): techfin tenant id. Defaults to None.
            carol_tenant (str, optional): carol tenant name. Defaults to None.

        Returns:
            list of dict: counts per data model.
        """
        # NOTE(review): unlike get_pks/get_data_by_pk, this method does not
        # fall back to self.techfin.techfin_tenant when both tenant arguments
        # are None — confirm whether that is intended.
        if not EnumApps.exists_value(techfin_app):
            raise ValueError(
                f'techfin_app invalid. Value used" {techfin_app}. Check pytechfin.enums.EnumApps')
        techfin_tenant_id = get_tenant_techfin(
            techfin_tenant=techfin_tenant, carol_tenant=carol_tenant)
        r = self.techfin.call_api(path=f'provisioner/api/v1/carol-sync-monitoring/{techfin_tenant_id}/table-record-count',
                                  method='GET', techfin_app=techfin_app)
        return r

    def get_data_by_pk(self, dm_name, techfin_app, pk_list, techfin_tenant=None, carol_tenant=None, page_size=1000, page=1, debug=False, return_dataframe=True):
        """Get records by PK from a data model

        Args:
            dm_name (str): Data model name
            techfin_app (str): techfin app name.
            pk_list (list): list of keys to get.
            techfin_tenant (str, optional): techfin tenant id. Defaults to None.
            carol_tenant (str, optional): carol tenant name. Defaults to None.
            page_size (int, optional): number of records to get in each iteration. Defaults to 1000.
            page (int, optional): initial page to start to fetch the records. Defaults to 1.
            debug (bool, optional): show debug logs.
            return_dataframe (bool, optional): Return a pandas DataFrame

        Returns:
            (list of dict, pd.DataFrame):
        """
        if (techfin_tenant is None and carol_tenant is None):
            techfin_tenant = self.techfin.techfin_tenant
        if not EnumApps.exists_value(techfin_app):
            raise ValueError(
                f'techfin_app invalid. Value used" {techfin_app}. Check pytechfin.enums.EnumApps')
        techfin_tenant_id = get_tenant_techfin(
            techfin_tenant=techfin_tenant, carol_tenant=carol_tenant)
        total_data = []
        params = {
            "page": page,
            "pageSize": page_size
        }
        while True:
            data = self.techfin.call_api(path=f"provisioner/api/v1/datamodel/{techfin_tenant_id}/{dm_name}",
                                         techfin_app=techfin_app, method='POST', params=params, data=pk_list, )
            if(len(data) == 0):
                break
            total_data.extend(data)
            params['page'] += 1
            if debug:
                # TODO: use loggers?
                # NOTE(review): same stale-`page` issue as in get_pks above.
                print("total loaded: ", len(total_data), " &page=" +
                      str(page) + " &pageSize=" + str(page_size))
        if return_dataframe:
            # Imported lazily so pandas is only required when a DataFrame
            # is requested.
            import pandas as pd
            return pd.DataFrame(total_data)
        return total_data
| 37.680556
| 160
| 0.589937
| 653
| 5,426
| 4.709035
| 0.171516
| 0.114146
| 0.043902
| 0.01561
| 0.765203
| 0.705366
| 0.705366
| 0.705366
| 0.688455
| 0.688455
| 0
| 0.00759
| 0.320125
| 5,426
| 143
| 161
| 37.944056
| 0.825969
| 0.279027
| 0
| 0.605634
| 0
| 0
| 0.152365
| 0.075083
| 0
| 0
| 0
| 0.013986
| 0
| 1
| 0.056338
| false
| 0
| 0.042254
| 0
| 0.169014
| 0.028169
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c5af56e271b048fe5cdaec6606415f3e92c079de
| 119
|
py
|
Python
|
locale/pot/api/plotting/_autosummary/pyvista-themes-ParaViewTheme-transparent_background-1.py
|
tkoyama010/pyvista-doc-translations
|
23bb813387b7f8bfe17e86c2244d5dd2243990db
|
[
"MIT"
] | 4
|
2020-08-07T08:19:19.000Z
|
2020-12-04T09:51:11.000Z
|
locale/pot/api/plotting/_autosummary/pyvista-themes-DarkTheme-transparent_background-1.py
|
tkoyama010/pyvista-doc-translations
|
23bb813387b7f8bfe17e86c2244d5dd2243990db
|
[
"MIT"
] | 19
|
2020-08-06T00:24:30.000Z
|
2022-03-30T19:22:24.000Z
|
locale/pot/api/plotting/_autosummary/pyvista-themes-ParaViewTheme-transparent_background-1.py
|
tkoyama010/pyvista-doc-translations
|
23bb813387b7f8bfe17e86c2244d5dd2243990db
|
[
"MIT"
] | 1
|
2021-03-09T07:50:40.000Z
|
2021-03-09T07:50:40.000Z
|
# Set transparent_background globally to ``True``.
#
# Auto-generated documentation example: toggles the flag on the global
# plotting theme so it applies to subsequently created plotters.
import pyvista
pyvista.global_theme.transparent_background = True
| 23.8
| 50
| 0.815126
| 14
| 119
| 6.714286
| 0.714286
| 0.446809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10084
| 119
| 4
| 51
| 29.75
| 0.878505
| 0.403361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c5b483cde2c8eac7b9c01cfb905cfdafb462ee88
| 50
|
py
|
Python
|
python/lib/Lib/site-packages/django/contrib/sitemaps/tests/__init__.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
django/contrib/sitemaps/tests/__init__.py
|
mradziej/django
|
5d38965743a369981c9a738a298f467f854a2919
|
[
"BSD-3-Clause"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
django/contrib/sitemaps/tests/__init__.py
|
mradziej/django
|
5d38965743a369981c9a738a298f467f854a2919
|
[
"BSD-3-Clause"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
from django.contrib.sitemaps.tests.basic import *
| 25
| 49
| 0.82
| 7
| 50
| 5.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 50
| 1
| 50
| 50
| 0.891304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c5bf9d92dd13fa710d30a9d56f21bc8df40e7e68
| 8,385
|
py
|
Python
|
tests/testcases/test_main.py
|
L-Net-1992/towhee
|
471de97bf9c5443efaf3b62fd440b3ebdb6d5903
|
[
"Apache-2.0"
] | null | null | null |
tests/testcases/test_main.py
|
L-Net-1992/towhee
|
471de97bf9c5443efaf3b62fd440b3ebdb6d5903
|
[
"Apache-2.0"
] | null | null | null |
tests/testcases/test_main.py
|
L-Net-1992/towhee
|
471de97bf9c5443efaf3b62fd440b3ebdb6d5903
|
[
"Apache-2.0"
] | null | null | null |
# coding : UTF-8
from operator import methodcaller
from test_image_embedding import *
from test_pipeline import *
from test_audio_embedding import *
def pipeline_register():
    """Return the names of every pipeline exercised by the generic
    pipeline test suite (image pipelines first, then audio)."""
    image_pipelines = [
        "towhee/image-embedding-efficientnetb5",
        "towhee/image-embedding-efficientnetb7",
        "towhee/image-embedding-resnet101",
        "towhee/image-embedding-swinbase",
        "towhee/image-embedding-swinlarge",
        "towhee/image-embedding-vitlarge",
    ]
    audio_pipelines = [
        "towhee/audio-embedding-clmr",
        "towhee/audio-embedding-vggish",
    ]
    return ["image-embedding"] + image_pipelines + audio_pipelines
def pipeline_runner():
    """Run the invalid-pipeline suite once, then the valid-pipeline suite
    against every registered pipeline name.

    PASS/FAIL per case is only printed, never aggregated; always returns
    True.  Suite methods signal success by returning None.
    """

    def _run_suite(test_cls, *args):
        # Instantiate the suite and invoke every public method via
        # methodcaller, forwarding `args` (the pipeline name, if any).
        obj = test_cls()
        for func in dir(test_cls):
            if func.startswith("__"):
                continue
            # Label is "func" or "func:pipeline", matching the original logs.
            name = func if not args else "%s:%s" % (func, args[0])
            print("Testing %s" % name)
            res = methodcaller(func, *args)(obj)
            # `is None` (identity) instead of the original `== None`.
            if res is None:
                print("%s PASS" % name)
            else:
                print("%s FAIL" % name)

    _run_suite(TestPipelineInvalid)
    for pipeline_name in pipeline_register():
        _run_suite(TestPipelineValid, pipeline_name)
    return True
def image_class_pipeline_register():
    """Return the image-embedding test matrix.

    Returns:
        tuple: (pipeline names, matching embedding sizes, test-case names
        to skip).  efficientnetb7 is excluded for memory shortage, and the
        multi-thread/repeat/timing cases are skipped for the same reason.
    """
    pipeline_names = [
        "image-embedding",
        "towhee/image-embedding-efficientnetb5",
        "towhee/image-embedding-resnet101",
        "towhee/image-embedding-resnet50",
        "towhee/image-embedding-swinbase",
        "towhee/image-embedding-swinlarge",
        "towhee/image-embedding-vitlarge",
    ]
    embedding_sizes = [2048, 2048, 2048, 2048, 1024, 1536, 1024]
    skipped_cases = [
        "test_embedding_concurrent_multi_threads",
        "test_embedding_more_times",
        "test_embedding_avg_time",
    ]
    return pipeline_names, embedding_sizes, skipped_cases
def image_class_pipeline_runner():
    """Run the invalid, valid, stress and performance image-embedding suites
    for every registered image pipeline.

    PASS/FAIL per case is only printed; always returns True.  Suite methods
    signal success by returning 1.  The four original copy-pasted loops are
    collapsed into one helper.
    """
    pipeline_names, embedding_sizes, skipped_cases = image_class_pipeline_register()

    def _run_suite(test_cls, pipeline_name, *extra):
        # One fresh suite instance per pipeline; skip dunders and the
        # memory-heavy cases listed in skipped_cases.
        obj = test_cls()
        for func in dir(test_cls):
            if func in skipped_cases or func.startswith("__"):
                continue
            print("Testing %s:%s" % (func, pipeline_name))
            res = methodcaller(func, pipeline_name, *extra)(obj)
            if res == 1:
                print("%s:%s PASS" % (func, pipeline_name))
            else:
                print("%s:%s FAIL" % (func, pipeline_name))

    for pipeline_name, embedding_size_each in zip(pipeline_names, embedding_sizes):
        # The invalid suite takes only the pipeline name; the others also
        # take the expected embedding size.
        _run_suite(TestImageEmbeddingInvalid, pipeline_name)
        _run_suite(TestImageEmbeddingValid, pipeline_name, embedding_size_each)
        _run_suite(TestImageEmbeddingStress, pipeline_name, embedding_size_each)
        _run_suite(TestImageEmbeddingPerformance, pipeline_name, embedding_size_each)
    return True
def audio_class_pipeline_register():
    """Return the audio-embedding test matrix.

    Returns:
        tuple: (pipeline names, matching embedding sizes, test-case names
        to skip).  The clmr pipeline is excluded for memory shortage, and
        the multi-thread/repeat/timing cases are skipped for the same
        reason.
    """
    pipeline_names = ["towhee/audio-embedding-vggish"]
    embedding_sizes = [128]
    skipped_cases = [
        "test_embedding_concurrent_multi_threads",
        "test_embedding_more_times",
        "test_embedding_avg_time",
    ]
    return pipeline_names, embedding_sizes, skipped_cases
def audio_class_pipeline_runner():
    """Run the invalid, valid, stress and performance audio-embedding suites
    for every registered audio pipeline.

    PASS/FAIL per case is only printed; always returns True.  Suite methods
    signal success by returning 1.  The four original copy-pasted loops are
    collapsed into one helper.
    """
    pipeline_names, embedding_sizes, skipped_cases = audio_class_pipeline_register()

    def _run_suite(test_cls, pipeline_name, *extra):
        # One fresh suite instance per pipeline; skip dunders and the
        # memory-heavy cases listed in skipped_cases.
        obj = test_cls()
        for func in dir(test_cls):
            if func in skipped_cases or func.startswith("__"):
                continue
            print("Testing %s:%s" % (func, pipeline_name))
            res = methodcaller(func, pipeline_name, *extra)(obj)
            if res == 1:
                print("%s:%s PASS" % (func, pipeline_name))
            else:
                print("%s:%s FAIL" % (func, pipeline_name))

    for pipeline_name, embedding_size_each in zip(pipeline_names, embedding_sizes):
        # The invalid suite takes only the pipeline name; the others also
        # take the expected embedding size.
        _run_suite(TestAudioEmbeddingInvalid, pipeline_name)
        _run_suite(TestAudioEmbeddingValid, pipeline_name, embedding_size_each)
        _run_suite(TestAudioEmbeddingStress, pipeline_name, embedding_size_each)
        _run_suite(TestAudioEmbeddingPerformance, pipeline_name, embedding_size_each)
    return True
def test_caller():
    # Top-level driver: run the generic pipeline suite, then the image
    # embedding matrix.  Audio suite disabled pending issue 463.
    pipeline_runner()
    image_class_pipeline_runner()
    # skip audio tests for issue 463
    # audio_class_pipeline_runner()
    return True

if __name__ == '__main__':
    test_caller()
| 41.305419
| 119
| 0.599761
| 865
| 8,385
| 5.559538
| 0.10289
| 0.097318
| 0.119775
| 0.024953
| 0.762944
| 0.745477
| 0.721148
| 0.704928
| 0.684134
| 0.660012
| 0
| 0.017018
| 0.299225
| 8,385
| 202
| 120
| 41.509901
| 0.801396
| 0.095289
| 0
| 0.641892
| 0
| 0
| 0.136418
| 0.086319
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047297
| false
| 0.067568
| 0.027027
| 0
| 0.121622
| 0.202703
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c5eb56216eb68156c93b75afbd81803fa965682d
| 91
|
py
|
Python
|
Estimator/test_estimator.py
|
afafelwafi/TweetsPopularity
|
158d5f76ac4a963b0af3eec9a29da51cd95fe0e5
|
[
"MIT"
] | 1
|
2022-01-07T17:44:40.000Z
|
2022-01-07T17:44:40.000Z
|
Estimator/test_estimator.py
|
afafelwafi/TweetsPopularity
|
158d5f76ac4a963b0af3eec9a29da51cd95fe0e5
|
[
"MIT"
] | null | null | null |
Estimator/test_estimator.py
|
afafelwafi/TweetsPopularity
|
158d5f76ac4a963b0af3eec9a29da51cd95fe0e5
|
[
"MIT"
] | null | null | null |
# Import testing package
import pytest
#Import estimator
from estimator import Estimator
| 13
| 31
| 0.824176
| 11
| 91
| 6.818182
| 0.545455
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 91
| 6
| 32
| 15.166667
| 0.974026
| 0.417582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c5efa1526e10edc31dbdc597bd55fa9ee3934b2b
| 40
|
py
|
Python
|
src/CryptoLibrary/utils/__init__.py
|
rfabbris/robotframework-crypto
|
c93364b36ae68a44e4b717d3b3402b4169ee6750
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-11-12T14:02:01.000Z
|
2021-01-06T03:54:44.000Z
|
src/CryptoLibrary/utils/__init__.py
|
rfabbris/robotframework-crypto
|
c93364b36ae68a44e4b717d3b3402b4169ee6750
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/CryptoLibrary/utils/__init__.py
|
rfabbris/robotframework-crypto
|
c93364b36ae68a44e4b717d3b3402b4169ee6750
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
from .cryptoutility import CryptoUtility
| 40
| 40
| 0.9
| 4
| 40
| 9
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 40
| 1
| 40
| 40
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a891e10b8bc00f2d817a05ed9cf62cd259175144
| 46
|
py
|
Python
|
06_string/00_string.py
|
hemuke/python
|
bc99f2b5aee997083ae31f59a2b33db48c8255f3
|
[
"Apache-2.0"
] | null | null | null |
06_string/00_string.py
|
hemuke/python
|
bc99f2b5aee997083ae31f59a2b33db48c8255f3
|
[
"Apache-2.0"
] | null | null | null |
06_string/00_string.py
|
hemuke/python
|
bc99f2b5aee997083ae31f59a2b33db48c8255f3
|
[
"Apache-2.0"
] | null | null | null |
#! /root/anaconda3/bin/python
print(dir(str))
| 15.333333
| 29
| 0.717391
| 7
| 46
| 4.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 0.065217
| 46
| 2
| 30
| 23
| 0.744186
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
a8aeeb8918a04382678422fe9f8240c275a1516d
| 45
|
py
|
Python
|
run.py
|
chall68/BlackWatch
|
0b95d69e4b7de9213a031557e9aff54ce35b12dd
|
[
"MIT"
] | null | null | null |
run.py
|
chall68/BlackWatch
|
0b95d69e4b7de9213a031557e9aff54ce35b12dd
|
[
"MIT"
] | null | null | null |
run.py
|
chall68/BlackWatch
|
0b95d69e4b7de9213a031557e9aff54ce35b12dd
|
[
"MIT"
] | null | null | null |
from BlackWatch import restAPI
restAPI.run()
| 15
| 30
| 0.822222
| 6
| 45
| 6.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 45
| 2
| 31
| 22.5
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a8cbef8bdb4de5c1216b138f0cb9fc99e2051dec
| 130
|
py
|
Python
|
bslparloursite/videolibrary/admin.py
|
natfarleydev/thebslparlour
|
ebb2588282cdb2a977ec6c5f8d82cec4e8fd1f99
|
[
"CC0-1.0"
] | 1
|
2016-01-06T23:13:11.000Z
|
2016-01-06T23:13:11.000Z
|
bslparloursite/videolibrary/admin.py
|
natfarleydev/thebslparlour
|
ebb2588282cdb2a977ec6c5f8d82cec4e8fd1f99
|
[
"CC0-1.0"
] | 4
|
2021-03-18T20:15:04.000Z
|
2021-06-10T17:52:31.000Z
|
bslparloursite/videolibrary/admin.py
|
natfarleydev/thebslparlour
|
ebb2588282cdb2a977ec6c5f8d82cec4e8fd1f99
|
[
"CC0-1.0"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import SourceVideo
admin.site.register(SourceVideo)
| 16.25
| 32
| 0.807692
| 17
| 130
| 6.176471
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130769
| 130
| 7
| 33
| 18.571429
| 0.929204
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
763f1ea9f6bdd6bb0882ec413d42e2cb1ba34a26
| 6,642
|
py
|
Python
|
tests/sources/python/4_worker_in_master_cooperative/src/modules/test_objects.py
|
ramonamela/compss
|
3b36b4264ac5f58476f5b89a452d9200b4702020
|
[
"Apache-2.0"
] | 31
|
2018-03-06T09:30:03.000Z
|
2022-03-23T09:51:05.000Z
|
tests/sources/python/4_worker_in_master_cooperative/src/modules/test_objects.py
|
ramonamela/compss
|
3b36b4264ac5f58476f5b89a452d9200b4702020
|
[
"Apache-2.0"
] | 4
|
2017-10-25T12:20:52.000Z
|
2019-03-20T14:17:40.000Z
|
tests/sources/python/4_worker_in_master_cooperative/src/modules/test_objects.py
|
mF2C/COMPSs
|
a74d97346121382a8a40ca15fa619e6e4cea917f
|
[
"Apache-2.0"
] | 15
|
2018-06-07T10:03:27.000Z
|
2022-02-23T14:59:42.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
PyCOMPSs Testbench Tasks
========================
"""
# Imports
import unittest
from modules.utils import verify_line
from pycompss.api.api import compss_barrier
from pycompss.api.task import task
from pycompss.api.parameter import INOUT
from pycompss.api.constraint import constraint
PARALLEL_TEST_COUNT = 20
INITIAL_CONTENT = "This is the initial content of the file"
UPDATED_CONTENT_1 = "This is the updated content 1 of the file"
class StringWrapper(object):
"""
Object class shared among tasks.
"""
def __init__(self):
self.value = None
@task(returns=1)
def create_object_with_content(content):
"""
Creates a new StringWrapper with the content passed in.
"""
return_sw = StringWrapper()
return_sw.value = content
return return_sw
@constraint(processor_architecture="master")
@task(returns=1)
def create_object_with_content_master(content):
"""
Creates a new StringWrapper with the content passed in.
"""
return_sw = StringWrapper()
return_sw.value = content
return return_sw
@constraint(processor_architecture="worker")
@task(returns=1)
def create_object_with_content_worker(content):
"""
Creates a new StringWrapper with the content passed in.
"""
return_sw = StringWrapper()
return_sw.value = content
return return_sw
@constraint(processor_architecture="worker", processor_name="MainProcessor01")
@task(returns=1)
def create_object_with_content_worker01(content):
"""
Creates a new StringWrapper with the content passed in.
"""
return_sw = StringWrapper()
return_sw.value = content
return return_sw
@constraint(processor_architecture="worker", processor_name="MainProcessor02")
@task(returns=1)
def create_object_with_content_worker02(content):
"""
Creates a new StringWrapper with the content passed in.
"""
return_sw = StringWrapper()
return_sw.value = content
return return_sw
@task()
def check_object_with_content(content, input_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value.
"""
line = input_sw.value
verify_line(line, content)
@constraint(processor_architecture="master")
@task()
def check_object_with_content_master(content, input_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value.
"""
line = input_sw.value
verify_line(line, content)
@constraint(processor_architecture="worker")
@task()
def check_object_with_content_worker(content, input_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value.
"""
line = input_sw.value
verify_line(line, content)
@constraint(processor_architecture="worker", processor_name="MainProcessor01")
@task()
def check_object_with_content_worker01(content, input_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value.
"""
line = input_sw.value
verify_line(line, content)
@constraint(processor_architecture="worker", processor_name="MainProcessor02")
@task()
def check_object_with_content_worker02(content, input_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value.
"""
line = input_sw.value
verify_line(line, content)
@task(inout_sw=INOUT)
def check_and_update_object_with_content(content, new_content, inout_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value and updates its value.
"""
line = inout_sw.value
verify_line(line, content)
inout_sw.value = new_content
@constraint(processor_architecture="master")
@task(inout_sw=INOUT)
def check_and_update_object_with_content_master(content, new_content, inout_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value and updates its value.
"""
line = inout_sw.value
verify_line(line, content)
inout_sw.value = new_content
@constraint(processor_architecture="worker")
@task(inout_sw=INOUT)
def check_and_update_object_with_content_worker(content, new_content, inout_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value and updates its value.
"""
line = inout_sw.value
verify_line(line, content)
inout_sw.value = new_content
@constraint(processor_architecture="worker", processor_name="MainProcessor01")
@task(inout_sw=INOUT)
def check_and_update_object_with_content_worker01(content, new_content, inout_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value and updates its value.
"""
line = inout_sw.value
verify_line(line, content)
inout_sw.value = new_content
@constraint(processor_architecture="worker", processor_name="MainProcessor02")
@task(inout_sw=INOUT)
def check_and_update_object_with_content_worker02(content, new_content, inout_sw):
"""
Verifies that the content of the StringWrapper on the path matches
the expected value and updates its value.
"""
line = inout_sw.value
verify_line(line, content)
inout_sw.value = new_content
class TestObjects(unittest.TestCase):
"""
Unit Test verifying the execution of a task passing in object parameters
"""
def test_master_producer_worker_consumer_object(self):
print("Master produces object, worker consumes")
stringwrapper = create_object_with_content_master(INITIAL_CONTENT)
check_object_with_content_worker(INITIAL_CONTENT, stringwrapper)
compss_barrier()
print("\t OK")
def test_worker_producer_master_consumer_object(self):
print("Worker produces object, master consumes")
stringwrapper = create_object_with_content_worker(INITIAL_CONTENT)
check_object_with_content_master(INITIAL_CONTENT, stringwrapper)
compss_barrier()
print("\t OK")
def test_master_producer_worker_consumer_master_updates_object(self):
print("Master produces object, several workers consume, master updates, worker reads")
stringwrapper = create_object_with_content_master(INITIAL_CONTENT)
for i in range(0, PARALLEL_TEST_COUNT):
check_object_with_content_worker(INITIAL_CONTENT, stringwrapper)
check_and_update_object_with_content(INITIAL_CONTENT, UPDATED_CONTENT_1, stringwrapper)
check_object_with_content_worker(UPDATED_CONTENT_1, stringwrapper)
compss_barrier()
print("\t OK")
| 29.918919
| 95
| 0.737428
| 848
| 6,642
| 5.508255
| 0.117925
| 0.04924
| 0.083708
| 0.036395
| 0.84179
| 0.816528
| 0.720831
| 0.720831
| 0.631985
| 0.617427
| 0
| 0.006775
| 0.177808
| 6,642
| 221
| 96
| 30.054299
| 0.848563
| 0.220114
| 0
| 0.663793
| 0
| 0
| 0.084513
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163793
| false
| 0
| 0.051724
| 0
| 0.275862
| 0.051724
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
765b97e5de87c20dc02cf3651eb35adefa466a23
| 90
|
py
|
Python
|
etrobosim/comm/__init__.py
|
YoshitakaAtarashi/ETroboSimController
|
fe7821794217e099e565b9e514ae5efdd452bd59
|
[
"MIT"
] | null | null | null |
etrobosim/comm/__init__.py
|
YoshitakaAtarashi/ETroboSimController
|
fe7821794217e099e565b9e514ae5efdd452bd59
|
[
"MIT"
] | null | null | null |
etrobosim/comm/__init__.py
|
YoshitakaAtarashi/ETroboSimController
|
fe7821794217e099e565b9e514ae5efdd452bd59
|
[
"MIT"
] | null | null | null |
from .ETroboSimClient import ETroboSimClient
from .ETroboSimServer import ETroboSimServer
| 30
| 44
| 0.888889
| 8
| 90
| 10
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 90
| 2
| 45
| 45
| 0.97561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
76634f22acc30163b962103ee3f1b894e2a833bd
| 108
|
py
|
Python
|
dudendas/exception.py
|
eikendev/dudendas
|
b03074deac55e4fb2eed105d2685a19c21651b2e
|
[
"MIT"
] | null | null | null |
dudendas/exception.py
|
eikendev/dudendas
|
b03074deac55e4fb2eed105d2685a19c21651b2e
|
[
"MIT"
] | null | null | null |
dudendas/exception.py
|
eikendev/dudendas
|
b03074deac55e4fb2eed105d2685a19c21651b2e
|
[
"MIT"
] | null | null | null |
class DudendasException(Exception):
pass
class DudendasArgumentException(DudendasException):
pass
| 15.428571
| 51
| 0.796296
| 8
| 108
| 10.75
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 108
| 6
| 52
| 18
| 0.934783
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
769548074a798e406f0b6353447326aef507674c
| 201
|
py
|
Python
|
tests/records/models.py
|
chan-dra/django-rest-framework-oauth
|
b85afd29e4bbc85697edabab9644edc5b4fe60de
|
[
"MIT"
] | 87
|
2016-01-24T16:41:02.000Z
|
2021-12-20T21:13:24.000Z
|
tests/records/models.py
|
chan-dra/django-rest-framework-oauth
|
b85afd29e4bbc85697edabab9644edc5b4fe60de
|
[
"MIT"
] | 16
|
2020-02-11T23:19:19.000Z
|
2022-03-11T23:33:40.000Z
|
tests/records/models.py
|
chan-dra/django-rest-framework-oauth
|
b85afd29e4bbc85697edabab9644edc5b4fe60de
|
[
"MIT"
] | 57
|
2016-02-02T05:46:14.000Z
|
2021-03-21T15:46:06.000Z
|
from django.db import models
class Record(models.Model):
account = models.ForeignKey('accounts.Account', blank=True, null=True)
owner = models.ForeignKey('users.User', blank=True, null=True)
| 28.714286
| 74
| 0.736318
| 27
| 201
| 5.481481
| 0.62963
| 0.216216
| 0.175676
| 0.22973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129353
| 201
| 6
| 75
| 33.5
| 0.845714
| 0
| 0
| 0
| 0
| 0
| 0.129353
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
769a5eb0a642f3bb9bb29bc207b7d679cab5f15c
| 1,052
|
py
|
Python
|
wrappers/python/tests/pool/test_close_pool_ledger.py
|
absltkaos/indy-sdk
|
bc14c5b514dc1c76ce62dd7f6bf804120bf69f5e
|
[
"Apache-2.0"
] | null | null | null |
wrappers/python/tests/pool/test_close_pool_ledger.py
|
absltkaos/indy-sdk
|
bc14c5b514dc1c76ce62dd7f6bf804120bf69f5e
|
[
"Apache-2.0"
] | null | null | null |
wrappers/python/tests/pool/test_close_pool_ledger.py
|
absltkaos/indy-sdk
|
bc14c5b514dc1c76ce62dd7f6bf804120bf69f5e
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from indy import pool, error
# noinspection PyUnusedLocal
@pytest.mark.asyncio
@pytest.mark.parametrize("pool_handle_cleanup", [False])
async def test_close_pool_ledger_works(pool_handle, pool_handle_cleanup):
await pool.close_pool_ledger(pool_handle)
# noinspection PyUnusedLocal
@pytest.mark.asyncio
@pytest.mark.parametrize("pool_handle_cleanup", [False])
async def test_close_pool_ledger_works_for_twice(pool_handle, pool_handle_cleanup):
await pool.close_pool_ledger(pool_handle)
with pytest.raises(error.PoolLedgerInvalidPoolHandle):
await pool.close_pool_ledger(pool_handle)
# noinspection PyUnusedLocal
@pytest.mark.asyncio
@pytest.mark.parametrize("pool_handle_cleanup", [False])
async def test_close_pool_ledger_works_for_reopen_after_close(pool_name, pool_config, pool_handle, pool_handle_cleanup):
await pool.close_pool_ledger(pool_handle)
pool_handle = await pool.open_pool_ledger(pool_name, pool_config)
assert pool_handle is not None
await pool.close_pool_ledger(pool_handle)
| 30.941176
| 120
| 0.819392
| 146
| 1,052
| 5.520548
| 0.239726
| 0.198511
| 0.148883
| 0.111663
| 0.754342
| 0.754342
| 0.754342
| 0.712159
| 0.712159
| 0.712159
| 0
| 0
| 0.102662
| 1,052
| 33
| 121
| 31.878788
| 0.853814
| 0.076046
| 0
| 0.578947
| 0
| 0
| 0.058884
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 1
| 0
| false
| 0
| 0.105263
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
76c7173e2ccaafa957a5dd7eac6f0bdfe7279b31
| 98
|
py
|
Python
|
client/clients/admin.py
|
kim-chae-yeon/My.CL
|
2ca236e1791197ee331a6740bf7b5b75147fc995
|
[
"MIT"
] | null | null | null |
client/clients/admin.py
|
kim-chae-yeon/My.CL
|
2ca236e1791197ee331a6740bf7b5b75147fc995
|
[
"MIT"
] | 8
|
2021-09-26T18:50:19.000Z
|
2021-12-09T14:38:47.000Z
|
client/clients/admin.py
|
kim-chae-yeon/My.CL
|
2ca236e1791197ee331a6740bf7b5b75147fc995
|
[
"MIT"
] | 2
|
2021-12-02T12:46:11.000Z
|
2021-12-11T13:31:50.000Z
|
from django.contrib import admin
from .models import CategoryLog
admin.site.register(CategoryLog)
| 24.5
| 32
| 0.846939
| 13
| 98
| 6.384615
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091837
| 98
| 4
| 33
| 24.5
| 0.932584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4f1126cd58c14e070802a99c54699b51c34b21c9
| 77
|
py
|
Python
|
Singleton_Python3/UserSingletonOne.py
|
weaponsX/PythonSingleton
|
994d89936f5fa4a90fd3b37e13a787305e9af668
|
[
"Apache-2.0"
] | null | null | null |
Singleton_Python3/UserSingletonOne.py
|
weaponsX/PythonSingleton
|
994d89936f5fa4a90fd3b37e13a787305e9af668
|
[
"Apache-2.0"
] | null | null | null |
Singleton_Python3/UserSingletonOne.py
|
weaponsX/PythonSingleton
|
994d89936f5fa4a90fd3b37e13a787305e9af668
|
[
"Apache-2.0"
] | null | null | null |
# 使用SingletonOne
from SingletonOne import singleton_one
singleton_one.foo()
| 15.4
| 38
| 0.844156
| 9
| 77
| 7
| 0.777778
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 77
| 5
| 39
| 15.4
| 0.913043
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4f140875a2b3fbd8862661c37381b9289bc32f47
| 43
|
py
|
Python
|
src/conftest.py
|
mberth/ecv-analytics
|
a3f9d90fd22f888517fd7f51037bdea3ef420832
|
[
"MIT"
] | null | null | null |
src/conftest.py
|
mberth/ecv-analytics
|
a3f9d90fd22f888517fd7f51037bdea3ef420832
|
[
"MIT"
] | 5
|
2020-06-21T09:36:08.000Z
|
2021-12-13T20:51:45.000Z
|
src/conftest.py
|
mberth/ecv-analytics
|
a3f9d90fd22f888517fd7f51037bdea3ef420832
|
[
"MIT"
] | null | null | null |
# see https://stackoverflow.com/a/50610630
| 21.5
| 42
| 0.767442
| 6
| 43
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.069767
| 43
| 1
| 43
| 43
| 0.625
| 0.930233
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4f17390e9d999edd05f46498bc2b42f5228c9d66
| 48
|
pyw
|
Python
|
Aboutn/__pycache__/About-n-no-X.pyw
|
AkiraDemenech/About-n-Squares
|
4a2d8644cf6672f109aac81583954645b36da553
|
[
"MIT"
] | 1
|
2020-10-05T17:31:57.000Z
|
2020-10-05T17:31:57.000Z
|
Aboutn/__pycache__/About-n-no-X.pyw
|
AkiraDemenech/About-n-Squares
|
4a2d8644cf6672f109aac81583954645b36da553
|
[
"MIT"
] | null | null | null |
Aboutn/__pycache__/About-n-no-X.pyw
|
AkiraDemenech/About-n-Squares
|
4a2d8644cf6672f109aac81583954645b36da553
|
[
"MIT"
] | null | null | null |
from Aboutn import iniciar
iniciar(fechar=False)
| 24
| 26
| 0.854167
| 7
| 48
| 5.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 2
| 27
| 24
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4f3d2145569e0e3b3784d3b774e5aecc263b6fd9
| 32
|
py
|
Python
|
AsyncLibrary/__init__.py
|
nolivaldeziii/robotframework-async
|
79bbd921f2b08a8000aa24b237083d95a06558e6
|
[
"MIT"
] | null | null | null |
AsyncLibrary/__init__.py
|
nolivaldeziii/robotframework-async
|
79bbd921f2b08a8000aa24b237083d95a06558e6
|
[
"MIT"
] | null | null | null |
AsyncLibrary/__init__.py
|
nolivaldeziii/robotframework-async
|
79bbd921f2b08a8000aa24b237083d95a06558e6
|
[
"MIT"
] | null | null | null |
from .async import AsyncLibrary
| 16
| 31
| 0.84375
| 4
| 32
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4f3f326ccc16ea52535f9df8cb58784145d4571c
| 128
|
py
|
Python
|
handler/admin.py
|
nolan-dyke/capstone_backend
|
0aeb3850fcd9b53fb51104d80892e42fe7683519
|
[
"MIT"
] | null | null | null |
handler/admin.py
|
nolan-dyke/capstone_backend
|
0aeb3850fcd9b53fb51104d80892e42fe7683519
|
[
"MIT"
] | null | null | null |
handler/admin.py
|
nolan-dyke/capstone_backend
|
0aeb3850fcd9b53fb51104d80892e42fe7683519
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import User, Flashcard
admin.site.register(User)
admin.site.register(Flashcard)
| 18.285714
| 35
| 0.8125
| 18
| 128
| 5.777778
| 0.555556
| 0.173077
| 0.326923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101563
| 128
| 6
| 36
| 21.333333
| 0.904348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4f7235a170740a88a04f25c0fbef8de9d9904c9b
| 52
|
py
|
Python
|
oauth2/__init__.py
|
amigus/python3-demo-api
|
e1af352a545cc861fdb5e2175c12e9449f7fd16b
|
[
"MIT"
] | 1
|
2019-12-10T12:18:42.000Z
|
2019-12-10T12:18:42.000Z
|
oauth2/__init__.py
|
amigus/python3-demo-api
|
e1af352a545cc861fdb5e2175c12e9449f7fd16b
|
[
"MIT"
] | null | null | null |
oauth2/__init__.py
|
amigus/python3-demo-api
|
e1af352a545cc861fdb5e2175c12e9449f7fd16b
|
[
"MIT"
] | null | null | null |
from .client import OAuth2Client, OAuth2ClientError
| 26
| 51
| 0.865385
| 5
| 52
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 0.096154
| 52
| 1
| 52
| 52
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4f8c23eb3a620e3012182f29099cfb01e9a8a8c0
| 242
|
py
|
Python
|
schema/admin.py
|
leVirve-arxiv/OuO
|
9a6a1ef50e6aeef8d0b84d1a1a377e5f19050ac2
|
[
"MIT"
] | null | null | null |
schema/admin.py
|
leVirve-arxiv/OuO
|
9a6a1ef50e6aeef8d0b84d1a1a377e5f19050ac2
|
[
"MIT"
] | null | null | null |
schema/admin.py
|
leVirve-arxiv/OuO
|
9a6a1ef50e6aeef8d0b84d1a1a377e5f19050ac2
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from schema.models import Field, Mapping, Template, Graph, Member
admin.site.register(Member)
admin.site.register(Field)
admin.site.register(Mapping)
admin.site.register(Template)
admin.site.register(Graph)
| 24.2
| 65
| 0.81405
| 34
| 242
| 5.794118
| 0.411765
| 0.228426
| 0.431472
| 0.233503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078512
| 242
| 9
| 66
| 26.888889
| 0.883408
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
96cad9a06860f0f2de5292ea68f355d948b7037b
| 91
|
py
|
Python
|
entsoe/exceptions.py
|
duizendnegen/entsoe-py
|
e62b8ec93dd02bacdac58d02c3c3bc5195b80b43
|
[
"MIT"
] | 1
|
2019-02-08T21:26:54.000Z
|
2019-02-08T21:26:54.000Z
|
entsoe/exceptions.py
|
duizendnegen/entsoe-py
|
e62b8ec93dd02bacdac58d02c3c3bc5195b80b43
|
[
"MIT"
] | null | null | null |
entsoe/exceptions.py
|
duizendnegen/entsoe-py
|
e62b8ec93dd02bacdac58d02c3c3bc5195b80b43
|
[
"MIT"
] | null | null | null |
class PaginationError(Exception):
pass
class NoMatchingDataError(Exception):
pass
| 15.166667
| 37
| 0.769231
| 8
| 91
| 8.75
| 0.625
| 0.371429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 91
| 6
| 38
| 15.166667
| 0.921053
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
96d4801143ca66c6b76d49d867ae6ce556e687d5
| 20
|
py
|
Python
|
cloud/__init__.py
|
pmp47/Cloud
|
2fd63df4f92d90508653ea76a37d55c2bd8a7ecc
|
[
"MIT"
] | null | null | null |
cloud/__init__.py
|
pmp47/Cloud
|
2fd63df4f92d90508653ea76a37d55c2bd8a7ecc
|
[
"MIT"
] | null | null | null |
cloud/__init__.py
|
pmp47/Cloud
|
2fd63df4f92d90508653ea76a37d55c2bd8a7ecc
|
[
"MIT"
] | null | null | null |
from .cloud import *
| 20
| 20
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
96d7dcb28be6897240e38b786132c74686d6209c
| 313
|
py
|
Python
|
octicons16px/square_fill.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | 1
|
2021-01-28T06:47:39.000Z
|
2021-01-28T06:47:39.000Z
|
octicons16px/square_fill.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | null | null | null |
octicons16px/square_fill.py
|
andrewp-as-is/octicons16px.py
|
1272dc9f290619d83bd881e87dbd723b0c48844c
|
[
"Unlicense"
] | null | null | null |
OCTICON_SQUARE_FILL = """
<svg class="octicon octicon-square-fill" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16"><path fill-rule="evenodd" d="M5.75 4A1.75 1.75 0 004 5.75v4.5c0 .966.784 1.75 1.75 1.75h4.5A1.75 1.75 0 0012 10.25v-4.5A1.75 1.75 0 0010.25 4h-4.5z"></path></svg>
"""
| 62.6
| 281
| 0.670927
| 70
| 313
| 2.971429
| 0.571429
| 0.072115
| 0.096154
| 0.086538
| 0.086538
| 0
| 0
| 0
| 0
| 0
| 0
| 0.30466
| 0.108626
| 313
| 4
| 282
| 78.25
| 0.44086
| 0
| 0
| 0
| 0
| 0.333333
| 0.907051
| 0.070513
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
96eede1d127ee3004c98478ede13ed64a8d31293
| 1,218
|
py
|
Python
|
tests/testWordGen.py
|
EmidioLP/CharQ
|
7fb857c4481458ce5d09741d78bf0513d44af130
|
[
"MIT"
] | null | null | null |
tests/testWordGen.py
|
EmidioLP/CharQ
|
7fb857c4481458ce5d09741d78bf0513d44af130
|
[
"MIT"
] | 1
|
2021-03-16T19:11:36.000Z
|
2021-03-16T19:12:18.000Z
|
tests/testWordGen.py
|
EmidioLP/CharQ
|
7fb857c4481458ce5d09741d78bf0513d44af130
|
[
"MIT"
] | 2
|
2021-03-16T19:03:43.000Z
|
2021-03-16T20:10:11.000Z
|
import unittest
from sys import path
path.append('..')
from charq.charq import WordGenerate
teste = WordGenerate()
class TestWordGenerate(unittest.TestCase):
def test_class(self):
self.assertEqual(type(teste.val), str)
self.assertEqual(teste.val, 'CharQ')
def test_word(self):
self.assertEqual(len(teste.word(12)), 12)
self.assertEqual(teste.word().islower(), True)
self.assertEqual(teste.word(case='up').isupper(), True)
self.assertEqual(teste.word(case='camel').islower(), False)
self.assertEqual(teste.word(case='camel').isupper(), False)
def test_num(self):
self.assertEqual(type(teste.num(typen='str')), str)
self.assertEqual(type(teste.num()), int)
self.assertEqual(len(teste.num(tam=12, typen='str')), 12)
def test_passw(self):
self.assertEqual(type(teste.passw()), str)
self.assertEqual(len(teste.passw(12)), 12)
if __name__ == '__name__':
unittest.main()
"""
def test_word(self):
self.assertEqual(type(teste.val), str)
def test_word(self):
self.assertEqual(type(teste.val), str)
def test_word(self):
self.assertEqual(type(teste.val), str)
"""
| 25.914894
| 67
| 0.646962
| 155
| 1,218
| 4.987097
| 0.23871
| 0.291074
| 0.172057
| 0.217335
| 0.500647
| 0.389392
| 0.218629
| 0.174644
| 0.174644
| 0.174644
| 0
| 0.012195
| 0.192118
| 1,218
| 46
| 68
| 26.478261
| 0.773374
| 0
| 0
| 0
| 1
| 0
| 0.033233
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.166667
| false
| 0.125
| 0.125
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
96f95a0dfa48d5427abdd81aca8ca7ce20352483
| 112
|
py
|
Python
|
leetcode977.py
|
AmitHasanShuvo/Programming
|
f47ecc626e518a0bf5f9f749afd15ce67bbe737b
|
[
"MIT"
] | 8
|
2019-05-26T19:24:13.000Z
|
2021-03-24T17:36:14.000Z
|
leetcode977.py
|
AmitHasanShuvo/Programming
|
f47ecc626e518a0bf5f9f749afd15ce67bbe737b
|
[
"MIT"
] | null | null | null |
leetcode977.py
|
AmitHasanShuvo/Programming
|
f47ecc626e518a0bf5f9f749afd15ce67bbe737b
|
[
"MIT"
] | 1
|
2020-04-19T04:59:54.000Z
|
2020-04-19T04:59:54.000Z
|
class Solution:
def sortedSquares(self, A: List[int]) -> List[int]:
return sorted(x * x for x in A)
| 28
| 55
| 0.616071
| 18
| 112
| 3.833333
| 0.722222
| 0.202899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 112
| 3
| 56
| 37.333333
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
8c0217b95391132d370a26f38b1cc5e34f30baa6
| 316
|
py
|
Python
|
dashboard/views.py
|
NazmusShakib/django-p1
|
2b25d7dbbaf8c42aa2e7d66949e2879a94516b0b
|
[
"MIT"
] | null | null | null |
dashboard/views.py
|
NazmusShakib/django-p1
|
2b25d7dbbaf8c42aa2e7d66949e2879a94516b0b
|
[
"MIT"
] | 9
|
2020-02-12T00:18:04.000Z
|
2022-02-10T10:38:45.000Z
|
dashboard/views.py
|
NazmusShakib/django-p1
|
2b25d7dbbaf8c42aa2e7d66949e2879a94516b0b
|
[
"MIT"
] | null | null | null |
from django.http import HttpResponse
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
@login_required(login_url="/")
def dashboard(request):
return render(request, 'dashboard.html')
@login_required
def mailbox(request):
return render(request, 'mailbox.html')
| 22.571429
| 57
| 0.787975
| 40
| 316
| 6.125
| 0.475
| 0.122449
| 0.146939
| 0.212245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 316
| 13
| 58
| 24.307692
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0.085443
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0.222222
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
8c5507b9d1bb55728e4f4a251e8820e706b1a799
| 656
|
py
|
Python
|
SAT/base_element.py
|
ktodorov/uva-kr-19
|
36780a42cde1df2cf827dc7c4e239c649650bf4e
|
[
"MIT"
] | null | null | null |
SAT/base_element.py
|
ktodorov/uva-kr-19
|
36780a42cde1df2cf827dc7c4e239c649650bf4e
|
[
"MIT"
] | null | null | null |
SAT/base_element.py
|
ktodorov/uva-kr-19
|
36780a42cde1df2cf827dc7c4e239c649650bf4e
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
class BaseElement(ABC):
# @abstractmethod
# def initialize_from_string(self, text):
# pass
@abstractmethod
def is_correct(self) -> bool:
pass
@abstractmethod
def is_empty(self) -> bool:
pass
@abstractmethod
def contains_empty_clause(self, levels_further = 1) -> bool:
pass
@abstractmethod
def get_literal_string(self) -> str:
pass
@abstractmethod
def get_sign(self) -> bool:
pass
@abstractmethod
def get_number(self) -> int:
pass
@abstractmethod
def has_value(self) -> bool:
pass
| 18.742857
| 64
| 0.609756
| 70
| 656
| 5.542857
| 0.414286
| 0.350515
| 0.378866
| 0.257732
| 0.304124
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002193
| 0.304878
| 656
| 35
| 65
| 18.742857
| 0.848684
| 0.097561
| 0
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.304348
| false
| 0.304348
| 0.043478
| 0
| 0.391304
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
8b25f6037e053a751271ff053c18e773fd258c8f
| 205
|
py
|
Python
|
settings.py
|
RafaelDamiani/python-flask-api
|
ddf442791b751675a1a1782c67542f97b04e1265
|
[
"MIT"
] | null | null | null |
settings.py
|
RafaelDamiani/python-flask-api
|
ddf442791b751675a1a1782c67542f97b04e1265
|
[
"MIT"
] | null | null | null |
settings.py
|
RafaelDamiani/python-flask-api
|
ddf442791b751675a1a1782c67542f97b04e1265
|
[
"MIT"
] | null | null | null |
from flask import Flask
import json
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///c:/projects/python-flask-api/database.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
| 29.285714
| 92
| 0.785366
| 28
| 205
| 5.464286
| 0.678571
| 0.143791
| 0.248366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078049
| 205
| 7
| 93
| 29.285714
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8b344fc4e6614be3bab884930babe230edfee6ab
| 34
|
py
|
Python
|
healthcare/backends/djhealth/__init__.py
|
caktus/rapidsms-healthcare
|
0effdb2036129702c15530510633561d0c43d6d4
|
[
"BSD-3-Clause"
] | 9
|
2015-08-31T09:22:28.000Z
|
2019-04-27T04:06:00.000Z
|
healthcare/backends/djhealth/__init__.py
|
caktus/rapidsms-healthcare
|
0effdb2036129702c15530510633561d0c43d6d4
|
[
"BSD-3-Clause"
] | null | null | null |
healthcare/backends/djhealth/__init__.py
|
caktus/rapidsms-healthcare
|
0effdb2036129702c15530510633561d0c43d6d4
|
[
"BSD-3-Clause"
] | 7
|
2015-09-17T00:56:39.000Z
|
2020-03-14T11:08:17.000Z
|
from .storage import DjangoStorage
| 34
| 34
| 0.882353
| 4
| 34
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8b493414de2402e2f074913fbdba387290a60066
| 57
|
py
|
Python
|
pentest-scripts/learning-python-for-forensics/Chapter 8/plugins/__init__.py
|
paulveillard/cybersecurity-penetration-testing
|
a5afff13ec25afd0cf16ef966d35bddb91518af4
|
[
"Apache-2.0"
] | 6
|
2021-12-07T21:02:12.000Z
|
2022-03-03T12:08:14.000Z
|
pentest-scripts/learning-python-for-forensics/Chapter 8/plugins/__init__.py
|
paulveillard/cybersecurity-penetration-testing
|
a5afff13ec25afd0cf16ef966d35bddb91518af4
|
[
"Apache-2.0"
] | null | null | null |
pentest-scripts/learning-python-for-forensics/Chapter 8/plugins/__init__.py
|
paulveillard/cybersecurity-penetration-testing
|
a5afff13ec25afd0cf16ef966d35bddb91518af4
|
[
"Apache-2.0"
] | 1
|
2022-01-15T23:57:36.000Z
|
2022-01-15T23:57:36.000Z
|
import exif_parser
import id3_parser
import office_parser
| 19
| 20
| 0.912281
| 9
| 57
| 5.444444
| 0.555556
| 0.489796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019231
| 0.087719
| 57
| 3
| 20
| 19
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8ce37c42961ca61895c0b159b62e2adb5ff12073
| 37
|
py
|
Python
|
tests/__init__.py
|
Gokender/minotor
|
81a9dd11183fbabfdf0810050636c774cfe00416
|
[
"MIT"
] | 3
|
2021-06-19T06:06:47.000Z
|
2021-07-31T23:40:45.000Z
|
tests/__init__.py
|
Gokender/minotorr
|
70ecfbae089d94b7967bdbc01a47a64b79b66bca
|
[
"MIT"
] | 1
|
2020-07-10T17:03:53.000Z
|
2020-07-13T08:58:34.000Z
|
tests/__init__.py
|
Gokender/minotorr
|
70ecfbae089d94b7967bdbc01a47a64b79b66bca
|
[
"MIT"
] | null | null | null |
"""Unit test package for minotor."""
| 18.5
| 36
| 0.675676
| 5
| 37
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 37
| 1
| 37
| 37
| 0.78125
| 0.810811
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.