hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
637bd5b6a0fc52cd1b8316822afe9b47e0d46a7b
| 118
|
py
|
Python
|
recrawler/recrawl/__init__.py
|
Andyccs/sport-news-retrieval
|
12aad1f5c2436af4decd5cacbc25d4909d4551e0
|
[
"MIT"
] | 6
|
2016-04-12T06:25:59.000Z
|
2017-05-23T18:10:40.000Z
|
recrawler/recrawl/__init__.py
|
Andyccs/sport-news-retrieval
|
12aad1f5c2436af4decd5cacbc25d4909d4551e0
|
[
"MIT"
] | 3
|
2016-04-14T07:29:12.000Z
|
2016-04-14T16:12:06.000Z
|
recrawler/recrawl/__init__.py
|
Andyccs/sport-news-retrieval
|
12aad1f5c2436af4decd5cacbc25d4909d4551e0
|
[
"MIT"
] | 2
|
2016-04-25T18:43:16.000Z
|
2020-03-17T15:20:41.000Z
|
import os
import sys
sys.path.append(os.path.abspath('../crawler'))
sys.path.append(os.path.abspath('../classifier'))
| 23.6
| 49
| 0.728814
| 18
| 118
| 4.777778
| 0.444444
| 0.162791
| 0.302326
| 0.348837
| 0.604651
| 0.604651
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050847
| 118
| 4
| 50
| 29.5
| 0.767857
| 0
| 0
| 0
| 0
| 0
| 0.194915
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
63c4ddd562e7cb0305de3a123c4349f94c56207c
| 55
|
py
|
Python
|
backend/src/shared/domain_model_abc.py
|
tuod/bikerides
|
ef5dfd25c6642f740c36e68cb4548b5c80d7ab44
|
[
"Apache-2.0"
] | null | null | null |
backend/src/shared/domain_model_abc.py
|
tuod/bikerides
|
ef5dfd25c6642f740c36e68cb4548b5c80d7ab44
|
[
"Apache-2.0"
] | 6
|
2021-06-20T20:20:14.000Z
|
2021-06-21T21:33:05.000Z
|
backend/src/shared/domain_model_abc.py
|
tuod/bikerides
|
ef5dfd25c6642f740c36e68cb4548b5c80d7ab44
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABC
class DomainModel(ABC):
pass
| 9.166667
| 23
| 0.709091
| 8
| 55
| 4.875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.236364
| 55
| 5
| 24
| 11
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
9845a762afb8d3bded276c8da7b79d3d68cd90a0
| 43
|
py
|
Python
|
Configuration/AlCa/python/GlobalTag_condDBv2.py
|
gputtley/cmssw
|
c1ef8454804e4ebea8b65f59c4a952a6c94fde3b
|
[
"Apache-2.0"
] | 6
|
2017-09-08T14:12:56.000Z
|
2022-03-09T23:57:01.000Z
|
Configuration/AlCa/python/GlobalTag_condDBv2.py
|
gputtley/cmssw
|
c1ef8454804e4ebea8b65f59c4a952a6c94fde3b
|
[
"Apache-2.0"
] | 545
|
2017-09-19T17:10:19.000Z
|
2022-03-07T16:55:27.000Z
|
Configuration/AlCa/python/GlobalTag_condDBv2.py
|
gputtley/cmssw
|
c1ef8454804e4ebea8b65f59c4a952a6c94fde3b
|
[
"Apache-2.0"
] | 14
|
2017-10-04T09:47:21.000Z
|
2019-10-23T18:04:45.000Z
|
from Configuration.AlCa.GlobalTag import *
| 21.5
| 42
| 0.837209
| 5
| 43
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
986e9873f3cf0907380c9590738c7700e8fbd298
| 31
|
py
|
Python
|
rozet/__init__.py
|
RozetProtocol/rozetPythonSdk
|
ced086ec42e9963136fee08f76a4361406ed9f64
|
[
"MIT"
] | null | null | null |
rozet/__init__.py
|
RozetProtocol/rozetPythonSdk
|
ced086ec42e9963136fee08f76a4361406ed9f64
|
[
"MIT"
] | null | null | null |
rozet/__init__.py
|
RozetProtocol/rozetPythonSdk
|
ced086ec42e9963136fee08f76a4361406ed9f64
|
[
"MIT"
] | null | null | null |
from .rozet import Rozet, Badge
| 31
| 31
| 0.806452
| 5
| 31
| 5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7f74f032998303e51fd412520ab34653773127a3
| 10,859
|
py
|
Python
|
devel/lib/python2.7/dist-packages/plutodrone/srv/_PlutoPilot.py
|
EveVengerov/Gesture-Controlling-Drone
|
8fe38dbfdc496472e13e76bcdb55b471f51b42ea
|
[
"MIT"
] | 2
|
2021-09-22T19:06:19.000Z
|
2021-09-22T20:22:40.000Z
|
devel/lib/python2.7/dist-packages/plutodrone/srv/_PlutoPilot.py
|
EveVengerov/Gesture-Controlling-Drone
|
8fe38dbfdc496472e13e76bcdb55b471f51b42ea
|
[
"MIT"
] | null | null | null |
devel/lib/python2.7/dist-packages/plutodrone/srv/_PlutoPilot.py
|
EveVengerov/Gesture-Controlling-Drone
|
8fe38dbfdc496472e13e76bcdb55b471f51b42ea
|
[
"MIT"
] | null | null | null |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from plutodrone/PlutoPilotRequest.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class PlutoPilotRequest(genpy.Message):
_md5sum = "f8dca44e14493750e1a83a8276b2d8e4"
_type = "plutodrone/PlutoPilotRequest"
_has_header = False # flag to mark the presence of a Header object
_full_text = """
int32 roll
int32 pitch
int32 yaw
float32 accX
float32 accY
float32 accZ
float32 gyroX
float32 gyroY
float32 gyroZ
float32 magX
float32 magY
float32 magZ
float32 alt
float32 battery
int32 rssi
"""
__slots__ = ['roll','pitch','yaw','accX','accY','accZ','gyroX','gyroY','gyroZ','magX','magY','magZ','alt','battery','rssi']
_slot_types = ['int32','int32','int32','float32','float32','float32','float32','float32','float32','float32','float32','float32','float32','float32','int32']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
roll,pitch,yaw,accX,accY,accZ,gyroX,gyroY,gyroZ,magX,magY,magZ,alt,battery,rssi
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(PlutoPilotRequest, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.roll is None:
self.roll = 0
if self.pitch is None:
self.pitch = 0
if self.yaw is None:
self.yaw = 0
if self.accX is None:
self.accX = 0.
if self.accY is None:
self.accY = 0.
if self.accZ is None:
self.accZ = 0.
if self.gyroX is None:
self.gyroX = 0.
if self.gyroY is None:
self.gyroY = 0.
if self.gyroZ is None:
self.gyroZ = 0.
if self.magX is None:
self.magX = 0.
if self.magY is None:
self.magY = 0.
if self.magZ is None:
self.magZ = 0.
if self.alt is None:
self.alt = 0.
if self.battery is None:
self.battery = 0.
if self.rssi is None:
self.rssi = 0
else:
self.roll = 0
self.pitch = 0
self.yaw = 0
self.accX = 0.
self.accY = 0.
self.accZ = 0.
self.gyroX = 0.
self.gyroY = 0.
self.gyroZ = 0.
self.magX = 0.
self.magY = 0.
self.magZ = 0.
self.alt = 0.
self.battery = 0.
self.rssi = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3i11fi().pack(_x.roll, _x.pitch, _x.yaw, _x.accX, _x.accY, _x.accZ, _x.gyroX, _x.gyroY, _x.gyroZ, _x.magX, _x.magY, _x.magZ, _x.alt, _x.battery, _x.rssi))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
_x = self
start = end
end += 60
(_x.roll, _x.pitch, _x.yaw, _x.accX, _x.accY, _x.accZ, _x.gyroX, _x.gyroY, _x.gyroZ, _x.magX, _x.magY, _x.magZ, _x.alt, _x.battery, _x.rssi,) = _get_struct_3i11fi().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3i11fi().pack(_x.roll, _x.pitch, _x.yaw, _x.accX, _x.accY, _x.accZ, _x.gyroX, _x.gyroY, _x.gyroZ, _x.magX, _x.magY, _x.magZ, _x.alt, _x.battery, _x.rssi))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
_x = self
start = end
end += 60
(_x.roll, _x.pitch, _x.yaw, _x.accX, _x.accY, _x.accZ, _x.gyroX, _x.gyroY, _x.gyroZ, _x.magX, _x.magY, _x.magZ, _x.alt, _x.battery, _x.rssi,) = _get_struct_3i11fi().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_3i11fi = None
def _get_struct_3i11fi():
global _struct_3i11fi
if _struct_3i11fi is None:
_struct_3i11fi = struct.Struct("<3i11fi")
return _struct_3i11fi
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from plutodrone/PlutoPilotResponse.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class PlutoPilotResponse(genpy.Message):
_md5sum = "c7a7b135453cda7e71490802dabf7edd"
_type = "plutodrone/PlutoPilotResponse"
_has_header = False # flag to mark the presence of a Header object
_full_text = """
int32 rcRoll
int32 rcPitch
int32 rcYaw
int32 rcThrottle
int32 rcAUX1
int32 rcAUX2
int32 rcAUX3
int32 rcAUX4
"""
__slots__ = ['rcRoll','rcPitch','rcYaw','rcThrottle','rcAUX1','rcAUX2','rcAUX3','rcAUX4']
_slot_types = ['int32','int32','int32','int32','int32','int32','int32','int32']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
rcRoll,rcPitch,rcYaw,rcThrottle,rcAUX1,rcAUX2,rcAUX3,rcAUX4
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(PlutoPilotResponse, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.rcRoll is None:
self.rcRoll = 0
if self.rcPitch is None:
self.rcPitch = 0
if self.rcYaw is None:
self.rcYaw = 0
if self.rcThrottle is None:
self.rcThrottle = 0
if self.rcAUX1 is None:
self.rcAUX1 = 0
if self.rcAUX2 is None:
self.rcAUX2 = 0
if self.rcAUX3 is None:
self.rcAUX3 = 0
if self.rcAUX4 is None:
self.rcAUX4 = 0
else:
self.rcRoll = 0
self.rcPitch = 0
self.rcYaw = 0
self.rcThrottle = 0
self.rcAUX1 = 0
self.rcAUX2 = 0
self.rcAUX3 = 0
self.rcAUX4 = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_8i().pack(_x.rcRoll, _x.rcPitch, _x.rcYaw, _x.rcThrottle, _x.rcAUX1, _x.rcAUX2, _x.rcAUX3, _x.rcAUX4))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
_x = self
start = end
end += 32
(_x.rcRoll, _x.rcPitch, _x.rcYaw, _x.rcThrottle, _x.rcAUX1, _x.rcAUX2, _x.rcAUX3, _x.rcAUX4,) = _get_struct_8i().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_8i().pack(_x.rcRoll, _x.rcPitch, _x.rcYaw, _x.rcThrottle, _x.rcAUX1, _x.rcAUX2, _x.rcAUX3, _x.rcAUX4))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
_x = self
start = end
end += 32
(_x.rcRoll, _x.rcPitch, _x.rcYaw, _x.rcThrottle, _x.rcAUX1, _x.rcAUX2, _x.rcAUX3, _x.rcAUX4,) = _get_struct_8i().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_8i = None
def _get_struct_8i():
global _struct_8i
if _struct_8i is None:
_struct_8i = struct.Struct("<8i")
return _struct_8i
class PlutoPilot(object):
_type = 'plutodrone/PlutoPilot'
_md5sum = '4694157b8edbc1fcf473057bcd528de1'
_request_class = PlutoPilotRequest
_response_class = PlutoPilotResponse
| 33.82866
| 193
| 0.652638
| 1,534
| 10,859
| 4.447849
| 0.117992
| 0.021984
| 0.03371
| 0.03283
| 0.748498
| 0.743661
| 0.743661
| 0.738531
| 0.723289
| 0.723289
| 0
| 0.037666
| 0.224975
| 10,859
| 320
| 194
| 33.934375
| 0.773051
| 0.233815
| 0
| 0.663677
| 1
| 0
| 0.124937
| 0.021936
| 0
| 0
| 0.002521
| 0
| 0
| 1
| 0.071749
| false
| 0
| 0.035874
| 0
| 0.237668
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7f8ca6924d467738b16bc48a7cff1ed8e459031a
| 82
|
py
|
Python
|
demo_impl/webui/api_blueprint/api_blueprint.py
|
JFF-Bohdan/vrc_t70_demo
|
32c9630f3a216d1ddd98d67f36515d2aa0d15569
|
[
"MIT"
] | null | null | null |
demo_impl/webui/api_blueprint/api_blueprint.py
|
JFF-Bohdan/vrc_t70_demo
|
32c9630f3a216d1ddd98d67f36515d2aa0d15569
|
[
"MIT"
] | null | null | null |
demo_impl/webui/api_blueprint/api_blueprint.py
|
JFF-Bohdan/vrc_t70_demo
|
32c9630f3a216d1ddd98d67f36515d2aa0d15569
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
api_blueprint = Blueprint("API Blueprint", __name__)
| 20.5
| 52
| 0.804878
| 10
| 82
| 6.1
| 0.6
| 0.393443
| 0.688525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 82
| 3
| 53
| 27.333333
| 0.847222
| 0
| 0
| 0
| 0
| 0
| 0.158537
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
f68f620ea3a111655ec40cf46b6f086949bfbbb2
| 109
|
py
|
Python
|
solas/_config/__init__.py
|
domoritz/solas
|
23878fed9efbf14781791dafec26705c6762cfd1
|
[
"Apache-2.0"
] | 2
|
2021-11-09T17:54:43.000Z
|
2022-01-28T15:38:58.000Z
|
solas/_config/__init__.py
|
domoritz/solas
|
23878fed9efbf14781791dafec26705c6762cfd1
|
[
"Apache-2.0"
] | 27
|
2021-04-08T18:58:49.000Z
|
2021-10-11T02:33:35.000Z
|
solas/_config/__init__.py
|
domoritz/solas
|
23878fed9efbf14781791dafec26705c6762cfd1
|
[
"Apache-2.0"
] | 1
|
2021-06-18T02:41:48.000Z
|
2021-06-18T02:41:48.000Z
|
from solas._config import config
from solas._config.config import warning_format
from .config import Config
| 21.8
| 47
| 0.844037
| 16
| 109
| 5.5625
| 0.375
| 0.404494
| 0.337079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119266
| 109
| 4
| 48
| 27.25
| 0.927083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
f6a089f30af7ee171a6f4cb00e71b15196cc548a
| 147
|
py
|
Python
|
losses/__init__.py
|
kilsenp/person-multi-task-dataset
|
2f186cafa3db2c77d8c6c4309b2cadc13d4f92ab
|
[
"MIT"
] | 4
|
2020-10-08T03:31:36.000Z
|
2021-03-06T08:06:23.000Z
|
reid/scripts/triplet_reid/losses/__init__.py
|
VisualComputingInstitute/CROWDBOT_perception
|
df98f3f658c39fb3fa4ac0456f1214f7918009f6
|
[
"MIT"
] | 7
|
2021-06-08T20:55:10.000Z
|
2022-02-10T00:38:32.000Z
|
reid/scripts/triplet_reid/losses/__init__.py
|
VisualComputingInstitute/CROWDBOT_perception
|
df98f3f658c39fb3fa4ac0456f1214f7918009f6
|
[
"MIT"
] | null | null | null |
choices = ["BatchHard", "BatchSoft", "BatchHardWithSoftmax", "BatchHardSingleWithSoftmax", "BatchHardWithJunkSigmoid", "BatchHardWithJunkSoftmax"]
| 73.5
| 146
| 0.809524
| 7
| 147
| 17
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054422
| 147
| 1
| 147
| 147
| 0.856115
| 0
| 0
| 0
| 0
| 0
| 0.761905
| 0.503401
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f6f210ef2ba8b79122ff4cea474e0b99a890db5c
| 70
|
py
|
Python
|
115/indents.py
|
xtakacsx/bitesofpy
|
91487cbf58af5eb58a0343ff2231a90c1032acb0
|
[
"MIT"
] | 1
|
2020-01-10T00:05:34.000Z
|
2020-01-10T00:05:34.000Z
|
115/indents.py
|
xtakacsx/bitesofpy
|
91487cbf58af5eb58a0343ff2231a90c1032acb0
|
[
"MIT"
] | null | null | null |
115/indents.py
|
xtakacsx/bitesofpy
|
91487cbf58af5eb58a0343ff2231a90c1032acb0
|
[
"MIT"
] | null | null | null |
def count_indents(text):
return len(text) - len(text.lstrip(' '))
| 23.333333
| 44
| 0.657143
| 10
| 70
| 4.5
| 0.7
| 0.311111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157143
| 70
| 2
| 45
| 35
| 0.762712
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
63f765122ae8aa2a3b8c72b21f9f38debc8ff17d
| 6,393
|
py
|
Python
|
src/FFEAT/test/strategies/selection/ElitismTest.py
|
PatrikValkovic/MasterThesis
|
6e9f3b186541db6c8395ebc96ace7289d01c805b
|
[
"MIT"
] | null | null | null |
src/FFEAT/test/strategies/selection/ElitismTest.py
|
PatrikValkovic/MasterThesis
|
6e9f3b186541db6c8395ebc96ace7289d01c805b
|
[
"MIT"
] | null | null | null |
src/FFEAT/test/strategies/selection/ElitismTest.py
|
PatrikValkovic/MasterThesis
|
6e9f3b186541db6c8395ebc96ace7289d01c805b
|
[
"MIT"
] | null | null | null |
###############################
#
# Created by Patrik Valkovic
# 3/14/2021
#
###############################
import unittest
import torch as t
import ffeat
from ffeat.strategies import selection
from ffeat.utils import decay
from test.repeat import repeat
class ElitismTest(unittest.TestCase):
def test_should_work(self):
s = selection.Elitism(10, selection.Tournament())
pop, fitness = t.rand((100,60)), t.randn((100,))
(newpop,), kargs = s(fitness, pop)
self.assertEqual(newpop.shape, (100,60))
def test_should_keep_1(self):
s = selection.Elitism(1, lambda *_, **__: ((t.rand((100,60)) + 10,), __))
pop, fitness = t.rand((100,60)), t.randn((100,))
(newpop,), kargs = s(fitness, pop)
self.assertEqual(newpop.shape, (100,60))
self.assertEqual(t.count_nonzero(t.all(newpop == pop, dim=-1)), 1)
def test_should_keep_10(self):
s = selection.Elitism(10, lambda *_, **__: ((t.rand((100,60)) + 10,), __))
pop, fitness = t.rand((100,60)), t.randn((100,))
(newpop,), kargs = s(fitness, pop)
self.assertEqual(newpop.shape, (100,60))
self.assertEqual(t.count_nonzero(t.all(newpop == pop, dim=-1)), 10)
def test_should_keep_1_percentage(self):
s = selection.Elitism(0.01, lambda *_, **__: ((t.rand((100,60)) + 10,), __))
pop, fitness = t.rand((100,60)), t.randn((100,))
(newpop,), kargs = s(fitness, pop)
self.assertEqual(newpop.shape, (100,60))
self.assertEqual(t.count_nonzero(t.all(newpop == pop, dim=-1)), 1)
def test_should_keep_16_percentage(self):
s = selection.Elitism(0.16, lambda *_, **__: ((t.rand((100,60)) + 10,), __))
pop, fitness = t.rand((100,60)), t.randn((100,))
(newpop,), kargs = s(fitness, pop)
self.assertEqual(newpop.shape, (100,60))
self.assertEqual(t.count_nonzero(t.all(newpop == pop, dim=-1)), 16)
def test_should_keep_1_maximization(self):
s = selection.Elitism(1, lambda *_, **__: ((t.rand((100,60)) + 10,), __), maximization=True)
pop, fitness = t.rand((100,60)), t.randn((100,))
best_index = t.argmax(fitness)
(newpop,), kargs = s(fitness, pop)
self.assertEqual(newpop.shape, (100,60))
self.assertEqual(t.count_nonzero(t.all(newpop == pop, dim=-1)), 1)
self.assertTrue(t.all(t.abs(newpop[best_index] - pop[best_index]) < 1e-9))
def test_should_keep_10_maximization(self):
s = selection.Elitism(10, lambda *_, **__: ((t.rand((100,60)) + 10,), __), maximization=True)
pop, fitness = t.rand((100,60)), t.randn((100,))
q = t.quantile(fitness, 0.9)
best_indices = t.where(fitness >= q)[0]
self.assertEqual(len(best_indices), 10)
(newpop,), kargs = s(fitness, pop)
self.assertEqual(newpop.shape, (100,60))
self.assertEqual(t.count_nonzero(t.all(newpop == pop, dim=-1)), 10)
for bi in best_indices:
self.assertTrue(t.all(t.abs(newpop[bi] - pop[bi]) < 1e-9))
@unittest.skipIf(not t.cuda.is_available(), 'CUDA not available')
def test_should_keep_16_percentage_cuda(self):
s = selection.Elitism(0.16, lambda *_, **__: ((t.rand((100,60)) + 10,), __))
pop, fitness = t.rand((100,60)), t.randn((100,))
(newpop,), kargs = s(fitness, pop)
self.assertEqual(newpop.shape, (100,60))
self.assertEqual(t.count_nonzero(t.all(newpop == pop, dim=-1)), 16)
def test_fraction_callback(self):
s = selection.Elitism(decay.Linear(0.1, 0.01), selection.Tournament())
pop, fitness = t.rand((100,60)), t.randn((100,))
(newpop,), kargs = s(fitness, pop, iteration=13, max_iteration=23)
self.assertEqual(newpop.shape, (100,60))
def test_absolute_callback(self):
s = selection.Elitism(decay.Linear(5, 2, result_type=int), selection.Tournament())
pop, fitness = t.rand((100,60)), t.randn((100,))
(newpop,), kargs = s(fitness, pop, iteration=13, max_iteration=23)
self.assertEqual(newpop.shape, (100,60))
def test_invalid_fraction(self):
s = selection.Elitism(decay.Linear(11.3, 6.2), selection.Tournament())
pop, fitness = t.rand((100,60)), t.randn((100,))
with self.assertRaises(ValueError):
s(fitness, pop, iteration=13, max_iteration=23)
def test_invalid_absolute(self):
s = selection.Elitism(decay.Linear(196, 112, result_type=int), selection.Tournament())
pop, fitness = t.rand((100,60)), t.randn((100,))
with self.assertRaises(ValueError):
s(fitness, pop, iteration=13, max_iteration=23)
def test_invalid_type_during_run(self):
s = selection.Elitism(lambda *_, **__: object(), selection.Tournament())
pop, fitness = t.rand((100,60)), t.randn((100,))
with self.assertRaises(ValueError):
s(fitness, pop, iteration=13, max_iteration=23)
@repeat(5)
def test_in_alg(self):
_f = lambda x: t.sum(t.pow(x, 2), dim=-1)
alg = ffeat.strategies.EvolutionStrategy(
ffeat.strategies.initialization.Uniform(100, -5.0, 5.0, 40),
ffeat.strategies.evaluation.Evaluation(_f),
ffeat.strategies.selection.Elitism(2,
ffeat.strategies.selection.Tournament(1.0),
ffeat.strategies.mutation.AddFromNormal(0.1, 0.1),
ffeat.strategies.crossover.OnePoint1D(40, replace_parents=True),
),
iterations=500
)
(pop,), kargs = alg()
self.assertTrue(t.all(_f(pop) < 1))
@unittest.skipIf(not t.cuda.is_available(), 'CUDA not available')
@repeat(5)
def test_in_alg_cuda(self):
_f = lambda x: t.sum(t.pow(x, 2), dim=-1)
alg = ffeat.strategies.EvolutionStrategy(
ffeat.strategies.initialization.Uniform(100, -5.0, 5.0, 40, device='cuda:0'),
ffeat.strategies.evaluation.Evaluation(_f),
ffeat.strategies.selection.Elitism(2,
ffeat.strategies.selection.Tournament(1.0),
ffeat.strategies.mutation.AddFromNormal(0.1, 0.1),
ffeat.strategies.crossover.OnePoint1D(40, replace_parents=True),
),
iterations=500
)
(pop,), kargs = alg()
self.assertTrue(t.all(_f(pop) < 1))
if __name__ == '__main__':
unittest.main()
| 44.089655
| 101
| 0.603003
| 843
| 6,393
| 4.428233
| 0.142349
| 0.040182
| 0.042861
| 0.053576
| 0.866059
| 0.837128
| 0.790249
| 0.753817
| 0.751942
| 0.751942
| 0
| 0.071386
| 0.219928
| 6,393
| 144
| 102
| 44.395833
| 0.677161
| 0.005631
| 0
| 0.616667
| 0
| 0
| 0.007949
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 1
| 0.125
| false
| 0
| 0.05
| 0
| 0.183333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
124633bd0efa7de4f6b01a95125a7e5dae902839
| 218
|
py
|
Python
|
lgtv_rs232/commands/screen/auto_tune.py
|
davo22/lgtv_rs232
|
40562cddf7acdf6fa95124029595e3838dd9e7b0
|
[
"MIT"
] | null | null | null |
lgtv_rs232/commands/screen/auto_tune.py
|
davo22/lgtv_rs232
|
40562cddf7acdf6fa95124029595e3838dd9e7b0
|
[
"MIT"
] | null | null | null |
lgtv_rs232/commands/screen/auto_tune.py
|
davo22/lgtv_rs232
|
40562cddf7acdf6fa95124029595e3838dd9e7b0
|
[
"MIT"
] | null | null | null |
class AutoTuneCommands(object):
_command = "ju"
def __init__(self, send_command):
self._send_command = send_command
async def call(self):
return await self._send_command(self._command, 1)
| 24.222222
| 57
| 0.688073
| 27
| 218
| 5.111111
| 0.518519
| 0.318841
| 0.326087
| 0.275362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005882
| 0.220183
| 218
| 8
| 58
| 27.25
| 0.805882
| 0
| 0
| 0
| 0
| 0
| 0.009174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
126325022449c57eefddaf73c0c0740a1f9b9aaf
| 18,759
|
py
|
Python
|
tests/timeseries/test_flag_jumps.py
|
evan-anderson-ts/pyjanitor
|
3e62283b2d07c9f94dab6b5cb5c695b517a6bf84
|
[
"MIT"
] | null | null | null |
tests/timeseries/test_flag_jumps.py
|
evan-anderson-ts/pyjanitor
|
3e62283b2d07c9f94dab6b5cb5c695b517a6bf84
|
[
"MIT"
] | null | null | null |
tests/timeseries/test_flag_jumps.py
|
evan-anderson-ts/pyjanitor
|
3e62283b2d07c9f94dab6b5cb5c695b517a6bf84
|
[
"MIT"
] | null | null | null |
"""
Unit tests for `.flag_jumps()`.
"""
import pandas as pd
import numpy as np
import pytest
import janitor.timeseries # noqa: F401
from janitor.timeseries import _flag_jumps_single_col
from janitor.errors import JanitorError
@pytest.fixture
def timeseries_dataframe() -> pd.DataFrame:
"""Returns a time series dataframe."""
ts_index = pd.date_range("1/1/2019", periods=10, freq="1H")
c1 = [*range(10)]
c2 = [*range(100, 110)]
c3 = c1[::-1]
c4 = c2[::-1]
c5 = [-2.0, -1.0, 0, 1.0, 2.0, 1.0, 0, -1.0, -2.0, -7.5]
test_df = pd.DataFrame(
{"col1": c1, "col2": c2, "col3": c3, "col4": c4, "col5": c5},
index=ts_index,
)
return test_df
@pytest.mark.timeseries
def test__flag_jumps_single_col_raises_error_for_bad_scale_type(
timeseries_dataframe
):
"""Test that invalid scale argument raises a JanitorError."""
# Setup
df = timeseries_dataframe
expected_error_msg = (
"Unrecognized scale: 'bad_scale'. "
+ "Must be one of: ['absolute', 'percentage']."
)
# Exercise
with pytest.raises(JanitorError) as error_info:
_flag_jumps_single_col(
df, col="col1", scale="bad_scale", direction="any", threshold=1
)
# Verify
assert str(error_info.value) == expected_error_msg
# Cleanup - none necessary
@pytest.mark.timeseries
def test__flag_jumps_single_col_raises_error_for_bad_direction_type(
timeseries_dataframe
):
"""Test that invalid direction argument raises a JanitorError."""
# Setup
df = timeseries_dataframe
expected_error_msg = (
"Unrecognized direction: 'bad_direction'. "
+ "Must be one of: ['increasing', 'decreasing', 'any']."
)
# Exercise
with pytest.raises(JanitorError) as error_info:
_flag_jumps_single_col(
df,
col="col1",
scale="absolute",
direction="bad_direction",
threshold=1,
)
# Verify
assert str(error_info.value) == expected_error_msg
# Cleanup - none necessary
@pytest.mark.timeseries
def test__flag_jumps_single_col_raises_error_for_bad_threshold_value(
timeseries_dataframe
):
"""Test that invalid threshold argument raises a JanitorError."""
# Setup
df = timeseries_dataframe
expected_error_msg = (
"Unrecognized threshold: -1. This value must be >= 0.0. "
+ "Use 'direction' to specify positive or negative intent."
)
# Exercise
with pytest.raises(JanitorError) as error_info:
_flag_jumps_single_col(
df, col="col1", scale="absolute", direction="any", threshold=-1
)
# Verify
assert str(error_info.value) == expected_error_msg
# Cleanup - none necessary
@pytest.mark.timeseries
@pytest.mark.parametrize(
"col, direction, expected",
[
("col1", "increasing", [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
("col2", "increasing", [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
("col3", "decreasing", [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
("col4", "decreasing", [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
("col5", "increasing", [0, 1, 1, 1, 1, 0, 0, 0, 0, 0]),
("col5", "decreasing", [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
],
)
def test__flag_jumps_single_col_absolute_scale_correct_direction(
timeseries_dataframe, col, direction, expected
):
"""
Test utility function for flagging jumps with absolute scale.
Here, the correct/anticipated `direction` is provided
(i.e. increasing when the df column is truly increasing
and decreasing when the df column is truly decreasing)
"""
# Setup
df = timeseries_dataframe
# Exercise
result = _flag_jumps_single_col(
df, col, scale="absolute", direction=direction, threshold=0.5
)
# Verify
np.testing.assert_array_equal(result.array, expected)
# Cleanup - none necessary
@pytest.mark.timeseries
@pytest.mark.parametrize(
"col, direction, expected",
[
("col1", "decreasing", [0] * 10),
("col2", "decreasing", [0] * 10),
("col3", "increasing", [0] * 10),
("col4", "increasing", [0] * 10),
("col5", "decreasing", [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
("col5", "increasing", [0, 1, 1, 1, 1, 0, 0, 0, 0, 0]),
],
)
def test__flag_jumps_single_col_absolute_scale_inverse_direction(
timeseries_dataframe, col, direction, expected
):
"""
Test utility function for flagging jumps with absolute scale.
Here, the inverse `direction` is provided so should not flag
anything (i.e. increasing when the df column is truly decreasing
and increasing when the df column is truly increasing)
"""
# Setup
df = timeseries_dataframe
# Exercise
result = _flag_jumps_single_col(
df, col, scale="absolute", direction=direction, threshold=0.5
)
# Verify
np.testing.assert_array_equal(result.array, expected)
# Cleanup - none necessary
@pytest.mark.timeseries
@pytest.mark.parametrize("col", ("col1", "col2", "col3", "col4", "col5"))
def test__flag_jumps_single_col_absolute_scale_any_direction(
timeseries_dataframe, col
):
"""
Test utility function for flagging jumps with absolute scale.
Here, the any `direction` is provided so should flag everything.
"""
# Setup
df = timeseries_dataframe
# Exercise
result = _flag_jumps_single_col(
df, col, scale="absolute", direction="any", threshold=0.5
)
# Verify
np.testing.assert_array_equal(result.array, [0, 1, 1, 1, 1, 1, 1, 1, 1, 1])
# Cleanup - none necessary
@pytest.mark.timeseries
def test__flag_jumps_single_col_absolute_scale_flags_large_jump(
timeseries_dataframe
):
"""
Test utility function for flagging jumps with absolute scale.
Here, a large threshold is used to verify only one row is flagged.
"""
# Setup
df = timeseries_dataframe
# Exercise
result_incr = _flag_jumps_single_col(
df, "col5", scale="absolute", direction="increasing", threshold=5
)
result_decr = _flag_jumps_single_col(
df, "col5", scale="absolute", direction="decreasing", threshold=5
)
result_any = _flag_jumps_single_col(
df, "col5", scale="absolute", direction="any", threshold=5
)
# Verify
np.testing.assert_array_equal(result_incr.array, [0] * 10)
np.testing.assert_array_equal(
result_decr.array, [0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
)
np.testing.assert_array_equal(
result_any.array, [0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
)
# Cleanup - none necessary
@pytest.mark.timeseries
@pytest.mark.parametrize(
"col, direction, expected",
[
("col1", "increasing", [0, 1, 1, 1, 1, 0, 0, 0, 0, 0]),
("col2", "increasing", [0] * 10),
("col3", "decreasing", [0, 0, 0, 0, 0, 0, 0, 1, 1, 1]),
("col4", "decreasing", [0] * 10),
("col5", "increasing", [0, 1, 1, 1, 1, 0, 0, 0, 0, 0]),
("col5", "decreasing", [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
],
)
def test__flag_jumps_single_col_percentage_scale_correct_direction(
timeseries_dataframe, col, direction, expected
):
"""
Test utility function for flagging jumps with percentage scale and
a 25% jump. Here, the correct/anticipated `direction` is provided
(i.e. increasing when the df column is truly increasing and
decreasing when the df column is truly decreasing).
"""
# Setup
df = timeseries_dataframe
# Exercise
result = _flag_jumps_single_col(
df, col, scale="percentage", direction=direction, threshold=0.25
)
# Verify
np.testing.assert_array_equal(result.array, expected)
# Cleanup - none necessary
@pytest.mark.timeseries
@pytest.mark.parametrize(
"col, direction, expected",
[
("col1", "decreasing", [0] * 10),
("col2", "decreasing", [0] * 10),
("col3", "increasing", [0] * 10),
("col4", "increasing", [0] * 10),
("col5", "decreasing", [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
("col5", "increasing", [0, 1, 1, 1, 1, 0, 0, 0, 0, 0]),
],
)
def test__flag_jumps_single_col_percentage_scale_inverse_direction(
timeseries_dataframe, col, direction, expected
):
"""
Test utility function for flagging jumps with percentage scale and
a 25% jump. Here, the inverse `direction` is provided so should not
flag anything (i.e. increasing when the df column is truly
decreasing and increasing when the df column is truly increasing).
"""
# Setup
df = timeseries_dataframe
# Exercise
result = _flag_jumps_single_col(
df, col, scale="percentage", direction=direction, threshold=0.25
)
# Verify
np.testing.assert_array_equal(result.array, expected)
# Cleanup - none necessary
@pytest.mark.timeseries
@pytest.mark.parametrize(
"col, expected",
[
("col1", [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
("col2", [0] * 10),
("col3", [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
("col4", [0] * 10),
("col5", [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
],
)
def test__flag_jumps_single_col_percentage_scale_any_direction(
timeseries_dataframe, col, expected
):
"""
Test utility function for flagging jumps with percentage scale and
a 10% jump. Here, the any direction is provided so should flag
everything.
"""
# Setup
df = timeseries_dataframe
# Exercise
result = _flag_jumps_single_col(
df, col, scale="percentage", direction="any", threshold=0.10
)
# Verify
np.testing.assert_array_equal(result.array, expected)
# Cleanup - none necessary
@pytest.mark.timeseries
def test__flag_jumps_single_col_percentage_scale_flags_large_jump(
timeseries_dataframe
):
"""
Test utility function for flagging jumps with percentage scale and
a 100% jump. Here, a large threshold is used to verify only
drastically changed rows are flagged.
"""
# Setup
df = timeseries_dataframe
# Exercise
result_incr = _flag_jumps_single_col(
df, "col5", scale="percentage", direction="increasing", threshold=1.0
)
result_decr = _flag_jumps_single_col(
df, "col5", scale="percentage", direction="decreasing", threshold=1.0
)
result_any = _flag_jumps_single_col(
df, "col5", scale="percentage", direction="any", threshold=1.0
)
# Verify
np.testing.assert_array_equal(
result_incr.array, [0, 0, 0, 1, 0, 0, 0, 0, 0, 0]
)
np.testing.assert_array_equal(
result_decr.array, [0, 0, 0, 0, 0, 0, 0, 1, 0, 1]
)
np.testing.assert_array_equal(
result_any.array, [0, 0, 0, 1, 0, 0, 0, 1, 0, 1]
)
# Cleanup - none necessary
@pytest.mark.timeseries
def test_flag_jumps_raises_error_for_strict_no_arg_dicts(timeseries_dataframe):
"""
Test that an error is raised when `strict=True`
and no input arguments are of type dict.
"""
# Setup
df = timeseries_dataframe
# Exercise
expected_error_msg = (
"When enacting 'strict=True', 'scale', 'direction', "
+ "or 'threshold' must be a dictionary."
)
# Exercise
with pytest.raises(JanitorError) as error_info:
df.flag_jumps(
scale="absolute", direction="any", threshold=0, strict=True
)
# Verify
assert str(error_info.value) == expected_error_msg
# Cleanup - none necessary
@pytest.mark.timeseries
def test_flag_jumps_default_args(timeseries_dataframe):
"""
Test the default values behave as expected.
Namely, `scale=percentage`, `direction=any`, `threshold=0.0` and
`strict=False`.
"""
# Setup
df = timeseries_dataframe
orig_cols = df.columns
expected = np.ones((10, 5), dtype=int)
expected[0, :] = 0
expected_cols = [f"{c}_jump_flag" for c in orig_cols]
expected_df = pd.DataFrame(expected, columns=expected_cols, index=df.index)
# Exercise
df = df.flag_jumps()
# Verify
assert list(df.columns) == list(orig_cols) + expected_cols
assert df.filter(regex="flag").equals(expected_df)
# Cleanup - none necessary
@pytest.mark.timeseries
def test_flag_jumps_all_args_specifed_as_non_dict(timeseries_dataframe):
"""Test provided kwargs (not of type dict) behave as expected."""
# Setup
df = timeseries_dataframe
orig_cols = df.columns
expected = np.ones((10, 5), dtype=int)
expected[0, :] = 0
expected[:, 2:4] = 0
expected[5:10, 4] = 0
expected_cols = [f"{c}_jump_flag" for c in orig_cols]
expected_df = pd.DataFrame(expected, columns=expected_cols, index=df.index)
# Exercise
df = df.flag_jumps(
scale="absolute", direction="increasing", threshold=0, strict=False
)
# Verify
assert list(df.columns) == list(orig_cols) + expected_cols
assert df.filter(regex="flag").equals(expected_df)
# Cleanup - none necessary
@pytest.mark.timeseries
def test_flag_jumps_all_args_specified_as_dict(timeseries_dataframe):
"""
Test provided kwargs (of type dict) behaves as expected.
Since strict defaults to `False`, col3, col4, and col5 will be
flagged and will use default args (`scale=percentage`,
`direction=any`, and `threshold=0.0`).
"""
df = timeseries_dataframe
orig_cols = df.columns
expected = np.ones((10, 5), dtype=int)
expected[0, :] = 0
expected[:, 0:2] = 0
expected_cols = [f"{c}_jump_flag" for c in orig_cols]
expected_df = pd.DataFrame(expected, columns=expected_cols, index=df.index)
# Exercise
df = df.flag_jumps(
scale=dict(col1="absolute", col2="percentage"),
direction=dict(col1="increasing", col2="any"),
threshold=dict(col1=1, col2=2),
)
# Verify
assert list(df.columns) == list(orig_cols) + expected_cols
assert df.filter(regex="flag").equals(expected_df)
# Cleanup - none necessary
@pytest.mark.timeseries
def test_flag_jumps_strict_with_both_cols_in_all_args(timeseries_dataframe):
"""
Test provided strict behaves as expected
(only col1 and col2 flagged).
"""
df = timeseries_dataframe
orig_cols = df.columns
expected = np.zeros((10, 2), dtype=int)
expected_cols = ["col1_jump_flag", "col2_jump_flag"]
expected_df = pd.DataFrame(expected, columns=expected_cols, index=df.index)
# Exercise
df = df.flag_jumps(
scale=dict(col1="absolute", col2="percentage"),
direction=dict(col1="increasing", col2="any"),
threshold=dict(col1=1, col2=2),
strict=True,
)
# Verify
assert list(df.columns) == list(orig_cols) + expected_cols
assert df.filter(regex="flag").equals(expected_df)
# Cleanup - none necessary
@pytest.mark.timeseries
def test_flag_jumps_strict_with_both_cols_in_at_least_one_args(
timeseries_dataframe
):
"""
Test provided strict behaves as expected
(col4 not provided in any input arg dict thus not flagged)
When left unspecified, a column will be flagged based on defaults
(`scale=percentage`, `direction=any`, `threshold=0.0`).
"""
df = timeseries_dataframe
orig_cols = df.columns
expected = np.ones((10, 4), dtype=int)
expected[0, :] = 0
expected[:, 3] = 0
expected[3, 3] = 1
expected[7, 3] = 1
expected[9, 3] = 1
expected_cols = [f"col{i}_jump_flag" for i in [1, 2, 3, 5]]
expected_df = pd.DataFrame(expected, columns=expected_cols, index=df.index)
# Exercise
df = df.flag_jumps(
scale=dict(col1="absolute", col3="absolute"),
direction=dict(col2="increasing"),
threshold=dict(col5=2),
strict=True,
)
# Verify
assert list(df.columns) == list(orig_cols) + expected_cols
assert df.filter(regex="flag").equals(expected_df)
# Cleanup - none necessary
@pytest.mark.timeseries
def test_flag_jumps_for_one_column(timeseries_dataframe):
"""
Test provided strict behaves as expected for a single column.
"""
df = timeseries_dataframe
orig_cols = df.columns
expected = [0, 1, 1, 1, 1, 1, 1, 1, 1, 1]
# Exercise
df = df.flag_jumps(scale=dict(col1="absolute"), strict=True)
# Verify
assert list(df.columns) == list(orig_cols) + ["col1_jump_flag"]
np.testing.assert_array_equal(df["col1_jump_flag"].array, expected)
# Cleanup - none necessary
@pytest.mark.timeseries
def test_flag_jumps_on_issue_provided_use_case():
"""
Test example provided in issue is solved with `flag_jumps()`
See issue # 711
"""
# Setup
df = pd.DataFrame(
data=[
["2015-01-01 00:00:00", -0.76, 2, 2, 1.2],
["2015-01-01 01:00:00", -0.73, 2, 4, 1.1],
["2015-01-01 02:00:00", -0.71, 2, 4, 1.1],
["2015-01-01 03:00:00", -0.68, 2, 32, 1.1],
["2015-01-01 04:00:00", -0.65, 2, 2, 1.0],
["2015-01-01 05:00:00", -0.76, 2, 2, 1.2],
["2015-01-01 06:00:00", -0.73, 2, 4, 1.1],
["2015-01-01 07:00:00", -0.71, 2, 4, 1.1],
["2015-01-01 08:00:00", -0.68, 2, 32, 1.1],
["2015-01-01 09:00:00", -0.65, 2, 2, 1.0],
["2015-01-01 10:00:00", -0.76, 2, 2, 1.2],
["2015-01-01 11:00:00", -0.73, 2, 4, 1.1],
["2015-01-01 12:00:00", -0.71, 2, 4, 1.1],
["2015-01-01 13:00:00", -0.68, 2, 32, 1.1],
["2015-01-01 14:00:00", -0.65, 2, 2, 1.0],
["2015-01-01 15:00:00", -0.76, 2, 2, 1.2],
["2015-01-01 16:00:00", -0.73, 2, 4, 1.1],
["2015-01-01 17:00:00", -0.71, 2, 4, 1.1],
["2015-01-01 18:00:00", -0.68, 2, 32, 1.1],
["2015-01-01 19:00:00", -0.65, 2, 2, 1.0],
["2015-01-01 20:00:00", -0.76, 2, 2, 1.2],
["2015-01-01 21:00:00", -0.73, 2, 4, 1.1],
["2015-01-01 22:00:00", -0.71, 2, 4, 1.1],
["2015-01-01 23:00:00", -0.68, 2, 32, 1.1],
["2015-01-02 00:00:00", -0.65, 2, 2, 1.0],
],
columns=["DateTime", "column1", "column2", "column3", "column4"],
)
df["DateTime"] = pd.to_datetime(df["DateTime"])
df = df.set_index("DateTime")
orig_cols = df.columns
expected = np.zeros((25, 4), dtype=int)
expected[3, 2] = 1
expected[8, 2] = 1
expected[13, 2] = 1
expected[18, 2] = 1
expected[23, 2] = 1
expected_cols = [f"{c}_jump_flag" for c in orig_cols]
expected_df = pd.DataFrame(expected, columns=expected_cols, index=df.index)
# Exercise
result = df.flag_jumps(
scale="absolute", direction="increasing", threshold=2
)
# Verify
assert list(result.columns) == list(orig_cols) + expected_cols
assert result.filter(regex="flag").equals(expected_df)
# Cleanup - none necessary
| 29.823529
| 79
| 0.621568
| 2,602
| 18,759
| 4.320907
| 0.090315
| 0.021525
| 0.022948
| 0.023837
| 0.817309
| 0.805212
| 0.774704
| 0.762341
| 0.731122
| 0.67731
| 0
| 0.074561
| 0.237859
| 18,759
| 628
| 80
| 29.871019
| 0.711828
| 0.191748
| 0
| 0.484932
| 0
| 0
| 0.132518
| 0
| 0
| 0
| 0
| 0
| 0.082192
| 1
| 0.054795
| false
| 0
| 0.016438
| 0
| 0.073973
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
89dbc3b9c982f546320379158b45c6b6b393f3c0
| 102
|
py
|
Python
|
pydmps/__init__.py
|
phranq0/ros_dmp
|
75317a79c74e3da33c27d660a7ab1489edbfa7a6
|
[
"Apache-2.0"
] | 12
|
2019-03-01T08:09:05.000Z
|
2020-06-13T12:55:25.000Z
|
pydmps/__init__.py
|
phranq0/ros_dmp
|
75317a79c74e3da33c27d660a7ab1489edbfa7a6
|
[
"Apache-2.0"
] | null | null | null |
pydmps/__init__.py
|
phranq0/ros_dmp
|
75317a79c74e3da33c27d660a7ab1489edbfa7a6
|
[
"Apache-2.0"
] | 4
|
2019-02-22T14:34:57.000Z
|
2020-02-06T11:37:10.000Z
|
from .dmp import DMPs
from .dmp_discrete import DMPs_discrete
from .dmp_rhythmic import DMPs_rhythmic
| 25.5
| 39
| 0.852941
| 16
| 102
| 5.1875
| 0.375
| 0.253012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 102
| 3
| 40
| 34
| 0.922222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
89f3e19022e3d2f610e85977bbdb1fd48eceaf5d
| 280
|
py
|
Python
|
hokuto_flask/hokuto_flask/blueprints/page/views.py
|
jackdbd/hokuto-no-ken-api
|
5b092b7ecef3ad0f49215509a63cbb9c11cd5285
|
[
"MIT"
] | null | null | null |
hokuto_flask/hokuto_flask/blueprints/page/views.py
|
jackdbd/hokuto-no-ken-api
|
5b092b7ecef3ad0f49215509a63cbb9c11cd5285
|
[
"MIT"
] | 37
|
2018-09-08T22:14:58.000Z
|
2022-03-02T14:58:56.000Z
|
hokuto_flask/hokuto_flask/blueprints/page/views.py
|
jackdbd/hokuto-no-ken-api
|
5b092b7ecef3ad0f49215509a63cbb9c11cd5285
|
[
"MIT"
] | null | null | null |
from flask import Blueprint, render_template
# Blueprint for the public "page" section; template_folder is relative to
# this package, so the templates below resolve under templates/page/.
bp = Blueprint(name="page", import_name=__name__, template_folder="templates")
@bp.route("/")
def home():
    """Render the site landing page."""
    return render_template("page/home.html")
@bp.route("/about")
def about():
    """Render the about page."""
    return render_template("page/about.html")
| 20
| 78
| 0.721429
| 37
| 280
| 5.216216
| 0.459459
| 0.217617
| 0.207254
| 0.248705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117857
| 280
| 13
| 79
| 21.538462
| 0.781377
| 0
| 0
| 0
| 0
| 0
| 0.175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0.25
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d601e276d4e1c6eb6888dd2efd8ea837585d94be
| 35
|
py
|
Python
|
src/mgenomicsremotemail/dispatch/__init__.py
|
MarcoMernberger/mgenomicsremotemail
|
e3297861c288e8f5126383c335d16ff6c87e29c7
|
[
"MIT"
] | 1
|
2022-03-04T11:55:04.000Z
|
2022-03-04T11:55:04.000Z
|
src/mgenomicsremotemail/dispatch/__init__.py
|
MarcoMernberger/mgenomicsremotemail
|
e3297861c288e8f5126383c335d16ff6c87e29c7
|
[
"MIT"
] | null | null | null |
src/mgenomicsremotemail/dispatch/__init__.py
|
MarcoMernberger/mgenomicsremotemail
|
e3297861c288e8f5126383c335d16ff6c87e29c7
|
[
"MIT"
] | null | null | null |
from .dispatch import RunDispatcher
| 35
| 35
| 0.885714
| 4
| 35
| 7.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d60b1ec0ee356dbeb9ea607c8c684386807b16ca
| 8,693
|
py
|
Python
|
cumulusci/tasks/salesforce/tests/test_enable_prediction.py
|
davisagli/CumulusCI
|
fd74c324ad3ff662484b159395c639879011e711
|
[
"BSD-3-Clause"
] | null | null | null |
cumulusci/tasks/salesforce/tests/test_enable_prediction.py
|
davisagli/CumulusCI
|
fd74c324ad3ff662484b159395c639879011e711
|
[
"BSD-3-Clause"
] | null | null | null |
cumulusci/tasks/salesforce/tests/test_enable_prediction.py
|
davisagli/CumulusCI
|
fd74c324ad3ff662484b159395c639879011e711
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
import responses
from cumulusci.core.config.org_config import OrgConfig
from cumulusci.core.exceptions import CumulusCIException
from cumulusci.tasks.salesforce.enable_prediction import EnablePrediction
from cumulusci.tests.util import DummyKeychain
from .util import create_task
@pytest.fixture
def task():
    """Return an EnablePrediction task configured with two prediction
    api_names against a fake scratch-org instance URL."""
    return create_task(
        EnablePrediction,
        {"api_names": ["test_prediction_v0", "test_prediction_2_v0"]},
        org_config=OrgConfig(
            {"instance_url": "https://test-dev-ed.my.salesforce.com"},
            "test",
            keychain=DummyKeychain(),
        ),
    )
@pytest.fixture
def mock_oauth():
    """Register the HTTP mocks every test needs: OAuth token, userinfo,
    API version discovery (v52.0), and the Organization sobject.

    Yields the RequestsMock so each test can add its own tooling routes.
    """
    with responses.RequestsMock() as rsps:
        rsps.add(
            "POST",
            "https://test-dev-ed.my.salesforce.com/services/oauth2/token",
            json={
                "access_token": "TOKEN",
                "instance_url": "https://test-dev-ed.my.salesforce.com",
            },
        )
        rsps.add(
            "GET",
            url="https://test-dev-ed.my.salesforce.com/services/oauth2/userinfo",
            json={},
            status=200,
        )
        rsps.add(
            "GET",
            url="https://test-dev-ed.my.salesforce.com/services/data",
            json=[
                {
                    "label": "Summer '21",
                    "url": "/services/data/v52.0",
                    "version": "52.0",
                }
            ],
            status=200,
        )
        rsps.add(
            "GET",
            "https://test-dev-ed.my.salesforce.com/services/data/v52.0/sobjects/Organization/",
            json={
                "OrganizationType": "Developer",
                "IsSandbox": False,
                "InstanceName": "NA149",
                "NamespacePrefix": None,
            },
        )
        yield rsps
def test_run_task(mock_oauth, task):
    """Happy path: each prediction is looked up by DeveloperName, its
    Metadata read, then PATCHed with status Enabled."""
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT+Id+FROM+MLPredictionDefinition+WHERE+DeveloperName+%3D+%27test_prediction_v0%27",
        json={"totalSize": 1, "records": [{"Id": "001"}]},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT+Id+FROM+MLPredictionDefinition+WHERE+DeveloperName+%3D+%27test_prediction_2_v0%27",
        json={"totalSize": 1, "records": [{"Id": "002"}]},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/001",
        json={"Metadata": {"status": "Draft"}},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/002",
        json={"Metadata": {"status": "Draft"}},
    )
    # The PATCH matchers assert the task sends status Enabled.
    mock_oauth.add(
        method="PATCH",
        url="https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/001",
        match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})],
    )
    mock_oauth.add(
        method="PATCH",
        url="https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/002",
        match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})],
    )
    task()
def test_run_task__not_found_exception(mock_oauth, task):
    """A DeveloperName query returning zero records raises a
    CumulusCIException whose message mentions "not found"."""
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT+Id+FROM+MLPredictionDefinition+WHERE+DeveloperName+%3D+%27test_prediction_v0%27",
        json={"totalSize": 0, "records": []},
    )
    with pytest.raises(CumulusCIException) as e:
        task()
    assert "not found" in str(e)
def test_run_task__failed_update_exception(mock_oauth, task):
    """An HTTP 400 from the PATCH surfaces as CumulusCIException."""
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT+Id+FROM+MLPredictionDefinition+WHERE+DeveloperName+%3D+%27test_prediction_v0%27",
        json={"totalSize": 1, "records": [{"Id": "001"}]},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/001",
        json={"Metadata": {"status": "Draft"}},
    )
    mock_oauth.add(
        method="PATCH",
        url="https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/001",
        status=400,
    )
    with pytest.raises(CumulusCIException):
        task()
def test_run_task__namespaced_org(mock_oauth, task):
    """With namespaced_org=True, %%%NAMESPACED_ORG%%% tokens in
    api_names are injected as "foo__" (the org's namespace prefix),
    as the queried DeveloperName values below show."""
    task.options["namespaced_org"] = True
    task.options["namespace_inject"] = "foo"
    task.options["api_names"] = [
        "%%%NAMESPACED_ORG%%%test_prediction_v0",
        "%%%NAMESPACED_ORG%%%test_prediction_2_v0",
    ]
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT+Id+FROM+MLPredictionDefinition+WHERE+DeveloperName+%3D+%27foo__test_prediction_v0%27",
        json={"totalSize": 1, "records": [{"Id": "001"}]},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT+Id+FROM+MLPredictionDefinition+WHERE+DeveloperName+%3D+%27foo__test_prediction_2_v0%27",
        json={"totalSize": 1, "records": [{"Id": "002"}]},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/001",
        json={"Metadata": {"status": "Draft"}},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/002",
        json={"Metadata": {"status": "Draft"}},
    )
    mock_oauth.add(
        method="PATCH",
        url="https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/001",
        match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})],
    )
    mock_oauth.add(
        method="PATCH",
        url="https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/002",
        match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})],
    )
    # Extra routes the namespaced path hits: installed-package query, and
    # an Organization response whose NamespacePrefix is "foo".
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT%20SubscriberPackage.Id,%20SubscriberPackage.NamespacePrefix,%20SubscriberPackageVersionId%20FROM%20InstalledSubscriberPackage",
        json={"totalSize": 0, "records": []},
    )
    mock_oauth.replace(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/sobjects/Organization/",
        json={
            "OrganizationType": "Developer",
            "IsSandbox": False,
            "InstanceName": "NA149",
            "NamespacePrefix": "foo",
        },
    )
    task()
def test_run_task__managed_org(mock_oauth, task):
    """With managed=True, %%%NAMESPACE%%% tokens in api_names are
    injected as "foo__", as the queried DeveloperName values show."""
    task.options["managed"] = True
    task.options["namespace_inject"] = "foo"
    task.options["api_names"] = [
        "%%%NAMESPACE%%%test_prediction_v0",
        "%%%NAMESPACE%%%test_prediction_2_v0",
    ]
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT+Id+FROM+MLPredictionDefinition+WHERE+DeveloperName+%3D+%27foo__test_prediction_v0%27",
        json={"totalSize": 1, "records": [{"Id": "001"}]},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/query/?q=SELECT+Id+FROM+MLPredictionDefinition+WHERE+DeveloperName+%3D+%27foo__test_prediction_2_v0%27",
        json={"totalSize": 1, "records": [{"Id": "002"}]},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/001",
        json={"Metadata": {"status": "Draft"}},
    )
    mock_oauth.add(
        "GET",
        "https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/002",
        json={"Metadata": {"status": "Draft"}},
    )
    mock_oauth.add(
        method="PATCH",
        url="https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/001",
        match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})],
    )
    mock_oauth.add(
        method="PATCH",
        url="https://test-dev-ed.my.salesforce.com/services/data/v52.0/tooling/sobjects/MLPredictionDefinition/002",
        match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})],
    )
    task()
| 37.149573
| 218
| 0.618314
| 988
| 8,693
| 5.314777
| 0.123482
| 0.051419
| 0.068558
| 0.079985
| 0.812417
| 0.798705
| 0.788421
| 0.788421
| 0.788421
| 0.763855
| 0
| 0.035313
| 0.211665
| 8,693
| 233
| 219
| 37.309013
| 0.730921
| 0
| 0
| 0.581731
| 0
| 0.120192
| 0.504544
| 0.016795
| 0
| 0
| 0
| 0
| 0.004808
| 1
| 0.033654
| false
| 0
| 0.033654
| 0.004808
| 0.072115
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d6590ebc0b2a6c5bc8130ea8763d952bf4d10b33
| 1,749
|
py
|
Python
|
008/solution.py
|
klen/euler
|
269d765a2070f08ede45604909f6b5000917ee2f
|
[
"MIT"
] | 1
|
2015-01-28T09:15:30.000Z
|
2015-01-28T09:15:30.000Z
|
008/solution.py
|
klen/euler
|
269d765a2070f08ede45604909f6b5000917ee2f
|
[
"MIT"
] | null | null | null |
008/solution.py
|
klen/euler
|
269d765a2070f08ede45604909f6b5000917ee2f
|
[
"MIT"
] | null | null | null |
""" Project Euler problem #8. """
import operator as op
def problem():
""" Solve the problem.
Find the greatest product of five consecutive digits in the 1000-digit
number.
Answer: 40824
"""
number = ''.join([
'73167176531330624919225119674426574742355349194934'
'96983520312774506326239578318016984801869478851843',
'85861560789112949495459501737958331952853208805511',
'12540698747158523863050715693290963295227443043557',
'66896648950445244523161731856403098711121722383113',
'62229893423380308135336276614282806444486645238749',
'30358907296290491560440772390713810515859307960866',
'70172427121883998797908792274921901699720888093776',
'65727333001053367881220235421809751254540594752243',
'52584907711670556013604839586446706324415722155397',
'53697817977846174064955149290862569321978468622482',
'83972241375657056057490261407972968652414535100474',
'82166370484403199890008895243450658541227588666881',
'16427171479924442928230863465674813919123162824586',
'17866458359124566529476545682848912883142607690042',
'24219022671055626321111109370544217506941658960408',
'07198403850962455444362981230987879927244284909188',
'84580156166097919133875499200524063689912560717606',
'05886116467109405077541002256983155200055935729725',
'71636269561882670428252483600823257530420752963450'])
size = 5
chunks = [number[i:i + size] for i in range(0, len(number) - size)]
chunks = [map(int, chunk) for chunk in chunks]
products = [reduce(op.mul, chunk) for chunk in chunks]
return max(products)
if __name__ == '__main__':
print problem()
| 38.866667
| 74
| 0.753002
| 90
| 1,749
| 14.544444
| 0.722222
| 0.012223
| 0.019862
| 0.022918
| 0.032086
| 0
| 0
| 0
| 0
| 0
| 0
| 0.702778
| 0.176672
| 1,749
| 44
| 75
| 39.75
| 0.20625
| 0
| 0
| 0
| 0
| 0
| 0.638783
| 0.633714
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.033333
| null | null | 0.033333
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c3a042551d80353a4ffb05cc7d2f7391ca572a8c
| 12,402
|
py
|
Python
|
Avances/valeria&maicol.py
|
DiegoC386/Proyecto_CajeroEAN
|
48420b1ea64dd2d3bf695a10b7ab962c4b533cf4
|
[
"MIT"
] | null | null | null |
Avances/valeria&maicol.py
|
DiegoC386/Proyecto_CajeroEAN
|
48420b1ea64dd2d3bf695a10b7ab962c4b533cf4
|
[
"MIT"
] | null | null | null |
Avances/valeria&maicol.py
|
DiegoC386/Proyecto_CajeroEAN
|
48420b1ea64dd2d3bf695a10b7ab962c4b533cf4
|
[
"MIT"
] | null | null | null |
"""
#Entradas
Usuario-->input-->U
Contraseña-->input-->C
Opcion 1-->int-->1
Opcion 2-->int-->2
Opcion 3-->int-->3
Opcion 4-->int-->4
#Salidas
Operaciones-->float-->O.realizadas
"""
from datetime import datetime
ahora=datetime.now()
print(ahora.strftime('%Y-%m-%d %H:%M:%S'))
Cuentas=[]
print("......................................:CAJERO.EAN:..................................")
print("Por la seguridad del usuario solo es permitido 3 intentos para ingresar a su cuenta 😎")
#CAJERO EAN
usuario="1818"
passw="2027"
cont=0
saldo=500000
y=1
conectado=bool;
#INVENTARIO BILLETES Y MONEDAS
monedas50=20
monedas100=20
monedas200=20
monedas500=20
monedas1000=20
billetes1000=20
billetes2000=20
billetes5000=20
billetes10000=20
billetes20000=20
billetes50000=10
billetes100000=5
caja=1795000
while cont<3:
us=input("Ingrese usuario 🙍 : ");
co=input("Ingrese contraseña 🔑 : ");
if us==usuario and passw==co:
print("......................................:CAJERO.EAN:..................................")
print ("Bienvenido al sistema 💰 ")
print ("Escoja el soporte a solicitar:")
print("1. BANCO EAN 💰 ")
print("2. PARQUEADERO EAN 🚗")
print("3. RESTAURANTE EAN 🍽️")
print("......................................:CAJERO.EAN:..................................")
soporte=int(input("Digite una opcion de MENU: "))
conectado=True
break
else:
cont=cont+1;
print ("Usuario y contraseña incorrecta ❌")
conectado=False
while conectado:
if (soporte==1):
print("......................................:CAJERO.EAN:..................................")
print("\t.:MENU BANCO:.")
print("")
print("1. Consultar saldo🔍")
print("2. Retirar dinero 💵")
print("3. Transferir 💸")
print("4. SALIR 🔚")
print("......................................:CAJERO.EAN:..................................")
Opcion=int(input("Digite una opcion de MENU: "))
print("")
if(Opcion==1):
print ("Su saldo es:", saldo)
print("")
elif(Opcion==2):
Retiro=int(input("Digite la cantidad que desea retirar: "))
print(f"Operacion realizada, su saldo actual es: {saldo-Retiro}")
saldo=saldo-Retiro
if Retiro>saldo:
print("No tiene saldo disponible 🙁")
print("......................................:CAJERO.EAN:..................................")
print("")
elif(Opcion==3):
cu2=input("Ingrese cuenta a Depositar: ")
Cuentas.append(cu2)
monto=int(input("Ingrese monto a Transferir: "))
saldo=saldo-monto
saldo2=saldo
print ("Se han transferido", monto,"pesos a la cuenta",cu2, "✔")
print ("El nuevo saldo es:",saldo)
print("")
elif(Opcion==4):
print("Gracias por usar el Cajero EAN 🙌")
print("......................................:CAJERO.EAN:..................................")
print("")
break
else:
print("ERROR, Opcion no valida para el cajero EAN 🛑 ❌")
break
elif (soporte==2):
print("\t.:MENU PARQUEADERO 🚗:.")
print("")
print("1. Consultar dinero caja 💵")
print("2. Retirar vueltas 🎫")
print("3. SALIR 🔚")
print("......................................:CAJERO.EAN:..................................")
Opcion=int(input("Digite una opcion de MENU: "))
print("")
if (Opcion==1):
print("Monto total en la caja ")
print(caja)
elif(Opcion==2):
print("Ingrese billetes/monedas a tomar: \n")
print("1. monedas de 50 \n2. monedas de 100 \n3. monedas de 200 \n4. monedas de 500 \n5. monedas de 1000 \n6. billetes de 1000 \n7. billetes de 2000 \n8. billetes de 5000 \n9. billetes de 10000 \n10. billetes de 20000 \n11. billetes de 50000 \n")
totalvueltas=0
while y==1:
VueltasCaja = int(input("INGRESE NUMERO 1 A 11 \n"))
if (VueltasCaja==1):
x = int(input("cantidad de monedas a tomar? \n"))
if (x<=monedas50):
totalvueltas =(totalvueltas+(50*x))
monedas50=monedas50-x
else:
print("cantidad en la caja insuficiente")
elif(VueltasCaja==2):
x = int(input("cantidad de monedas a tomar? \n"))
if (x<=monedas100):
totalvueltas =(totalvueltas+(100*x))
monedas100=monedas100-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==3):
x = int(input("cantidad de monedas a tomar? \n"))
if (x<=monedas200):
totalvueltas =(totalvueltas+(200*x))
monedas200=monedas200-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==4):
x = int(input("cantidad de monedas a tomar?\n"))
if (x<=monedas500):
totalvueltas =(totalvueltas+(500*x))
monedas500=monedas500-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==5):
x = int(input("cantidad de monedas a tomar? \n"))
if (x<=monedas1000):
totalvueltas =(totalvueltas+(1000*x))
monedas1000=monedas1000-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==6):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes1000):
totalvueltas =(totalvueltas+(1000*x))
billetes1000=billetes1000-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==7):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes2000):
totalvueltas =(totalvueltas+(2000*x))
billetes2000=billetes2000-x
elif (VueltasCaja==8):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<billetes5000):
totalvueltas =(totalvueltas+(5000*x))
billetes5000=billetes5000-x
elif (VueltasCaja==9):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes10000):
totalvueltas =(totalvueltas+(10000*x))
billetes10000=billetes10000-x
elif (VueltasCaja==10):
x = int(input("cantidad de billetes a tomar? \n"))
totalvueltas =(totalvueltas+(20000*x))
if (x<=billetes20000):
totalvueltas =(totalvueltas+(20000*x))
billetes20000=billetes20000-x
elif (VueltasCaja==11):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes50000):
totalvueltas =(totalvueltas+(50000*x))
billetes50000=billetes50000-x
print ("desea retirar mas cambio?")
y = int(input("1) Si \n2) No \n"))
print("cambio retirado:")
print(totalvueltas)
print("Nuevo saldo caja")
print(caja-totalvueltas)
caja=caja-totalvueltas
y=1
elif(Opcion==3):
print("Gracias por usar el Cajero EAN 🙌")
print("......................................:CAJERO.EAN:..................................")
print("")
break
elif (soporte==3):
print("\t.:MENU RESTAURANTE 🍽️:.")
print("")
print("1. Consultar dinero caja 💵")
print("2. Retirar vueltas 🎫")
print("3. SALIR 🔚")
print("......................................:CAJERO.EAN:..................................")
Opcion=int(input("Digite una opcion de MENU: "))
print("")
if (Opcion==1):
print("Monto total en la caja ")
print(caja)
elif(Opcion==2):
print("Ingrese billetes/monedas a tomar: \n")
print("1. monedas de 50 \n2. monedas de 100 \n3. monedas de 200 \n4. monedas de 500 \n5. monedas de 1000 \n6. billetes de 1000 \n7. billetes de 2000 \n8. billetes de 5000 \n9. billetes de 10000 \n10. billetes de 20000 \n11. billetes de 50000 \n")
totalvueltas=0
while y==1:
VueltasCaja = int(input("INGRESE NUMERO 1 A 11 \n"))
if (VueltasCaja==1):
x = int(input("cantidad de monedas a tomar? \n"))
if (x<=monedas50):
totalvueltas =(totalvueltas+(50*x))
monedas50=monedas50-x
else:
print("cantidad en la caja insuficiente")
elif(VueltasCaja==2):
x = int(input("cantidad de monedas a tomar? \n"))
if (x<=monedas100):
totalvueltas =(totalvueltas+(100*x))
monedas100=monedas100-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==3):
x = int(input("cantidad de monedas a tomar? \n"))
if (x<=monedas200):
totalvueltas =(totalvueltas+(200*x))
monedas200=monedas200-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==4):
x = int(input("cantidad de monedas a tomar?\n"))
if (x<=monedas500):
totalvueltas =(totalvueltas+(500*x))
monedas500=monedas500-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==5):
x = int(input("cantidad de monedas a tomar? \n"))
if (x<=monedas1000):
totalvueltas =(totalvueltas+(1000*x))
monedas1000=monedas1000-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==6):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes1000):
totalvueltas =(totalvueltas+(1000*x))
billetes1000=billetes1000-x
else:
print("cantidad en la caja insuficiente")
elif (VueltasCaja==7):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes2000):
totalvueltas =(totalvueltas+(2000*x))
billetes2000=billetes2000-x
elif (VueltasCaja==8):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes5000):
totalvueltas =(totalvueltas+(5000*x))
billetes5000=billetes5000-x
elif (VueltasCaja==9):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes10000):
totalvueltas =(totalvueltas+(10000*x))
billetes10000=billetes10000-x
elif (VueltasCaja==10):
x = int(input("cantidad de billetes a tomar? \n"))
totalvueltas =(totalvueltas+(20000*x))
if (x<=billetes20000):
totalvueltas =(totalvueltas+(20000*x))
billetes20000=billetes20000-x
elif (VueltasCaja==11):
x = int(input("cantidad de billetes a tomar? \n"))
if (x<=billetes50000):
totalvueltas =(totalvueltas+(50000*x))
billetes50000=billetes50000-x
print ("desea retirar mas cambio?")
y = int(input("1) Si \n2) No \n"))
print("cambio retirado:")
print(totalvueltas)
print("Nuevo saldo caja")
print(caja-totalvueltas)
caja=caja-totalvueltas
y=1
elif(Opcion==3):
print("Gracias por usar el Cajero EAN 🙌")
print("......................................:CAJERO.EAN:..................................")
print("")
break
else:
print("ERROR, Opcion no valida para el cajero EAN 🛑 ❌")
break
| 41.757576
| 256
| 0.485244
| 1,259
| 12,402
| 4.804607
| 0.147736
| 0.042321
| 0.027773
| 0.061828
| 0.771367
| 0.771367
| 0.76244
| 0.757646
| 0.757646
| 0.757646
| 0
| 0.076718
| 0.334704
| 12,402
| 297
| 257
| 41.757576
| 0.652648
| 0.017094
| 0
| 0.774648
| 0
| 0.007042
| 0.316861
| 0.07585
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.007042
| 0.003521
| 0
| 0.003521
| 0.295775
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c3b40094cdbb7d7b912972302831457b5b8b2b44
| 264
|
py
|
Python
|
jsonprclibveles/__init__.py
|
Borbaris161/jsonrpclibveles
|
489f2069a0924315a632c0af58f5082e3e30fdd2
|
[
"Apache-2.0"
] | null | null | null |
jsonprclibveles/__init__.py
|
Borbaris161/jsonrpclibveles
|
489f2069a0924315a632c0af58f5082e3e30fdd2
|
[
"Apache-2.0"
] | null | null | null |
jsonprclibveles/__init__.py
|
Borbaris161/jsonrpclibveles
|
489f2069a0924315a632c0af58f5082e3e30fdd2
|
[
"Apache-2.0"
] | 1
|
2019-06-28T12:32:43.000Z
|
2019-06-28T12:32:43.000Z
|
from jsonprclibveles.config import Config
config = Config.instance()
from jsonprclibveles.history import History
logs = History.instance()
from jsonprclibveles.jsonrpc import Server, MultiCall, Fault
from jsonprclibveles.jsonrpc import ProtocolError, loads, dumps
| 37.714286
| 63
| 0.840909
| 30
| 264
| 7.4
| 0.466667
| 0.342342
| 0.243243
| 0.288288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098485
| 264
| 6
| 64
| 44
| 0.932773
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7f1096b5c57ad9d7d884893baa97f9505eeecfb6
| 107
|
py
|
Python
|
Utils/py/naoth/naoth/log/__init__.py
|
BerlinUnited/NaoTH
|
02848ac10c16a5349f1735da8122a64d601a5c75
|
[
"ECL-2.0",
"Apache-2.0"
] | 15
|
2015-01-12T10:46:29.000Z
|
2022-03-28T05:13:14.000Z
|
Utils/py/naoth/naoth/log/__init__.py
|
BerlinUnited/NaoTH
|
02848ac10c16a5349f1735da8122a64d601a5c75
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-01-20T21:07:50.000Z
|
2020-01-22T14:00:28.000Z
|
Utils/py/naoth/naoth/log/__init__.py
|
BerlinUnited/NaoTH
|
02848ac10c16a5349f1735da8122a64d601a5c75
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2018-02-07T18:18:10.000Z
|
2019-10-15T17:01:41.000Z
|
from ._experimental_reader import *
from ._experimental_parser import *
from ._experimental_xabsl import *
| 26.75
| 35
| 0.831776
| 12
| 107
| 6.916667
| 0.5
| 0.578313
| 0.53012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11215
| 107
| 3
| 36
| 35.666667
| 0.873684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
7f1a8bfe5a3ecec7a4af5d80f731b5b876999cc1
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/98/77/ad/d4b81b1e7f467e009818d6572a8fabb1e0d6ade02f51c0236707607931
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.447917
| 0
| 96
| 1
| 96
| 96
| 0.447917
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6147aa931783def027236f243de14ef1bbc5a65e
| 37
|
py
|
Python
|
dlutils/preprocessing/__init__.py
|
fmi-basel/dl-utils
|
15bbb7672314d72abc7c3f7fc86655401cde5eb6
|
[
"MIT"
] | 26
|
2018-11-30T09:17:17.000Z
|
2020-11-07T01:53:07.000Z
|
idunn/instant_answer/__init__.py
|
QwantResearch/idunn
|
88b6862f1036187855b5541bbb6758ddd4df33c1
|
[
"Apache-2.0"
] | 38
|
2018-06-08T09:41:04.000Z
|
2020-12-07T17:39:12.000Z
|
idunn/instant_answer/__init__.py
|
Qwant/idunn
|
65582dfed732093778bf7c2998db1e2cd78255b8
|
[
"Apache-2.0"
] | 9
|
2018-05-18T13:07:00.000Z
|
2020-08-01T16:42:40.000Z
|
from .normalization import normalize
| 18.5
| 36
| 0.864865
| 4
| 37
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
614dca950e738511efd67bd07c116810f11eac32
| 1,973
|
py
|
Python
|
tests/espressodb_tests/espressodb_tests/customizations/migrations/0001_initial.py
|
callat-qcd/espressodb
|
70f88f33ba49f754ba240242c0653379ff269619
|
[
"BSD-3-Clause"
] | 8
|
2019-12-10T04:30:01.000Z
|
2020-10-30T09:40:22.000Z
|
tests/espressodb_tests/espressodb_tests/customizations/migrations/0001_initial.py
|
callat-qcd/espressodb
|
70f88f33ba49f754ba240242c0653379ff269619
|
[
"BSD-3-Clause"
] | 41
|
2019-10-23T00:26:25.000Z
|
2021-10-21T07:55:57.000Z
|
tests/espressodb_tests/espressodb_tests/customizations/migrations/0001_initial.py
|
callat-qcd/espressodb
|
70f88f33ba49f754ba240242c0653379ff269619
|
[
"BSD-3-Clause"
] | 3
|
2020-01-09T21:29:09.000Z
|
2021-03-14T22:20:52.000Z
|
# Generated by Django 3.0.5 on 2020-04-17 11:33
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated migration: creates the ``CB`` and ``CA`` models.

    Both models share the same base columns (id, last_modified, tag, user);
    ``CB`` additionally carries a nullable integer ``value`` column.
    """

    # First migration of the app; there is no prior migration to depend on.
    initial = True

    dependencies = [
        # The ``user`` FK below targets whatever model AUTH_USER_MODEL names,
        # so this migration must run after that model's initial migration.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='CB',
            fields=[
                ('id', models.AutoField(help_text='Primary key for Base class.', primary_key=True, serialize=False)),
                ('last_modified', models.DateTimeField(auto_now=True, help_text='Date the class was last modified')),
                ('tag', models.CharField(blank=True, help_text='User defined tag for easy searches', max_length=200, null=True)),
                ('value', models.IntegerField(null=True)),
                ('user', models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                # Inherited Meta flag from the (abstract) base class; the
                # generated concrete models themselves are not abstract.
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='CA',
            fields=[
                ('id', models.AutoField(help_text='Primary key for Base class.', primary_key=True, serialize=False)),
                ('last_modified', models.DateTimeField(auto_now=True, help_text='Date the class was last modified')),
                ('tag', models.CharField(blank=True, help_text='User defined tag for easy searches', max_length=200, null=True)),
                ('user', models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 45.883721
| 247
| 0.62443
| 234
| 1,973
| 5.15812
| 0.358974
| 0.053024
| 0.059652
| 0.056338
| 0.737365
| 0.737365
| 0.737365
| 0.737365
| 0.737365
| 0.737365
| 0
| 0.014344
| 0.257983
| 1,973
| 42
| 248
| 46.97619
| 0.810109
| 0.022808
| 0
| 0.571429
| 1
| 0
| 0.227934
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.085714
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
61825a222d6e0ff5f779575f9563937a7dc74497
| 32
|
py
|
Python
|
src/perfprof/__init__.py
|
dmsteck/perfprof.py
|
bb813b234f39139e34e06d774fd866a6a189e0ad
|
[
"MIT"
] | 1
|
2021-07-09T11:40:36.000Z
|
2021-07-09T11:40:36.000Z
|
src/perfprof/__init__.py
|
dmsteck/perfprof.py
|
bb813b234f39139e34e06d774fd866a6a189e0ad
|
[
"MIT"
] | null | null | null |
src/perfprof/__init__.py
|
dmsteck/perfprof.py
|
bb813b234f39139e34e06d774fd866a6a189e0ad
|
[
"MIT"
] | 3
|
2021-09-12T16:23:25.000Z
|
2022-03-18T22:35:54.000Z
|
from .perfprof import perfprof
| 16
| 31
| 0.8125
| 4
| 32
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 32
| 1
| 32
| 32
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6185697a874dd9873ab43e9531adcc5499bed5b7
| 38,719
|
py
|
Python
|
pyfos/utils/extension/lan_flow_statistics_show.py
|
brocade/pyfos
|
33565cfc1401f5dc54a9a9fb70913c9a670cb322
|
[
"Apache-2.0"
] | 44
|
2017-11-17T12:03:11.000Z
|
2022-02-03T20:57:56.000Z
|
pyfos/utils/extension/lan_flow_statistics_show.py
|
brocade/pyfos
|
33565cfc1401f5dc54a9a9fb70913c9a670cb322
|
[
"Apache-2.0"
] | 13
|
2018-10-09T15:34:15.000Z
|
2022-02-24T20:03:17.000Z
|
pyfos/utils/extension/lan_flow_statistics_show.py
|
brocade/pyfos
|
33565cfc1401f5dc54a9a9fb70913c9a670cb322
|
[
"Apache-2.0"
] | 23
|
2017-12-14T18:08:33.000Z
|
2022-02-03T15:33:40.000Z
|
#!/usr/bin/env python3
# Copyright © 2019-2020 Broadcom. All rights reserved.
# The term “Broadcom” refers to Broadcom Inc. and/or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may also obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# lan_flow_statistics_show.py(pyGen v1.0.0)
"""
:mod:`lan_flow_statistics_show` - PyFOS util to show for\
lan_flow_statistics
******************************************************************************\
*******************************************************************************
The:mod:`lan_flow_statistics_show` PyFOS util to show for lan_flow_statistics
The LAN per-flow statistics.
lan_flow_statistics_show: usage
* Infrastructure Options:
* -i,--ipaddr=IPADDR: The IP address of the FOS switch.
* -L,--login=LOGIN: The login name.
* -P,--password=PASSWORD: The password.
* -f,--vfid=VFID: The VFID to which the request is directed.
* -s,--secured=MODE: The HTTPS mode "self" or "CA" [Optional].
* -v,--verbose: Verbose mode [Optional].
* -a,--authtoken: AuthToken value or AuthTokenManager config\
file[OPTIONAL].
* -z,--nosession: Sessionless authentication based login[OPTIONAL].
* --nocredential: No credential to be sent in the request[OPTIONAL].
* Util Script Options:
* --out-bytes-wan-compression=OUT-BYTES-WAN-COMPRESSION: Total bytes sent\
compression engine on WAN.
* --in-packets-lan-session-manager=IN-PACKETS-LAN-SESSION-MANAGER: Total\
packets received by LAN session manager.
* --vlan-priority=VLAN-PRIORITY: Specifies the VLAN priority associated\
with the flow.
* --traffic-control-list-name=TRAFFIC-CONTROL-LIST-NAME: The\
traffic-control-list name matching the flow filter to allow the\
traffic.
* --out-bytes-lan-session-manager=OUT-BYTES-LAN-SESSION-MANAGER: Total\
bytes sent by LAN session manager.
* --tcp-retransmits=TCP-RETRANSMITS: TCP retransmits /lost packets.
* --zero-window-count=ZERO-WINDOW-COUNT: The count of TCP zero window\
encountered.
* --active-flow=ACTIVE-FLOW: Indicates that LAN flow is currently active. \
true: The flow is active. false: The flow is not active.
* --in-drops-lan-session-manager=IN-DROPS-LAN-SESSION-MANAGER: The number\
of drops at the ingress from LAN session manager.
* --destination-port=DESTINATION-PORT: Remote destination port number of\
the LAN flow.
* --in-bytes-average=IN-BYTES-AVERAGE: The throughput in bps for packets\
received via an extension tunnel over WAN per 30s average.
* --destination-ip-address=DESTINATION-IP-ADDRESS: Destination IP address\
corresponding to the LAN flow.
* --in-bytes-lan-session-manager=IN-BYTES-LAN-SESSION-MANAGER: Total bytes\
received by LAN session manager.
* --in-bytes-lan-compression=IN-BYTES-LAN-COMPRESSION: Total bytes\
received by compression engine from LAN.
* --out-tcp-packets=OUT-TCP-PACKETS: Total TCP packets sent.
* --duplicate-acknowledgement=DUPLICATE-ACKNOWLEDGEMENT: TCP duplicate ACK\
received.
* --source-port=SOURCE-PORT: Source port number of the LAN flow.
* --dp-id=DP-ID: Extension Data Path Processor ID associated with flow.\
Based on platform either it will have a single DP or dual DP. In case\
of single DP only DP0 is supported, and in case of dual DP both DP0\
and DP1 are supported 0 : DP0 1 : DP1.
* --in-bytes-wan-compression=IN-BYTES-WAN-COMPRESSION: Total bytes\
received by compression engine from WAN.
* --in-tcp-bytes=IN-TCP-BYTES: Total bytes received.
* --vlan-id=VLAN-ID: Specifies the VLAN ID associated with the flow. When\
not set, this value will show up as 0.
* --out-packets-lan-session-manager=OUT-PACKETS-LAN-SESSION-MANAGER: Total\
packets sent by LAN session manager.
* --zero-window-maximum-duration=ZERO-WINDOW-MAXIMUM-DURATION: The maximum\
of zero window duration encountered.
* --source-ip-address=SOURCE-IP-ADDRESS: Source IP address corresponding\
to the LAN flow.
* --fast-retransmits=FAST-RETRANSMITS: TCP fast retransmits count.
* --rtt=RTT: round trip time.
* --ve-port=VE-PORT: The VE port of the extension-tunnel interface.
* --hcl-flow=HCL-FLOW: Indicates that LAN flow is in HCL. true: The flow\
is in HCL. false: The flow is not in HCL.
* --dscp=DSCP: DSCP value for the LAN flow.
* --slot=SLOT: In case of non-chassis system, the slot number is always 0.\
In case of chassis system, it is the slot number of chassis in which\
the extension blade is inserted in. In case of chassis, slot number is\
non-zero value.
* --crc-errors=CRC-ERRORS: Number of CRC errors encountered.
* --lan-interface=LAN-INTERFACE: The interface corresponding to the\
traffic. This could be either a GE port or a LAG name associated with\
the LAN flow.
* --local-host-mss=LOCAL-HOST-MSS: The local-host-mss is the MSS of the\
TCP connection at the LAN ingress side connected host.
* --start-time=START-TIME: Indicates the LAN flow start time.
* --out-tcp-bytes=OUT-TCP-BYTES: Total bytes sent.
* --out-bytes-average=OUT-BYTES-AVERAGE: The throughput in bps for packets\
sent over the extension tunnel on WAN per 30s average.
* --slow-retransmits=SLOW-RETRANSMITS: TCP slow retransmits count.
* --out-drops-lan-session-manager=OUT-DROPS-LAN-SESSION-MANAGER: The\
number of drops at the egress from LAN session manager.
* --out-bytes-lan-compression=OUT-BYTES-LAN-COMPRESSION: Total bytes sent\
by compression engine on LAN.
* --flow-index=FLOW-INDEX: flow index associated with the LAN flow. This\
is a dynamic index associated with the LAN flow. Depending on the LAN\
flow behavior the index may change and also can get reused after some\
time but at any given time they will be unique.
* --qos=QOS: The IP priority QOS associated with the flow.
* --in-tcp-packets=IN-TCP-PACKETS: Total TCP packets received.
* --tcp-out-of-order-packets=TCP-OUT-OF-ORDER-PACKETS: TCP total out of\
order packets.
* --end-time=END-TIME: Indicates the LAN flow end time.
* --remote-host-mss=REMOTE-HOST-MSS: The remote-host-mss is the MSS of the\
TCP connection at peer extension tunnel endpoint connected host to its\
the LAN ingress side.
* --protocol=PROTOCOL: Describes that the Layer 4 protocol of the flow.
* Output:
* Python dictionary content with RESTCONF response data.
.. function:: lan_flow_statistics_show.show_lan_flow_statistics(session,\
out_bytes_wan_compression, in_packets_lan_session_manager, vlan_priority,\
traffic_control_list_name, out_bytes_lan_session_manager, tcp_retransmits,\
zero_window_count, active_flow, in_drops_lan_session_manager,\
destination_port, in_bytes_average, destination_ip_address,\
in_bytes_lan_session_manager, in_bytes_lan_compression, out_tcp_packets,\
duplicate_acknowledgement, source_port, dp_id, in_bytes_wan_compression,\
in_tcp_bytes, vlan_id, out_packets_lan_session_manager,\
zero_window_maximum_duration, source_ip_address, fast_retransmits, rtt,\
mapped_tunnel_ve_port, hcl_flow, dscp, slot, crc_errors, lan_interface,\
local_host_mss, start_time, out_tcp_bytes, out_bytes_average,\
slow_retransmits, out_drops_lan_session_manager, out_bytes_lan_compression,\
flow_index, mapped_tunnel_qos, in_tcp_packets, tcp_out_of_order_packets,\
end_time, remote_host_mss, protocol)
*Show lan_flow_statistics*
Example Usage of the Method::
ret = lan_flow_statistics_show.show_lan_flow_statistics(session,\
out_bytes_wan_compression, in_packets_lan_session_manager, vlan_priority,\
traffic_control_list_name, out_bytes_lan_session_manager, tcp_retransmits,\
zero_window_count, active_flow, in_drops_lan_session_manager,\
destination_port, in_bytes_average, destination_ip_address,\
in_bytes_lan_session_manager, in_bytes_lan_compression, out_tcp_packets,\
duplicate_acknowledgement, source_port, dp_id, in_bytes_wan_compression,\
in_tcp_bytes, vlan_id, out_packets_lan_session_manager,\
zero_window_maximum_duration, source_ip_address, fast_retransmits, rtt,\
mapped_tunnel_ve_port, hcl_flow, dscp, slot, crc_errors, lan_interface,\
local_host_mss, start_time, out_tcp_bytes, out_bytes_average,\
slow_retransmits, out_drops_lan_session_manager, out_bytes_lan_compression,\
flow_index, mapped_tunnel_qos, in_tcp_packets, tcp_out_of_order_packets,\
end_time, remote_host_mss, protocol)
print(ret)
Details::
lan_flow_statisticsObj = lan_flow_statistics()
\
lan_flow_statisticsObj.set_out_bytes_wan_compression(\
out_bytes_wan_compression)
\
lan_flow_statisticsObj.set_in_packets_lan_session_manager(\
in_packets_lan_session_manager)
lan_flow_statisticsObj.set_vlan_priority(vlan_priority)
\
lan_flow_statisticsObj.set_traffic_control_list_name(\
traffic_control_list_name)
\
lan_flow_statisticsObj.set_out_bytes_lan_session_manager(\
out_bytes_lan_session_manager)
lan_flow_statisticsObj.set_tcp_retransmits(tcp_retransmits)
lan_flow_statisticsObj.set_zero_window_count(zero_window_count)
lan_flow_statisticsObj.set_active_flow(active_flow)
\
lan_flow_statisticsObj.set_in_drops_lan_session_manager(\
in_drops_lan_session_manager)
lan_flow_statisticsObj.set_destination_port(destination_port)
lan_flow_statisticsObj.set_in_bytes_average(in_bytes_average)
\
lan_flow_statisticsObj.set_destination_ip_address(destination_ip_address)
\
lan_flow_statisticsObj.set_in_bytes_lan_session_manager(\
in_bytes_lan_session_manager)
\
lan_flow_statisticsObj.set_in_bytes_lan_compression(\
in_bytes_lan_compression)
lan_flow_statisticsObj.set_out_tcp_packets(out_tcp_packets)
\
lan_flow_statisticsObj.set_duplicate_acknowledgement(\
duplicate_acknowledgement)
lan_flow_statisticsObj.set_source_port(source_port)
lan_flow_statisticsObj.set_dp_id(dp_id)
\
lan_flow_statisticsObj.set_in_bytes_wan_compression(\
in_bytes_wan_compression)
lan_flow_statisticsObj.set_in_tcp_bytes(in_tcp_bytes)
lan_flow_statisticsObj.set_vlan_id(vlan_id)
\
lan_flow_statisticsObj.set_out_packets_lan_session_manager(\
out_packets_lan_session_manager)
\
lan_flow_statisticsObj.set_zero_window_maximum_duration(\
zero_window_maximum_duration)
lan_flow_statisticsObj.set_source_ip_address(source_ip_address)
lan_flow_statisticsObj.set_fast_retransmits(fast_retransmits)
lan_flow_statisticsObj.set_rtt(rtt)
\
lan_flow_statisticsObj.set_mapped_tunnel_ve_port(mapped_tunnel_ve_port)
lan_flow_statisticsObj.set_hcl_flow(hcl_flow)
lan_flow_statisticsObj.set_dscp(dscp)
lan_flow_statisticsObj.set_slot(slot)
lan_flow_statisticsObj.set_crc_errors(crc_errors)
lan_flow_statisticsObj.set_lan_interface(lan_interface)
lan_flow_statisticsObj.set_local_host_mss(local_host_mss)
lan_flow_statisticsObj.set_start_time(start_time)
lan_flow_statisticsObj.set_out_tcp_bytes(out_tcp_bytes)
lan_flow_statisticsObj.set_out_bytes_average(out_bytes_average)
lan_flow_statisticsObj.set_slow_retransmits(slow_retransmits)
\
lan_flow_statisticsObj.set_out_drops_lan_session_manager(\
out_drops_lan_session_manager)
\
lan_flow_statisticsObj.set_out_bytes_lan_compression(\
out_bytes_lan_compression)
lan_flow_statisticsObj.set_flow_index(flow_index)
lan_flow_statisticsObj.set_mapped_tunnel_qos(mapped_tunnel_qos)
lan_flow_statisticsObj.set_in_tcp_packets(in_tcp_packets)
\
lan_flow_statisticsObj.set_tcp_out_of_order_packets(\
tcp_out_of_order_packets)
lan_flow_statisticsObj.set_end_time(end_time)
lan_flow_statisticsObj.set_remote_host_mss(remote_host_mss)
lan_flow_statisticsObj.set_protocol(protocol)
ret = _show_lan_flow_statistics(session, lan_flow_statisticsObj)
print(ret)
**Inputs**
:param session: The session returned by the login.
:param out_bytes_wan_compression: Total bytes sent compression engine on\
WAN.
:param in_packets_lan_session_manager: Total packets received by LAN\
session manager.
:param vlan_priority: Specifies the VLAN priority associated with the\
flow.
:param traffic_control_list_name: The traffic-control-list name matching\
the flow filter to allow the traffic.
:param out_bytes_lan_session_manager: Total bytes sent by LAN session\
manager.
:param tcp_retransmits: TCP retransmits /lost packets.
:param zero_window_count: The count of TCP zero window encountered.
:param active_flow: Indicates that LAN flow is currently active. true:\
The flow is active. false: The flow is not active.
:param in_drops_lan_session_manager: The number of drops at the ingress\
from LAN session manager.
:param destination_port: Remote destination port number of the LAN flow.
:param in_bytes_average: The throughput in bps for packets received via an\
extension tunnel over WAN per 30s average.
:param destination_ip_address: Destination IP address corresponding to the\
LAN flow.
:param in_bytes_lan_session_manager: Total bytes received by LAN session\
manager.
:param in_bytes_lan_compression: Total bytes received by compression\
engine from LAN.
:param out_tcp_packets: Total TCP packets sent.
:param duplicate_acknowledgement: TCP duplicate ACK received.
:param source_port: Source port number of the LAN flow.
:param dp_id: Extension Data Path Processor ID associated with flow. Based\
on platform either it will have a single DP or dual DP. In case of\
single DP only DP0 is supported, and in case of dual DP both DP0 and\
DP1 are supported 0 : DP0 1 : DP1.
:param in_bytes_wan_compression: Total bytes received by compression\
engine from WAN.
:param in_tcp_bytes: Total bytes received.
:param vlan_id: Specifies the VLAN ID associated with the flow. When not\
set, this value will show up as 0.
:param out_packets_lan_session_manager: Total packets sent by LAN session\
manager.
:param zero_window_maximum_duration: The maximum of zero window duration\
encountered.
:param source_ip_address: Source IP address corresponding to the LAN flow.
:param fast_retransmits: TCP fast retransmits count.
:param rtt: round trip time.
:param mapped_tunnel_ve_port: The VE port of the extension-tunnel\
interface.
:param hcl_flow: Indicates that LAN flow is in HCL. true: The flow is in\
HCL. false: The flow is not in HCL.
:param dscp: DSCP value for the LAN flow.
:param slot: In case of non-chassis system, the slot number is always 0.\
In case of chassis system, it is the slot number of chassis in which\
the extension blade is inserted in. In case of chassis, slot number is\
non-zero value.
:param crc_errors: Number of CRC errors encountered.
:param lan_interface: The interface corresponding to the traffic. This\
could be either a GE port or a LAG name associated with the LAN flow.
:param local_host_mss: The local-host-mss is the MSS of the TCP connection\
at the LAN ingress side connected host.
:param start_time: Indicates the LAN flow start time.
:param out_tcp_bytes: Total bytes sent.
:param out_bytes_average: The throughput in bps for packets sent over the\
extension tunnel on WAN per 30s average.
:param slow_retransmits: TCP slow retransmits count.
:param out_drops_lan_session_manager: The number of drops at the egress\
from LAN session manager.
:param out_bytes_lan_compression: Total bytes sent by compression engine\
on LAN.
:param flow_index: flow index associated with the LAN flow. This is a\
dynamic index associated with the LAN flow. Depending on the LAN flow\
behavior the index may change and also can get reused after some time\
but at any given time they will be unique.
:param mapped_tunnel_qos: The IP priority QOS associated with the flow.
:param in_tcp_packets: Total TCP packets received.
:param tcp_out_of_order_packets: TCP total out of order packets.
:param end_time: Indicates the LAN flow end time.
:param remote_host_mss: The remote-host-mss is the MSS of the TCP\
    connection at the peer extension-tunnel endpoint host connected to its\
    LAN ingress side.
:param protocol: The Layer 4 protocol of the flow.
**Output**
:rtype: None or one/more instance of class lan_flow_statistics on Success \
or a dictionary with error.
"""
# Start utils imports
import sys
from pyfos import pyfos_auth
from pyfos import pyfos_util
from pyfos.pyfos_brocade_extension import lan_flow_statistics
from pyfos.utils import brcd_util
# End module imports
# Attribute suffixes used to filter retrieved lan_flow_statistics entries.
# Order mirrors the comparison order of the original implementation; each
# name X is accessed through the generated peek_X() accessor.
_LAN_FLOW_FILTER_ATTRIBUTES = (
    "out_bytes_wan_compression",
    "in_packets_lan_session_manager",
    "vlan_priority",
    "traffic_control_list_name",
    "out_bytes_lan_session_manager",
    "tcp_retransmits",
    "zero_window_count",
    "active_flow",
    "in_drops_lan_session_manager",
    "destination_port",
    "in_bytes_average",
    "destination_ip_address",
    "in_bytes_lan_session_manager",
    "in_bytes_lan_compression",
    "out_tcp_packets",
    "duplicate_acknowledgement",
    "source_port",
    "dp_id",
    "in_bytes_wan_compression",
    "in_tcp_bytes",
    "vlan_id",
    "out_packets_lan_session_manager",
    "zero_window_maximum_duration",
    "source_ip_address",
    "fast_retransmits",
    "rtt",
    "mapped_tunnel_ve_port",
    "hcl_flow",
    "dscp",
    "slot",
    "crc_errors",
    "lan_interface",
    "local_host_mss",
    "start_time",
    "out_tcp_bytes",
    "out_bytes_average",
    "slow_retransmits",
    "out_drops_lan_session_manager",
    "out_bytes_lan_compression",
    "flow_index",
    "mapped_tunnel_qos",
    "in_tcp_packets",
    "tcp_out_of_order_packets",
    "end_time",
    "remote_host_mss",
    "protocol",
)


def _show_lan_flow_statistics(session, lan_flow_statisticsObj):
    """Retrieve lan_flow_statistics entries and filter them.

    :param session: The session handle returned by login.
    :param lan_flow_statisticsObj: A lan_flow_statistics instance whose
        non-None attributes act as equality filters.
    :return: A list of matching lan_flow_statistics instances, or the raw
        (non-list, non-instance) result from ``get`` — e.g. an error
        dictionary — passed through unchanged.
    """
    objlist = lan_flow_statistics.get(session)
    # A single-entry response comes back as a bare instance; normalize it.
    if isinstance(objlist, lan_flow_statistics):
        objlist = [objlist]
    if not isinstance(objlist, list):
        # Anything else (e.g. an error dictionary) is returned as-is,
        # matching the original fall-through behavior.
        return objlist
    matches = list()
    for entry in objlist:
        for attribute in _LAN_FLOW_FILTER_ATTRIBUTES:
            wanted = getattr(lan_flow_statisticsObj, "peek_" + attribute)()
            # A None filter value means "don't care" for that attribute.
            if wanted is not None and \
                    wanted != getattr(entry, "peek_" + attribute)():
                break
        else:
            # Entry matched every requested filter.
            matches.append(entry)
    return matches
def show_lan_flow_statistics(session, out_bytes_wan_compression=None,
                             in_packets_lan_session_manager=None,
                             vlan_priority=None,
                             traffic_control_list_name=None,
                             out_bytes_lan_session_manager=None,
                             tcp_retransmits=None, zero_window_count=None,
                             active_flow=None,
                             in_drops_lan_session_manager=None,
                             destination_port=None, in_bytes_average=None,
                             destination_ip_address=None,
                             in_bytes_lan_session_manager=None,
                             in_bytes_lan_compression=None,
                             out_tcp_packets=None,
                             duplicate_acknowledgement=None,
                             source_port=None, dp_id=None,
                             in_bytes_wan_compression=None,
                             in_tcp_bytes=None, vlan_id=None,
                             out_packets_lan_session_manager=None,
                             zero_window_maximum_duration=None,
                             source_ip_address=None, fast_retransmits=None,
                             rtt=None, mapped_tunnel_ve_port=None,
                             hcl_flow=None, dscp=None, slot=None,
                             crc_errors=None, lan_interface=None,
                             local_host_mss=None, start_time=None,
                             out_tcp_bytes=None, out_bytes_average=None,
                             slow_retransmits=None,
                             out_drops_lan_session_manager=None,
                             out_bytes_lan_compression=None, flow_index=None,
                             mapped_tunnel_qos=None, in_tcp_packets=None,
                             tcp_out_of_order_packets=None, end_time=None,
                             remote_host_mss=None, protocol=None):
    """Build a lan_flow_statistics filter object and fetch matching entries.

    Every keyword argument corresponds to one lan_flow_statistics attribute;
    a None value (the default) leaves that attribute unfiltered.  See the
    module docstring for per-attribute descriptions.

    :param session: The session handle returned by login.
    :return: The result of :func:`_show_lan_flow_statistics` — a list of
        matching instances on success or an error dictionary.
    """
    # Map attribute suffix -> supplied value, then drive the generated
    # set_<attribute>() mutators in a loop instead of 46 explicit calls.
    # As in the original, every setter is invoked, including with None.
    attribute_values = {
        "out_bytes_wan_compression": out_bytes_wan_compression,
        "in_packets_lan_session_manager": in_packets_lan_session_manager,
        "vlan_priority": vlan_priority,
        "traffic_control_list_name": traffic_control_list_name,
        "out_bytes_lan_session_manager": out_bytes_lan_session_manager,
        "tcp_retransmits": tcp_retransmits,
        "zero_window_count": zero_window_count,
        "active_flow": active_flow,
        "in_drops_lan_session_manager": in_drops_lan_session_manager,
        "destination_port": destination_port,
        "in_bytes_average": in_bytes_average,
        "destination_ip_address": destination_ip_address,
        "in_bytes_lan_session_manager": in_bytes_lan_session_manager,
        "in_bytes_lan_compression": in_bytes_lan_compression,
        "out_tcp_packets": out_tcp_packets,
        "duplicate_acknowledgement": duplicate_acknowledgement,
        "source_port": source_port,
        "dp_id": dp_id,
        "in_bytes_wan_compression": in_bytes_wan_compression,
        "in_tcp_bytes": in_tcp_bytes,
        "vlan_id": vlan_id,
        "out_packets_lan_session_manager": out_packets_lan_session_manager,
        "zero_window_maximum_duration": zero_window_maximum_duration,
        "source_ip_address": source_ip_address,
        "fast_retransmits": fast_retransmits,
        "rtt": rtt,
        "mapped_tunnel_ve_port": mapped_tunnel_ve_port,
        "hcl_flow": hcl_flow,
        "dscp": dscp,
        "slot": slot,
        "crc_errors": crc_errors,
        "lan_interface": lan_interface,
        "local_host_mss": local_host_mss,
        "start_time": start_time,
        "out_tcp_bytes": out_tcp_bytes,
        "out_bytes_average": out_bytes_average,
        "slow_retransmits": slow_retransmits,
        "out_drops_lan_session_manager": out_drops_lan_session_manager,
        "out_bytes_lan_compression": out_bytes_lan_compression,
        "flow_index": flow_index,
        "mapped_tunnel_qos": mapped_tunnel_qos,
        "in_tcp_packets": in_tcp_packets,
        "tcp_out_of_order_packets": tcp_out_of_order_packets,
        "end_time": end_time,
        "remote_host_mss": remote_host_mss,
        "protocol": protocol,
    }
    lan_flow_statisticsObj = lan_flow_statistics()
    for attribute, value in attribute_values.items():
        getattr(lan_flow_statisticsObj, "set_" + attribute)(value)
    return _show_lan_flow_statistics(session, lan_flow_statisticsObj)
def validate(lan_flow_statisticsObj):
    """Validate user input for the lan_flow_statistics show operation.

    Every lan_flow_statistics attribute is an optional filter, so no input
    is mandatory and validation always succeeds.

    :param lan_flow_statisticsObj: The lan_flow_statistics instance built
        from the command-line arguments (unused).
    :return: 0 (success) unconditionally.
    """
    # The original implementation evaluated a 46-term chain of peek_*()
    # calls but returned 0 on both branches, so the whole check was dead
    # code.  Keep the observable behavior (no mandatory attributes)
    # without it.  (The peek_* accessors are assumed side-effect-free
    # getters — consistent with their use as pure reads elsewhere in
    # this file.)
    return 0
def main(argv):
    """Command-line entry point: parse args, query, print, and log out.

    :param argv: Command-line argument list (without the program name).
    """
    # Attribute names accepted as CLI filter options, in the order the
    # original tool declared them.
    # NOTE(review): "mapped_tunnel" has no matching set_/peek_ accessor in
    # this file (only mapped_tunnel_ve_port / mapped_tunnel_qos do) —
    # confirm it is an intentional container-node option.
    filters = [
        "out_bytes_wan_compression",
        "in_packets_lan_session_manager",
        "vlan_priority",
        "traffic_control_list_name",
        "out_bytes_lan_session_manager",
        "tcp_retransmits",
        "zero_window_count",
        "active_flow",
        "in_drops_lan_session_manager",
        "destination_port",
        "in_bytes_average",
        "destination_ip_address",
        "in_bytes_lan_session_manager",
        "in_bytes_lan_compression",
        "out_tcp_packets",
        "duplicate_acknowledgement",
        "source_port",
        "mapped_tunnel",
        "dp_id",
        "in_bytes_wan_compression",
        "in_tcp_bytes",
        "vlan_id",
        "out_packets_lan_session_manager",
        "zero_window_maximum_duration",
        "source_ip_address",
        "fast_retransmits",
        "rtt",
        "mapped_tunnel_ve_port",
        "hcl_flow",
        "dscp",
        "slot",
        "crc_errors",
        "lan_interface",
        "local_host_mss",
        "start_time",
        "out_tcp_bytes",
        "out_bytes_average",
        "slow_retransmits",
        "out_drops_lan_session_manager",
        "out_bytes_lan_compression",
        "flow_index",
        "mapped_tunnel_qos",
        "in_tcp_packets",
        "tcp_out_of_order_packets",
        "end_time",
        "remote_host_mss",
        "protocol",
    ]
    cli_inputs = brcd_util.parse(argv, lan_flow_statistics, filters, validate)
    auth_session = brcd_util.getsession(cli_inputs)
    # Query with the parsed utility object and print the (possibly
    # filtered) result before releasing the session.
    statistics = _show_lan_flow_statistics(
        auth_session, cli_inputs['utilobject'])
    pyfos_util.response_print(statistics)
    pyfos_auth.logout(auth_session)


if __name__ == "__main__":
    main(sys.argv[1:])
| 52.46477
| 79
| 0.699734
| 5,094
| 38,719
| 4.927562
| 0.061837
| 0.080594
| 0.18804
| 0.131947
| 0.865782
| 0.841401
| 0.803076
| 0.755587
| 0.676945
| 0.601251
| 0
| 0.001574
| 0.228802
| 38,719
| 737
| 80
| 52.535957
| 0.839015
| 0.439242
| 0
| 0.160526
| 0
| 0
| 0.037532
| 0.019513
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010526
| false
| 0
| 0.013158
| 0
| 0.036842
| 0.002632
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
61910dccf21a1aa4ad3a5bb0f0360a32ac8e7f02
| 115
|
py
|
Python
|
Ethanyan_mall/Ethanyan_mall/apps/goods/tests.py
|
Jaylen0829/E-commerce-sites
|
faa4443c11d1534642c8dc9f8262f818f489c554
|
[
"MIT"
] | 17
|
2019-01-22T00:14:40.000Z
|
2022-02-03T12:29:49.000Z
|
Ethanyan_mall/Ethanyan_mall/apps/goods/tests.py
|
Jaylen0829/E-commerce-sites
|
faa4443c11d1534642c8dc9f8262f818f489c554
|
[
"MIT"
] | 1
|
2020-06-28T15:16:38.000Z
|
2020-08-03T15:34:14.000Z
|
Ethanyan_mall/Ethanyan_mall/apps/goods/tests.py
|
EthanYan6/E-commerce-sites
|
faa4443c11d1534642c8dc9f8262f818f489c554
|
[
"MIT"
] | 11
|
2020-02-24T00:22:08.000Z
|
2022-03-26T17:03:40.000Z
|
from django.test import TestCase
from django.core.files.storage import FileSystemStorage
# Create your tests here.
| 28.75
| 55
| 0.834783
| 16
| 115
| 6
| 0.8125
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113043
| 115
| 3
| 56
| 38.333333
| 0.941176
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
61a636757fd76f3de493f20cbc5e957624af9e38
| 77
|
py
|
Python
|
tests/common/ping/main.py
|
yojagad/azure-functions-python-worker
|
d5a1587a4ccf56af64f211a64f0b7a3d6cf976c9
|
[
"MIT"
] | null | null | null |
tests/common/ping/main.py
|
yojagad/azure-functions-python-worker
|
d5a1587a4ccf56af64f211a64f0b7a3d6cf976c9
|
[
"MIT"
] | null | null | null |
tests/common/ping/main.py
|
yojagad/azure-functions-python-worker
|
d5a1587a4ccf56af64f211a64f0b7a3d6cf976c9
|
[
"MIT"
] | null | null | null |
import azure.functions as func
def main(req: func.HttpRequest):
return
| 12.833333
| 32
| 0.74026
| 11
| 77
| 5.181818
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 77
| 5
| 33
| 15.4
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
9cea991f333e6c86dc80eefec9552687360de60e
| 137
|
py
|
Python
|
dcp/__init__.py
|
kvh/dcp
|
1332b0fa44e755202d5f13342e834fe165b36ca5
|
[
"BSD-3-Clause"
] | 3
|
2021-04-13T22:12:34.000Z
|
2021-07-17T19:01:08.000Z
|
dcp/__init__.py
|
kvh/dcp
|
1332b0fa44e755202d5f13342e834fe165b36ca5
|
[
"BSD-3-Clause"
] | 1
|
2021-04-13T22:12:21.000Z
|
2021-04-13T22:12:21.000Z
|
dcp/__init__.py
|
kvh/dcp
|
1332b0fa44e755202d5f13342e834fe165b36ca5
|
[
"BSD-3-Clause"
] | null | null | null |
from dcp.data_copy import *
from dcp.data_format import *
from dcp.storage import *
from .logging import * # Disable logger by default
| 22.833333
| 51
| 0.766423
| 21
| 137
| 4.904762
| 0.571429
| 0.203884
| 0.213592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167883
| 137
| 5
| 52
| 27.4
| 0.903509
| 0.182482
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
1409eca370e7ee40e270168bfa913fcfff9f11f1
| 5,635
|
py
|
Python
|
tests/test_div_shortcut.py
|
Pitmairen/hamlish-jinja
|
f8fdbddf2f444124c6fc69d1eb11603da2838093
|
[
"BSD-3-Clause"
] | 45
|
2015-04-20T20:17:24.000Z
|
2022-02-13T19:31:53.000Z
|
tests/test_div_shortcut.py
|
Pitmairen/hamlish-jinja
|
f8fdbddf2f444124c6fc69d1eb11603da2838093
|
[
"BSD-3-Clause"
] | 3
|
2016-03-31T13:11:15.000Z
|
2018-10-09T02:00:24.000Z
|
tests/test_div_shortcut.py
|
Pitmairen/hamlish-jinja
|
f8fdbddf2f444124c6fc69d1eb11603da2838093
|
[
"BSD-3-Clause"
] | 4
|
2015-10-04T15:27:40.000Z
|
2019-06-13T11:47:00.000Z
|
# -*- coding: utf-8 -*-
import unittest
from hamlish_jinja import Hamlish, Output
import testing_base
class TestSyntax(testing_base.TestCase):
def setUp(self):
self.hamlish = Hamlish(
Output(indent_string=' ', newline_string='\n'),
use_div_shortcut=True)
def test_div_class_shortcut(self):
s = self._h('''
.test
Test
''')
r = '''\
<div class="test">
Test
</div>\
'''
self.assertEqual(s, r)
def test_div_class_shortcut_inline_data(self):
s = self._h('''.test << Test''')
r = '''<div class="test">Test</div>'''
self.assertEqual(s, r)
def test_div_id_shortcut(self):
s = self._h('''
#test
Test
''')
r = '''\
<div id="test">
Test
</div>\
'''
self.assertEqual(s, r)
def test_div_id_shortcut_inline_data(self):
s = self._h('''#test << Test''')
r = '''<div id="test">Test</div>'''
self.assertEqual(s, r)
def test_div_multiple_class_shortcut(self):
s = self._h('''
.test.test2
Test
''')
r = '''\
<div class="test test2">
Test
</div>\
'''
self.assertEqual(s, r)
def test_div_multiple_class_shortcut_inline_data(self):
s = self._h('''.test.test2 << Test''')
r = '''<div class="test test2">Test</div>'''
self.assertEqual(s, r)
def test_div_multiple_id_shortcut(self):
s = self._h('''
#test#test2
Test
''')
r = '''\
<div id="test test2">
Test
</div>\
'''
self.assertEqual(s, r)
def test_div_multiple_id_shortcut_inline_data(self):
s = self._h('''#test#test2 << Test''')
r = '''<div id="test test2">Test</div>'''
self.assertEqual(s, r)
def test_div_mixed_class_id_shortcut(self):
s = self._h('''
#test.test
Test
.test#test
Test
.test#test.test2
Test
#test.test#test2
Test
#test#test2.test#test3.test2
Test
''')
r = '''\
<div id="test" class="test">
Test
</div>
<div class="test" id="test">
Test
</div>
<div class="test test2" id="test">
Test
</div>
<div id="test test2" class="test">
Test
</div>
<div id="test test2 test3" class="test test2">
Test
</div>\
'''
self.assertEqual(s, r)
def test_div_multiple_id_shortcut_inline_data(self):
s = self._h('''
#test.test << Test
.test#test << Test
.test.test2#test << Test
.test#test.test2 << Test
''')
r = '''\
<div id="test" class="test">Test</div>
<div class="test" id="test">Test</div>
<div class="test test2" id="test">Test</div>
<div class="test test2" id="test">Test</div>\
'''
self.assertEqual(s, r)
self.assertEqual(s, r)
def test_mixed_shortcut_normal(self):
s = self._h('''
#test class="test"
Test
''')
r = '''\
<div id="test" class="test">
Test
</div>'''
self.assertEqual(s, r)
def test_mixed_shortcut_normal2(self):
s = self._h('''
.test.test2 id="test"
Test
''')
r = '''\
<div class="test test2" id="test">
Test
</div>'''
self.assertEqual(s, r)
def test_mixed_shortcut_normal_inline_data(self):
s = self._h('''#test class="test" << Test''')
r = '''<div id="test" class="test">Test</div>'''
self.assertEqual(s, r)
def test_mixed_shortcut_normal_inline_data2(self):
s = self._h('''.test.test2 id="test" << Test''')
r = '''<div class="test test2" id="test">Test</div>'''
self.assertEqual(s, r)
def test_shortcut_parenthetic_normal(self):
s = self._h('.test(foo="bar")\n Test')
r = '<div class="test" foo="bar">\n Test\n</div>'
self.assertEqual(s, r)
def test_shortcut_parenthetic_inline(self):
s = self._h('.test(foo="bar") << Test')
r = '<div class="test" foo="bar">Test</div>'
self.assertEqual(s, r)
def test_shortcut_parenthetic_normal2(self):
s = self._h('.test.test-1(foo="bar")\n Test')
r = '<div class="test test-1" foo="bar">\n Test\n</div>'
self.assertEqual(s, r)
def test_shortcut_parenthetic_inline2(self):
s = self._h('.test.test-1(foo="bar") << Test')
r = '<div class="test test-1" foo="bar">Test</div>'
self.assertEqual(s, r)
def test_shortcut_parenthetic_inline3(self):
s = self._h('#foo.test.test-1(foo="bar") << Test')
r = '<div id="foo" class="test test-1" foo="bar">Test</div>'
self.assertEqual(s, r)
def test_shortcut_parenthetic_self_closing(self):
s = self._h('.test(foo="bar").')
r = '<div class="test" foo="bar" />'
self.assertEqual(s, r)
def test_shortcut_parenthetic_self_closing2(self):
s = self._h('.test.test-1(foo="bar").')
r = '<div class="test test-1" foo="bar" />'
self.assertEqual(s, r)
def test_shortcut_parenthetic_self_closing3(self):
s = self._h('#foo.test.test-1(foo="bar").')
r = '<div id="foo" class="test test-1" foo="bar" />'
self.assertEqual(s, r)
def test_nested_with_shortcuts(self):
s = self._h('''
%p -> %span.test
Test
''')
r = '''\
<p><span class="test">
Test
</span></p>'''
self.assertEqual(s, r)
def test_nested_with_shortcuts2(self):
s = self._h('''
%p -> %span.test << Test
''')
r = '''\
<p><span class="test">Test</span></p>'''
self.assertEqual(s, r)
def test_nested_with_shortcuts3(self):
s = self._h('''
%p -> %span.test id="test" -> Test
''')
r = '''\
<p><span class="test" id="test">Test</span></p>'''
self.assertEqual(s, r)
if __name__ == '__main__':
unittest.main()
| 23
| 68
| 0.561136
| 782
| 5,635
| 3.865729
| 0.075448
| 0.156136
| 0.137612
| 0.146212
| 0.894145
| 0.888521
| 0.873305
| 0.819716
| 0.792259
| 0.737678
| 0
| 0.010763
| 0.241526
| 5,635
| 244
| 69
| 23.094262
| 0.696537
| 0.003727
| 0
| 0.53
| 0
| 0.005
| 0.379544
| 0.053101
| 0
| 0
| 0
| 0
| 0.13
| 1
| 0.13
| false
| 0
| 0.015
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1413d4166e527d46d63e81bff2c72b8978b6445e
| 5,564
|
py
|
Python
|
WaveletApp/Wavelet_One.py
|
ChairOfStructuralMechanicsTUM/Mechanics_Apps
|
b064a42d4df3fa9bde62a5cff9cb27ca61b0127c
|
[
"MIT"
] | 11
|
2017-05-06T17:05:29.000Z
|
2020-11-12T09:26:47.000Z
|
WaveletApp/Wavelet_One.py
|
ChairOfStructuralMechanicsTUM/Mechanics_Apps
|
b064a42d4df3fa9bde62a5cff9cb27ca61b0127c
|
[
"MIT"
] | 49
|
2017-04-20T11:26:11.000Z
|
2020-05-29T13:18:06.000Z
|
WaveletApp/Wavelet_One.py
|
ChairOfStructuralMechanicsTUM/Mechanics_Apps
|
b064a42d4df3fa9bde62a5cff9cb27ca61b0127c
|
[
"MIT"
] | 4
|
2017-02-14T12:55:34.000Z
|
2022-01-12T15:07:07.000Z
|
import numpy as np
from scipy.integrate import quad
from math import sin, cos, pi, exp
def Find_Heaviside_Wavelet_One(T0,amp,Resolut):
"""
This function computes the wavelet transform of a heaviside function
input:
T0: float, representing the time where the step happen
amp: float, representing the amplitude of the step
Resolut: int, representing the resolution of the result
return:
a: array_like of size (1xResolut), discretization of the y-axis
b: array_like of size (1xResolut), discretization of the x-axis
W: array_like of size (ResolutxResolut), matrix containing the wavelet transform value at each (a,b)
"""
a = np.linspace(0.1, 5, Resolut)
b = np.linspace(0.1, 5, Resolut)
W = np.zeros((Resolut, Resolut))
for i in range (0,Resolut):
for j in range (0,Resolut):
def integrand1(t):
output = a[i]**-0.5 * amp * (t-b[j])/a[i] * exp(-( (t-b[j])/a[i] )**2.0)
return output
W[i][j]=quad(integrand1, T0, 15)[0]
return a,b,W
def Find_Rectangular_Wavelet_One(T0,T1,amp,Resolut):
"""
This function computes the wavelet transform of a rectangular function
input:
T0: float, representing the time where the first step happen
T1: float, representing the time where the second step happen
amp: float, representing the amplitude of the step
Resolut: int, representing the resolution of the result
return:
a: array_like of size (1xResolut), discretization of the y-axis
b: array_like of size (1xResolut), discretization of the x-axis
W: array_like of size (ResolutxResolut), matrix containing the wavelet transform value at each (a,b)
"""
a=np.linspace(0.1,5,Resolut)
b=np.linspace(0.1,5,Resolut)
W=np.zeros((Resolut, Resolut))
for i in range (0,Resolut):
for j in range (0,Resolut):
def integrand1(t):
output = a[i]**-0.5 * amp * (t-b[j])/a[i] * exp(-( (t-b[j])/a[i] )**2.0)
return output
W[i][j]=quad(integrand1, T0, T1)[0]
return a,b,W
def Find_Dirac_Wavelet_One(T0, amp,Resolut):
"""
This function computes the wavelet transform of a dirac function
input:
T0: float, representing the impulse happen
amp: float, representing the amplitude of the impulse
Resolut: int, representing the resolution of the result
return:
a: array_like of size (1xResolut), discretization of the y-axis
b: array_like of size (1xResolut), discretization of the x-axis
W: array_like of size (ResolutxResolut), matrix containing the wavelet transform value at each (a,b)
"""
a=np.linspace(0.1,5,Resolut)
b=np.linspace(0.1,5,Resolut)
W=np.zeros((Resolut, Resolut))
for i in range (0,Resolut):
for j in range (0,Resolut):
W[i][j]= a[i]**-0.5 * amp * (T0-b[j])/a[i] * exp(-( (T0-b[j])/a[i] )**2.0)
return a,b,W
def Find_Trig_Wavelet_One(index, frequency, Resolut):
"""
This function computes the wavelet transform of sin and cos functions
input:
index: int, defining which trigonometric function to be used {0 -> sin, 1 -> cos}
frequency: float, representing the the frequency of the trigonometric function
Resolut: int, representing the resolution of the result
return:
a: array_like of size (1xResolut), discretization of the y-axis
b: array_like of size (1xResolut), discretization of the x-axis
W: array_like of size (ResolutxResolut), matrix containing the wavelet transform value at each (a,b)
"""
a=np.linspace(0.1,5,Resolut)
b=np.linspace(0.1,5,Resolut)
W=np.zeros((Resolut, Resolut))
if (index == 0):
for i in range (0,Resolut):
for j in range (0,Resolut):
def integrand1(t):
output = a[i]**-0.5 * np.sin(2 * np.pi * frequency * t) * (t-b[j])/a[i] * exp(-( (t-b[j])/a[i] )**2.0)
return output
W[i][j]=quad(integrand1, -10, 15)[0]
else:
for i in range (0,Resolut):
for j in range (0,Resolut):
def integrand1(t):
output = a[i]**-0.5 * np.cos(2 * np.pi * frequency * t) * (t-b[j])/a[i] * exp(-( (t-b[j])/a[i] )**2.0)
return output
W[i][j]=quad(integrand1, -10, 15)[0]
return a,b,W
def Find_Custom_Wavelet_One(user_func, Resolut):
    """
    This function computes the wavelet transform of a user-defined function.
    input:
        user_func: string, a Python expression in the variable t; it may use
                   only the whitelisted names sin, cos, pi and exp
        Resolut: int, representing the resolution of the result
    return:
        a: array_like of size (1xResolut), discretization of the y-axis (scales)
        b: array_like of size (1xResolut), discretization of the x-axis (translations)
        W: array_like of size (ResolutxResolut), matrix containing the wavelet transform value at each (a,b)
    """
    a = np.linspace(0.1, 5, Resolut)
    b = np.linspace(0.1, 5, Resolut)
    W = np.zeros((Resolut, Resolut))
    # Whitelist of names the user expression may reference.  Blanking
    # __builtins__ is essential: without it eval() injects the full builtins
    # module into the globals dict, defeating the whitelist entirely.
    # NOTE(security): eval of a user-supplied string is still not a real
    # sandbox; only use with trusted input.
    safe_globals = {
        "__builtins__": {},
        "sin": sin,
        "cos": cos,
        "pi": pi,
        "exp": exp,
    }
    # Compile once up front instead of re-parsing the string on every single
    # quadrature evaluation; this also reports syntax errors immediately.
    code = compile(user_func, "<user_func>", "eval")
    for i, a_i in enumerate(a):
        for j, b_j in enumerate(b):

            def integrand(t, a_i=a_i, b_j=b_j):
                u = (t - b_j) / a_i
                # Pass t in a fresh locals dict rather than mutating the
                # shared globals on every call.
                return a_i ** -0.5 * eval(code, safe_globals, {"t": t}) * u * exp(-u ** 2.0)

            W[i][j] = quad(integrand, -10, 15)[0]
    return a, b, W
| 38.638889
| 122
| 0.602444
| 847
| 5,564
| 3.90673
| 0.121606
| 0.02871
| 0.049864
| 0.067996
| 0.829858
| 0.824418
| 0.80417
| 0.788154
| 0.764581
| 0.736174
| 0
| 0.029573
| 0.276779
| 5,564
| 144
| 123
| 38.638889
| 0.792744
| 0.464774
| 0
| 0.608696
| 0
| 0
| 0.004375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.144928
| false
| 0
| 0.043478
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
143967ecdb807596bad84628a55decbf28df2bbb
| 53,787
|
py
|
Python
|
erc1155_deployer/vendor/fetchai/protocols/ledger_api/ledger_api_pb2.py
|
Ataxia123/agents-template
|
513ae6afe02b9972929e39a26e15e276f0ed5b1c
|
[
"Apache-2.0"
] | null | null | null |
erc1155_deployer/vendor/fetchai/protocols/ledger_api/ledger_api_pb2.py
|
Ataxia123/agents-template
|
513ae6afe02b9972929e39a26e15e276f0ed5b1c
|
[
"Apache-2.0"
] | null | null | null |
erc1155_deployer/vendor/fetchai/protocols/ledger_api/ledger_api_pb2.py
|
Ataxia123/agents-template
|
513ae6afe02b9972929e39a26e15e276f0ed5b1c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: ledger_api.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name="ledger_api.proto",
package="aea.fetchai.ledger_api.v0_1_0",
syntax="proto3",
serialized_options=None,
serialized_pb=b'\n\x10ledger_api.proto\x12\x1d\x61\x65\x61.fetchai.ledger_api.v0_1_0"\x86\x15\n\x10LedgerApiMessage\x12W\n\x07\x62\x61lance\x18\x05 \x01(\x0b\x32\x44.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Balance_PerformativeH\x00\x12S\n\x05\x65rror\x18\x06 \x01(\x0b\x32\x42.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Error_PerformativeH\x00\x12_\n\x0bget_balance\x18\x07 \x01(\x0b\x32H.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Balance_PerformativeH\x00\x12o\n\x13get_raw_transaction\x18\x08 \x01(\x0b\x32P.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Raw_Transaction_PerformativeH\x00\x12[\n\tget_state\x18\t \x01(\x0b\x32\x46.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_State_PerformativeH\x00\x12w\n\x17get_transaction_receipt\x18\n \x01(\x0b\x32T.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Transaction_Receipt_PerformativeH\x00\x12g\n\x0fraw_transaction\x18\x0b \x01(\x0b\x32L.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Raw_Transaction_PerformativeH\x00\x12w\n\x17send_signed_transaction\x18\x0c \x01(\x0b\x32T.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Send_Signed_Transaction_PerformativeH\x00\x12S\n\x05state\x18\r \x01(\x0b\x32\x42.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State_PerformativeH\x00\x12m\n\x12transaction_digest\x18\x0e \x01(\x0b\x32O.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Transaction_Digest_PerformativeH\x00\x12o\n\x13transaction_receipt\x18\x0f \x01(\x0b\x32P.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Transaction_Receipt_PerformativeH\x00\x1a\x18\n\x06Kwargs\x12\x0e\n\x06kwargs\x18\x01 \x01(\x0c\x1a)\n\x0eRawTransaction\x12\x17\n\x0fraw_transaction\x18\x01 \x01(\x0c\x1a/\n\x11SignedTransaction\x12\x1a\n\x12signed_transaction\x18\x01 \x01(\x0c\x1a\x16\n\x05State\x12\r\n\x05state\x18\x01 \x01(\x0c\x1a\x16\n\x05Terms\x12\r\n\x05terms\x18\x01 \x01(\x0c\x1a/\n\x11TransactionDigest\x12\x1a\n\x12transaction_digest\x18\x01 
\x01(\x0c\x1a\x31\n\x12TransactionReceipt\x12\x1b\n\x13transaction_receipt\x18\x01 \x01(\x0c\x1a>\n\x18Get_Balance_Performative\x12\x11\n\tledger_id\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x02 \x01(\t\x1ah\n Get_Raw_Transaction_Performative\x12\x44\n\x05terms\x18\x01 \x01(\x0b\x32\x35.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Terms\x1a\x85\x01\n$Send_Signed_Transaction_Performative\x12]\n\x12signed_transaction\x18\x01 \x01(\x0b\x32\x41.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.SignedTransaction\x1a\x85\x01\n$Get_Transaction_Receipt_Performative\x12]\n\x12transaction_digest\x18\x01 \x01(\x0b\x32\x41.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionDigest\x1a:\n\x14\x42\x61lance_Performative\x12\x11\n\tledger_id\x18\x01 \x01(\t\x12\x0f\n\x07\x62\x61lance\x18\x02 \x01(\x05\x1aw\n\x1cRaw_Transaction_Performative\x12W\n\x0fraw_transaction\x18\x01 \x01(\x0b\x32>.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.RawTransaction\x1a\x80\x01\n\x1fTransaction_Digest_Performative\x12]\n\x12transaction_digest\x18\x01 \x01(\x0b\x32\x41.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionDigest\x1a\x83\x01\n Transaction_Receipt_Performative\x12_\n\x13transaction_receipt\x18\x01 \x01(\x0b\x32\x42.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionReceipt\x1a\x93\x01\n\x16Get_State_Performative\x12\x11\n\tledger_id\x18\x01 \x01(\t\x12\x10\n\x08\x63\x61llable\x18\x02 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x46\n\x06kwargs\x18\x04 \x01(\x0b\x32\x36.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Kwargs\x1am\n\x12State_Performative\x12\x11\n\tledger_id\x18\x01 \x01(\t\x12\x44\n\x05state\x18\x02 \x01(\x0b\x32\x35.aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State\x1an\n\x12\x45rror_Performative\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x16\n\x0emessage_is_set\x18\x03 \x01(\x08\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12\x13\n\x0b\x64\x61ta_is_set\x18\x05 
\x01(\x08\x42\x0e\n\x0cperformativeb\x06proto3',
)
_LEDGERAPIMESSAGE_KWARGS = _descriptor.Descriptor(
name="Kwargs",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Kwargs",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="kwargs",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Kwargs.kwargs",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1205,
serialized_end=1229,
)
_LEDGERAPIMESSAGE_RAWTRANSACTION = _descriptor.Descriptor(
name="RawTransaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.RawTransaction",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="raw_transaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.RawTransaction.raw_transaction",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1231,
serialized_end=1272,
)
_LEDGERAPIMESSAGE_SIGNEDTRANSACTION = _descriptor.Descriptor(
name="SignedTransaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.SignedTransaction",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="signed_transaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.SignedTransaction.signed_transaction",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1274,
serialized_end=1321,
)
_LEDGERAPIMESSAGE_STATE = _descriptor.Descriptor(
name="State",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="state",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State.state",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1323,
serialized_end=1345,
)
_LEDGERAPIMESSAGE_TERMS = _descriptor.Descriptor(
name="Terms",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Terms",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="terms",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Terms.terms",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1347,
serialized_end=1369,
)
_LEDGERAPIMESSAGE_TRANSACTIONDIGEST = _descriptor.Descriptor(
name="TransactionDigest",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionDigest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transaction_digest",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionDigest.transaction_digest",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1371,
serialized_end=1418,
)
_LEDGERAPIMESSAGE_TRANSACTIONRECEIPT = _descriptor.Descriptor(
name="TransactionReceipt",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionReceipt",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transaction_receipt",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionReceipt.transaction_receipt",
index=0,
number=1,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1420,
serialized_end=1469,
)
_LEDGERAPIMESSAGE_GET_BALANCE_PERFORMATIVE = _descriptor.Descriptor(
name="Get_Balance_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Balance_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="ledger_id",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Balance_Performative.ledger_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="address",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Balance_Performative.address",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1471,
serialized_end=1533,
)
_LEDGERAPIMESSAGE_GET_RAW_TRANSACTION_PERFORMATIVE = _descriptor.Descriptor(
name="Get_Raw_Transaction_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Raw_Transaction_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="terms",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Raw_Transaction_Performative.terms",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1535,
serialized_end=1639,
)
_LEDGERAPIMESSAGE_SEND_SIGNED_TRANSACTION_PERFORMATIVE = _descriptor.Descriptor(
name="Send_Signed_Transaction_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Send_Signed_Transaction_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="signed_transaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Send_Signed_Transaction_Performative.signed_transaction",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1642,
serialized_end=1775,
)
_LEDGERAPIMESSAGE_GET_TRANSACTION_RECEIPT_PERFORMATIVE = _descriptor.Descriptor(
name="Get_Transaction_Receipt_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Transaction_Receipt_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transaction_digest",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Transaction_Receipt_Performative.transaction_digest",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1778,
serialized_end=1911,
)
_LEDGERAPIMESSAGE_BALANCE_PERFORMATIVE = _descriptor.Descriptor(
name="Balance_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Balance_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="ledger_id",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Balance_Performative.ledger_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="balance",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Balance_Performative.balance",
index=1,
number=2,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1913,
serialized_end=1971,
)
_LEDGERAPIMESSAGE_RAW_TRANSACTION_PERFORMATIVE = _descriptor.Descriptor(
name="Raw_Transaction_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Raw_Transaction_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="raw_transaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Raw_Transaction_Performative.raw_transaction",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1973,
serialized_end=2092,
)
_LEDGERAPIMESSAGE_TRANSACTION_DIGEST_PERFORMATIVE = _descriptor.Descriptor(
name="Transaction_Digest_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Transaction_Digest_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transaction_digest",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Transaction_Digest_Performative.transaction_digest",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2095,
serialized_end=2223,
)
_LEDGERAPIMESSAGE_TRANSACTION_RECEIPT_PERFORMATIVE = _descriptor.Descriptor(
name="Transaction_Receipt_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Transaction_Receipt_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="transaction_receipt",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Transaction_Receipt_Performative.transaction_receipt",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2226,
serialized_end=2357,
)
_LEDGERAPIMESSAGE_GET_STATE_PERFORMATIVE = _descriptor.Descriptor(
name="Get_State_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_State_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="ledger_id",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_State_Performative.ledger_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="callable",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_State_Performative.callable",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="args",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_State_Performative.args",
index=2,
number=3,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="kwargs",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_State_Performative.kwargs",
index=3,
number=4,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2360,
serialized_end=2507,
)
_LEDGERAPIMESSAGE_STATE_PERFORMATIVE = _descriptor.Descriptor(
name="State_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="ledger_id",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State_Performative.ledger_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="state",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State_Performative.state",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2509,
serialized_end=2618,
)
_LEDGERAPIMESSAGE_ERROR_PERFORMATIVE = _descriptor.Descriptor(
name="Error_Performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Error_Performative",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="code",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Error_Performative.code",
index=0,
number=1,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="message",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Error_Performative.message",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="message_is_set",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Error_Performative.message_is_set",
index=2,
number=3,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="data",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Error_Performative.data",
index=3,
number=4,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="data_is_set",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Error_Performative.data_is_set",
index=4,
number=5,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2620,
serialized_end=2730,
)
_LEDGERAPIMESSAGE = _descriptor.Descriptor(
name="LedgerApiMessage",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="balance",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.balance",
index=0,
number=5,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="error",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.error",
index=1,
number=6,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="get_balance",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.get_balance",
index=2,
number=7,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="get_raw_transaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.get_raw_transaction",
index=3,
number=8,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="get_state",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.get_state",
index=4,
number=9,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="get_transaction_receipt",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.get_transaction_receipt",
index=5,
number=10,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="raw_transaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.raw_transaction",
index=6,
number=11,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="send_signed_transaction",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.send_signed_transaction",
index=7,
number=12,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="state",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.state",
index=8,
number=13,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="transaction_digest",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.transaction_digest",
index=9,
number=14,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="transaction_receipt",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.transaction_receipt",
index=10,
number=15,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[
_LEDGERAPIMESSAGE_KWARGS,
_LEDGERAPIMESSAGE_RAWTRANSACTION,
_LEDGERAPIMESSAGE_SIGNEDTRANSACTION,
_LEDGERAPIMESSAGE_STATE,
_LEDGERAPIMESSAGE_TERMS,
_LEDGERAPIMESSAGE_TRANSACTIONDIGEST,
_LEDGERAPIMESSAGE_TRANSACTIONRECEIPT,
_LEDGERAPIMESSAGE_GET_BALANCE_PERFORMATIVE,
_LEDGERAPIMESSAGE_GET_RAW_TRANSACTION_PERFORMATIVE,
_LEDGERAPIMESSAGE_SEND_SIGNED_TRANSACTION_PERFORMATIVE,
_LEDGERAPIMESSAGE_GET_TRANSACTION_RECEIPT_PERFORMATIVE,
_LEDGERAPIMESSAGE_BALANCE_PERFORMATIVE,
_LEDGERAPIMESSAGE_RAW_TRANSACTION_PERFORMATIVE,
_LEDGERAPIMESSAGE_TRANSACTION_DIGEST_PERFORMATIVE,
_LEDGERAPIMESSAGE_TRANSACTION_RECEIPT_PERFORMATIVE,
_LEDGERAPIMESSAGE_GET_STATE_PERFORMATIVE,
_LEDGERAPIMESSAGE_STATE_PERFORMATIVE,
_LEDGERAPIMESSAGE_ERROR_PERFORMATIVE,
],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="performative",
full_name="aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.performative",
index=0,
containing_type=None,
fields=[],
),
],
serialized_start=52,
serialized_end=2746,
)
_LEDGERAPIMESSAGE_KWARGS.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_RAWTRANSACTION.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_SIGNEDTRANSACTION.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_STATE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_TERMS.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_TRANSACTIONDIGEST.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_TRANSACTIONRECEIPT.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_GET_BALANCE_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_GET_RAW_TRANSACTION_PERFORMATIVE.fields_by_name[
"terms"
].message_type = _LEDGERAPIMESSAGE_TERMS
_LEDGERAPIMESSAGE_GET_RAW_TRANSACTION_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_SEND_SIGNED_TRANSACTION_PERFORMATIVE.fields_by_name[
"signed_transaction"
].message_type = _LEDGERAPIMESSAGE_SIGNEDTRANSACTION
_LEDGERAPIMESSAGE_SEND_SIGNED_TRANSACTION_PERFORMATIVE.containing_type = (
_LEDGERAPIMESSAGE
)
_LEDGERAPIMESSAGE_GET_TRANSACTION_RECEIPT_PERFORMATIVE.fields_by_name[
"transaction_digest"
].message_type = _LEDGERAPIMESSAGE_TRANSACTIONDIGEST
_LEDGERAPIMESSAGE_GET_TRANSACTION_RECEIPT_PERFORMATIVE.containing_type = (
_LEDGERAPIMESSAGE
)
_LEDGERAPIMESSAGE_BALANCE_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_RAW_TRANSACTION_PERFORMATIVE.fields_by_name[
"raw_transaction"
].message_type = _LEDGERAPIMESSAGE_RAWTRANSACTION
_LEDGERAPIMESSAGE_RAW_TRANSACTION_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_TRANSACTION_DIGEST_PERFORMATIVE.fields_by_name[
"transaction_digest"
].message_type = _LEDGERAPIMESSAGE_TRANSACTIONDIGEST
_LEDGERAPIMESSAGE_TRANSACTION_DIGEST_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_TRANSACTION_RECEIPT_PERFORMATIVE.fields_by_name[
"transaction_receipt"
].message_type = _LEDGERAPIMESSAGE_TRANSACTIONRECEIPT
_LEDGERAPIMESSAGE_TRANSACTION_RECEIPT_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_GET_STATE_PERFORMATIVE.fields_by_name[
"kwargs"
].message_type = _LEDGERAPIMESSAGE_KWARGS
_LEDGERAPIMESSAGE_GET_STATE_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_STATE_PERFORMATIVE.fields_by_name[
"state"
].message_type = _LEDGERAPIMESSAGE_STATE
_LEDGERAPIMESSAGE_STATE_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE_ERROR_PERFORMATIVE.containing_type = _LEDGERAPIMESSAGE
_LEDGERAPIMESSAGE.fields_by_name[
"balance"
].message_type = _LEDGERAPIMESSAGE_BALANCE_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"error"
].message_type = _LEDGERAPIMESSAGE_ERROR_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"get_balance"
].message_type = _LEDGERAPIMESSAGE_GET_BALANCE_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"get_raw_transaction"
].message_type = _LEDGERAPIMESSAGE_GET_RAW_TRANSACTION_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"get_state"
].message_type = _LEDGERAPIMESSAGE_GET_STATE_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"get_transaction_receipt"
].message_type = _LEDGERAPIMESSAGE_GET_TRANSACTION_RECEIPT_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"raw_transaction"
].message_type = _LEDGERAPIMESSAGE_RAW_TRANSACTION_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"send_signed_transaction"
].message_type = _LEDGERAPIMESSAGE_SEND_SIGNED_TRANSACTION_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"state"
].message_type = _LEDGERAPIMESSAGE_STATE_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"transaction_digest"
].message_type = _LEDGERAPIMESSAGE_TRANSACTION_DIGEST_PERFORMATIVE
_LEDGERAPIMESSAGE.fields_by_name[
"transaction_receipt"
].message_type = _LEDGERAPIMESSAGE_TRANSACTION_RECEIPT_PERFORMATIVE
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["balance"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"balance"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["error"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"error"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["get_balance"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"get_balance"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["get_raw_transaction"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"get_raw_transaction"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["get_state"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"get_state"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["get_transaction_receipt"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"get_transaction_receipt"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["raw_transaction"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"raw_transaction"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["send_signed_transaction"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"send_signed_transaction"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["state"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"state"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["transaction_digest"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"transaction_digest"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
_LEDGERAPIMESSAGE.oneofs_by_name["performative"].fields.append(
_LEDGERAPIMESSAGE.fields_by_name["transaction_receipt"]
)
_LEDGERAPIMESSAGE.fields_by_name[
"transaction_receipt"
].containing_oneof = _LEDGERAPIMESSAGE.oneofs_by_name["performative"]
DESCRIPTOR.message_types_by_name["LedgerApiMessage"] = _LEDGERAPIMESSAGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
LedgerApiMessage = _reflection.GeneratedProtocolMessageType(
"LedgerApiMessage",
(_message.Message,),
{
"Kwargs": _reflection.GeneratedProtocolMessageType(
"Kwargs",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_KWARGS,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Kwargs)
},
),
"RawTransaction": _reflection.GeneratedProtocolMessageType(
"RawTransaction",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_RAWTRANSACTION,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.RawTransaction)
},
),
"SignedTransaction": _reflection.GeneratedProtocolMessageType(
"SignedTransaction",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_SIGNEDTRANSACTION,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.SignedTransaction)
},
),
"State": _reflection.GeneratedProtocolMessageType(
"State",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_STATE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State)
},
),
"Terms": _reflection.GeneratedProtocolMessageType(
"Terms",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_TERMS,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Terms)
},
),
"TransactionDigest": _reflection.GeneratedProtocolMessageType(
"TransactionDigest",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_TRANSACTIONDIGEST,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionDigest)
},
),
"TransactionReceipt": _reflection.GeneratedProtocolMessageType(
"TransactionReceipt",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_TRANSACTIONRECEIPT,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.TransactionReceipt)
},
),
"Get_Balance_Performative": _reflection.GeneratedProtocolMessageType(
"Get_Balance_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_GET_BALANCE_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Balance_Performative)
},
),
"Get_Raw_Transaction_Performative": _reflection.GeneratedProtocolMessageType(
"Get_Raw_Transaction_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_GET_RAW_TRANSACTION_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Raw_Transaction_Performative)
},
),
"Send_Signed_Transaction_Performative": _reflection.GeneratedProtocolMessageType(
"Send_Signed_Transaction_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_SEND_SIGNED_TRANSACTION_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Send_Signed_Transaction_Performative)
},
),
"Get_Transaction_Receipt_Performative": _reflection.GeneratedProtocolMessageType(
"Get_Transaction_Receipt_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_GET_TRANSACTION_RECEIPT_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_Transaction_Receipt_Performative)
},
),
"Balance_Performative": _reflection.GeneratedProtocolMessageType(
"Balance_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_BALANCE_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Balance_Performative)
},
),
"Raw_Transaction_Performative": _reflection.GeneratedProtocolMessageType(
"Raw_Transaction_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_RAW_TRANSACTION_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Raw_Transaction_Performative)
},
),
"Transaction_Digest_Performative": _reflection.GeneratedProtocolMessageType(
"Transaction_Digest_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_TRANSACTION_DIGEST_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Transaction_Digest_Performative)
},
),
"Transaction_Receipt_Performative": _reflection.GeneratedProtocolMessageType(
"Transaction_Receipt_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_TRANSACTION_RECEIPT_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Transaction_Receipt_Performative)
},
),
"Get_State_Performative": _reflection.GeneratedProtocolMessageType(
"Get_State_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_GET_STATE_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Get_State_Performative)
},
),
"State_Performative": _reflection.GeneratedProtocolMessageType(
"State_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_STATE_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.State_Performative)
},
),
"Error_Performative": _reflection.GeneratedProtocolMessageType(
"Error_Performative",
(_message.Message,),
{
"DESCRIPTOR": _LEDGERAPIMESSAGE_ERROR_PERFORMATIVE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage.Error_Performative)
},
),
"DESCRIPTOR": _LEDGERAPIMESSAGE,
"__module__": "ledger_api_pb2"
# @@protoc_insertion_point(class_scope:aea.fetchai.ledger_api.v0_1_0.LedgerApiMessage)
},
)
_sym_db.RegisterMessage(LedgerApiMessage)
_sym_db.RegisterMessage(LedgerApiMessage.Kwargs)
_sym_db.RegisterMessage(LedgerApiMessage.RawTransaction)
_sym_db.RegisterMessage(LedgerApiMessage.SignedTransaction)
_sym_db.RegisterMessage(LedgerApiMessage.State)
_sym_db.RegisterMessage(LedgerApiMessage.Terms)
_sym_db.RegisterMessage(LedgerApiMessage.TransactionDigest)
_sym_db.RegisterMessage(LedgerApiMessage.TransactionReceipt)
_sym_db.RegisterMessage(LedgerApiMessage.Get_Balance_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Get_Raw_Transaction_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Send_Signed_Transaction_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Get_Transaction_Receipt_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Balance_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Raw_Transaction_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Transaction_Digest_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Transaction_Receipt_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Get_State_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.State_Performative)
_sym_db.RegisterMessage(LedgerApiMessage.Error_Performative)
# @@protoc_insertion_point(module_scope)
| 36.490502
| 3,928
| 0.654284
| 5,376
| 53,787
| 6.147507
| 0.047805
| 0.033163
| 0.047929
| 0.05392
| 0.825169
| 0.767981
| 0.694998
| 0.653242
| 0.641926
| 0.630821
| 0
| 0.034655
| 0.25376
| 53,787
| 1,473
| 3,929
| 36.515275
| 0.788729
| 0.040679
| 0
| 0.714085
| 1
| 0.000704
| 0.221837
| 0.174565
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002817
| 0
| 0.002817
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
143f42e0ed4fe1e3e6a3ecef66105575c2c69a00
| 854
|
py
|
Python
|
Class_Point2D.py
|
sdfxisme/lesson11
|
2559b2096d6329cddca7eae829b6360adced6dc6
|
[
"MIT"
] | null | null | null |
Class_Point2D.py
|
sdfxisme/lesson11
|
2559b2096d6329cddca7eae829b6360adced6dc6
|
[
"MIT"
] | null | null | null |
Class_Point2D.py
|
sdfxisme/lesson11
|
2559b2096d6329cddca7eae829b6360adced6dc6
|
[
"MIT"
] | null | null | null |
class Point2D():
def __init__(self, x,y):
self.coord = [x,y]
def __str__(self):
return f'Point:({self.coord[0]},{self.coord[1]})'
def __eq__(self,other):
return (self.coord[0]==other.coord[0])&(self.coord[1]==other.coord[1])
def __ne__(self,other):
return (self.coord[0]!=other.coord[0])&(self.coord[1]!=other.coord[1])
def __gt__(self,other):
return (self.distance()>other.distance())
def __le__(self,other):
return (self.distance()<=other.distance())
def __ge__(self,other):
return (self.distance()>=other.distance())
def __ne__(self,other):
return (self.coord[0]!=other.coord[0])&(self.coord[1]!=other.coord[1])
def distance(self):
return (self.coord[0]**2+self.coord[1]**2)**0.5
if __name__=='__main__':
point1 = Point2D(1,1)
| 27.548387
| 78
| 0.596019
| 122
| 854
| 3.844262
| 0.221311
| 0.211087
| 0.191898
| 0.24307
| 0.701493
| 0.667377
| 0.667377
| 0.667377
| 0.398721
| 0.398721
| 0
| 0.036496
| 0.197892
| 854
| 31
| 79
| 27.548387
| 0.648175
| 0
| 0
| 0.190476
| 0
| 0
| 0.054971
| 0.045614
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.380952
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
1446e6c23674c2f6ec72abbe62c765cefc1103f6
| 37
|
py
|
Python
|
HelloWorld.py
|
Gersh2021/django_project
|
621554d42828207c22047c48a04ed1ac72b1ec8c
|
[
"MIT"
] | null | null | null |
HelloWorld.py
|
Gersh2021/django_project
|
621554d42828207c22047c48a04ed1ac72b1ec8c
|
[
"MIT"
] | null | null | null |
HelloWorld.py
|
Gersh2021/django_project
|
621554d42828207c22047c48a04ed1ac72b1ec8c
|
[
"MIT"
] | null | null | null |
print('Welcome to Django Learinig')
| 12.333333
| 35
| 0.756757
| 5
| 37
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 37
| 2
| 36
| 18.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
14b8275094e6afa2ecc99e5bf6af1269f5706a49
| 2,807
|
py
|
Python
|
src/centralizeddictionarylearning.py
|
att/Distributed-Dictionary-Learning
|
c6915477dac1ffe7d9a732dd31333d540f5bd00b
|
[
"MIT"
] | 5
|
2020-07-25T18:44:54.000Z
|
2022-03-27T14:14:20.000Z
|
src/centralizeddictionarylearning.py
|
att/Distributed-Dictionary-Learning
|
c6915477dac1ffe7d9a732dd31333d540f5bd00b
|
[
"MIT"
] | null | null | null |
src/centralizeddictionarylearning.py
|
att/Distributed-Dictionary-Learning
|
c6915477dac1ffe7d9a732dd31333d540f5bd00b
|
[
"MIT"
] | 3
|
2019-09-04T08:04:09.000Z
|
2022-03-27T14:14:26.000Z
|
import time
import numpy as np
from sparsecoding import sparse_encode_omp
from sparsecoding import sparse_encode_nnmp
from dictionaryupdate import l2_update_dict
from dictionaryupdate import nnl2_update_dict
def dict_learning_ksvd(X, dict_init, s, max_iter):
"""Solves a dictionary learning matrix factorization problem.
Finds the best dictionary and the corresponding sparse code for
approximating the data matrix X by solving:
(D, Theta) = argmin || X - D Theta ||_F^2
with || d_k ||_2 = 1 and || theta_i ||_0 <= s
where D is the dictionary and Theta is the sparse code.
Parameters
----------
Inputs:
X: data matrix
dict_init: array of shape (n_features, n_components)
s: sparsity controlling parameter
max_iter: maximum number of iterations to perform
Outputs:
D: dictionary at every iteration
"""
sigdim = X.shape[0]
D = np.zeros([sigdim, dict_init.shape[1], max_iter+1])
D[:,:,0] = dict_init
for iters in range(max_iter):
t0 = time.time()
# Update coefficients
coef = sparse_encode_omp(X, D[:,:,iters], s)
# Update dictionary
D[:,:,iters+1], coef = l2_update_dict(D[:,:,iters], X, coef)
dt = time.time() - t0
print('the %dth iteration takes %f seconds' %(iters,dt))
return D
def dict_learning_nnksvd(X, dict_init, s, max_iter, updatec_iter):
"""Solves a dictionary learning matrix factorization problem.
Finds the best dictionary and the corresponding sparse code for
approximating the data matrix X by solving:
(D, Theta) = argmin || X - D Theta ||_F^2
with D, Theta >= 0, || d_k ||_2 = 1 and || theta_i ||_0 <= s
where D is the dictionary and Theta is the sparse code.
Parameters
----------
Inputs:
X: data matrix
dict_init: array of shape (n_features, n_components)
s: sparsity controlling parameter
max_iter: maximum number of iterations to perform
updatec_iter: number of iterations for updating coefficients in the sparse coding
Outputs:
D: dictionary at every iteration
"""
sigdim = X.shape[0]
D = np.zeros([sigdim, dict_init.shape[1], max_iter+1])
D[:,:,0] = dict_init
for iters in range(max_iter):
t0 = time.time()
# Update coefficients
coef = sparse_encode_nnmp(X, D[:,:,iters], s, updatec_iter)
# Update dictionary
D[:,:,iters+1], coef = nnl2_update_dict(D[:,:,iters], X, coef)
dt = time.time() - t0
print('the %dth iteration takes %f seconds' %(iters,dt))
return D
| 35.531646
| 90
| 0.601354
| 371
| 2,807
| 4.407008
| 0.245283
| 0.039144
| 0.033028
| 0.034251
| 0.833639
| 0.792049
| 0.740673
| 0.740673
| 0.740673
| 0.740673
| 0
| 0.013768
| 0.301389
| 2,807
| 79
| 91
| 35.531646
| 0.81999
| 0.473459
| 0
| 0.571429
| 0
| 0
| 0.055512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.214286
| 0
| 0.357143
| 0.071429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1ad6be8bccd6f92d1eb48bfedd4667f3e8354537
| 392
|
py
|
Python
|
src/train_plot.py
|
RosalRicardo/RTRGAN
|
6f4551ab8445367f8b9c711f41f15dd465abaef1
|
[
"MIT"
] | null | null | null |
src/train_plot.py
|
RosalRicardo/RTRGAN
|
6f4551ab8445367f8b9c711f41f15dd465abaef1
|
[
"MIT"
] | null | null | null |
src/train_plot.py
|
RosalRicardo/RTRGAN
|
6f4551ab8445367f8b9c711f41f15dd465abaef1
|
[
"MIT"
] | null | null | null |
import train
def train_plot(df, epochs, batchsize, fair_epochs, lamda, nu, S, Y, S_under, Y_desire):
generator, critic, ohe, scaler, data_train, data_test, input_dim, critic_losses, gen_losses = train.train(df, S, Y, S_under, Y_desire, epochs, batchsize, fair_epochs, lamda, nu)
return generator, critic, ohe, scaler, data_train, data_test, input_dim, critic_losses, gen_losses
| 56
| 181
| 0.75
| 62
| 392
| 4.467742
| 0.403226
| 0.108303
| 0.137184
| 0.180505
| 0.844765
| 0.844765
| 0.505415
| 0.505415
| 0.505415
| 0.505415
| 0
| 0
| 0.145408
| 392
| 7
| 182
| 56
| 0.826866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
1aed44476811af5d207a5e79795e1754da968150
| 209
|
py
|
Python
|
mail_sender/admin.py
|
Kiritox0x/trp_assistant_bot
|
9776d3b6fc6ebccb5d6b8c0ea68c9b145dc4e06a
|
[
"MIT"
] | null | null | null |
mail_sender/admin.py
|
Kiritox0x/trp_assistant_bot
|
9776d3b6fc6ebccb5d6b8c0ea68c9b145dc4e06a
|
[
"MIT"
] | null | null | null |
mail_sender/admin.py
|
Kiritox0x/trp_assistant_bot
|
9776d3b6fc6ebccb5d6b8c0ea68c9b145dc4e06a
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from mail_sender.models import *
admin.site.register(Bcm_room)
admin.site.register(Weekreport_Teacher)
admin.site.register(Weekreport_Assistant)
| 23.222222
| 41
| 0.832536
| 29
| 209
| 5.862069
| 0.586207
| 0.158824
| 0.3
| 0.317647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086124
| 209
| 8
| 42
| 26.125
| 0.890052
| 0.124402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
1af15f017a6de5ffecb6882b47e0671bf9f55c11
| 49
|
py
|
Python
|
src/models/resnet/__init__.py
|
yizhe-ang/vi-lab
|
65a46e41845982def17e553a46be849d948f9f45
|
[
"MIT"
] | 1
|
2022-02-19T17:03:18.000Z
|
2022-02-19T17:03:18.000Z
|
src/models/resnet/__init__.py
|
lemonwaffle/vi-lab
|
65a46e41845982def17e553a46be849d948f9f45
|
[
"MIT"
] | null | null | null |
src/models/resnet/__init__.py
|
lemonwaffle/vi-lab
|
65a46e41845982def17e553a46be849d948f9f45
|
[
"MIT"
] | null | null | null |
from .modules import ResNetEncoder, ResNetDecoder
| 49
| 49
| 0.877551
| 5
| 49
| 8.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.955556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
21122cf60125f78dff73ab5b6977934e64c1668f
| 118
|
py
|
Python
|
GENBOT/code/model/__ init __.py
|
Snd18/GENBOT_public
|
a60f69749ffdf43b9b688153d5c4a3488b3a9396
|
[
"Apache-2.0"
] | null | null | null |
GENBOT/code/model/__ init __.py
|
Snd18/GENBOT_public
|
a60f69749ffdf43b9b688153d5c4a3488b3a9396
|
[
"Apache-2.0"
] | null | null | null |
GENBOT/code/model/__ init __.py
|
Snd18/GENBOT_public
|
a60f69749ffdf43b9b688153d5c4a3488b3a9396
|
[
"Apache-2.0"
] | null | null | null |
import model.UserData as userdata
import model.DataBase as db
import model.Table as table
import model.Field as field
| 23.6
| 33
| 0.830508
| 20
| 118
| 4.9
| 0.4
| 0.44898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 118
| 4
| 34
| 29.5
| 0.960784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
21603a94fea2542273936e7f8fe1a9596f8505fa
| 85
|
py
|
Python
|
XPlaneSDKStub/XPLMUtilities.py
|
owentar/x-copilot-py
|
15ffa9d22fc2a906b5437d8484b4002c635085fe
|
[
"MIT"
] | null | null | null |
XPlaneSDKStub/XPLMUtilities.py
|
owentar/x-copilot-py
|
15ffa9d22fc2a906b5437d8484b4002c635085fe
|
[
"MIT"
] | null | null | null |
XPlaneSDKStub/XPLMUtilities.py
|
owentar/x-copilot-py
|
15ffa9d22fc2a906b5437d8484b4002c635085fe
|
[
"MIT"
] | null | null | null |
def XPLMFindCommand(command):
return 1
def XPLMCommandOnce(commandID):
pass
| 14.166667
| 31
| 0.741176
| 9
| 85
| 7
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014493
| 0.188235
| 85
| 5
| 32
| 17
| 0.898551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.25
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
dcddd753444dc30f3c8e9997a788150fe3e4eeea
| 7,683
|
py
|
Python
|
sumup_dev.py
|
systemdesignauthority/sumup
|
ecdc7f97b130ef41ad7d2b2d3b038fa8af00398f
|
[
"Apache-2.0"
] | null | null | null |
sumup_dev.py
|
systemdesignauthority/sumup
|
ecdc7f97b130ef41ad7d2b2d3b038fa8af00398f
|
[
"Apache-2.0"
] | null | null | null |
sumup_dev.py
|
systemdesignauthority/sumup
|
ecdc7f97b130ef41ad7d2b2d3b038fa8af00398f
|
[
"Apache-2.0"
] | null | null | null |
import requests, json
from datetime import date, timedelta
#authorize
url = 'https://api.sumup.com/authorize'
body = ''
response = requests.get(
url,
body, headers={'response_type': 'code',
'redirect_uri': 'https://me.sumup.com/callback',
'client_id': '5UxmEj--5f6DS-rQzPmn2hfMweDo',
'scope': 'payments transaction.history user.profile user.app-settings payment_instruments',
'state': '2cFCsY36y95lFHk4'
}
)
print (response)
#get bearer token
url = 'https://api.sumup.com/token'
body = ''
response = requests.post(
url,
headers={'Content-Type': 'application/x-www-form-urlencoded'},
data={'grant_type': 'password',
'client_id': '5UxmEj--5f6DS-rQzPmn2hfMweDo',
'username': 'simonballepsa@gmail.com',
'password': 'E8us[>m\";fy%\\UT]'
}
)
if(response.ok):
# Loading the response data into a dict variable
# json.loads takes in only binary or string variables so using content to fetch binary content
# Loads (Load String) takes a Json file and converts into python data structure (dict or list, depending on JSON)
jData = json.loads(response.content)
#print(jData)
print("The response contains {0} properties".format(jData))
print("\n")
for key in jData:
print (key + " : " + str(jData[key]))
#store token
access_token = jData['access_token']
else:
# If response code is not ok (200), print the resulting http error code with description
print("Failure")
response.raise_for_status()
print(access_token)
#list transactions and get ids
today = date.today()
day = today - timedelta(days=7)
iso = day.isoformat()
requests.packages.urllib3.disable_warnings()
url = 'https://api.sumup.com/v0.1/me/transactions/history'
response = requests.get(
url,
headers={'Content-Type': 'application/json',
'Authorization': 'Bearer ' + access_token
},
params={'changes_since': iso,
'limit': '10'
}
)
if(response.ok):
# Loading the response data into a dict variable
# json.loads takes in only binary or string variables so using content to fetch binary content
# Loads (Load String) takes a Json file and converts into python data structure (dict or list, depending on JSON)
jData = json.loads(response.content)
print(jData)
#print("The response contains {0} properties".format(jData))
#print("\n")
#for key in jData:
#if key == 'amount':
# print (key + " : " + str(jData[key]) + "\n")
#print (jData['items'][0]['amount'])
for item in jData['items']:
print(item['transaction_code'])
print(item['amount'])
#for idx in jData['items']:
#print (idx)
# print ()
#print jData['items']['id]']
#store token
#access_token = jData['access_token']
#print (jData['timestamp'])
#print (jData['amount'])
#print (jData['/n'])
#
else:
# If response code is not ok (200), print the resulting http error code with description
print("Failure")
response.raise_for_status()
#print(access_token)
#get transactions
today = date.today()
day = today - timedelta(days=7)
iso = day.isoformat()
requests.packages.urllib3.disable_warnings()
url = 'https://api.sumup.com/v0.1/me/transactions'
response = requests.get(
url,
headers={'Content-Type': 'application/json',
'Authorization': 'Bearer ' + access_token
},
params={'transaction_code': 'TD63RA477P'
}
)
if(response.ok):
# Loading the response data into a dict variable
# json.loads takes in only binary or string variables so using content to fetch binary content
# Loads (Load String) takes a Json file and converts into python data structure (dict or list, depending on JSON)
jData = json.loads(response.content)
print(jData)
#print("The response contains {0} properties".format(jData))
#print("\n")
#for key in jData:
#if key == 'amount':
# print (key + " : " + str(jData[key]) + "\n")
#print (jData['items'][0]['amount'])
#for item in jData['items']:
# print(item['transaction_code'])
# print(item['amount'])
#for idx in jData['items']:
#print (idx)
# print ()
#print jData['items']['id]']
#store token
#access_token = jData['access_token']
#print (jData['timestamp'])
#print (jData['amount'])
#print (jData['/n'])
#
else:
# If response code is not ok (200), print the resulting http error code with description
print("Failure")
response.raise_for_status()
#print(access_token)
#dcc9b0d6-874b-4cb4-9441-3fff868ac5ca
#get products from transaction id
requests.packages.urllib3.disable_warnings()
url = 'https://api.sumup.com/v1.0/receipts/TD63RA477P'
response = requests.get(
url,
headers={'Content-Type': 'application/json',
'Authorization': 'Bearer ' + access_token,
},
params={'mid': 'M9KS4HS4'
}
)
if(response.ok):
# Loading the response data into a dict variable
# json.loads takes in only binary or string variables so using content to fetch binary content
# Loads (Load String) takes a Json file and converts into python data structure (dict or list, depending on JSON)
jData = json.loads(response.content)
print(jData)
#print("The response contains {0} properties".format(jData))
#print("\n")
#for key in jData:
#if key == 'amount':
# print (key + " : " + str(jData[key]) + "\n")
#print (jData['items'][0]['amount'])
#for item in jData['items']:
# print(item['id'])
#for idx in jData['items']:
#print (idx)
# print ()
#print jData['items']['id]']
#store token
#access_token = jData['access_token']
#print (jData['timestamp'])
#print (jData['amount'])
#print (jData['/n'])
#
else:
# If response code is not ok (200), print the resulting http error code with description
print("Failure")
response.raise_for_status()
requests.packages.urllib3.disable_warnings()
url = 'https://me.sumup.com/en-gb/reports/online-store'
response = requests.get(
url,
headers={'Content-Type': 'application/json',
'Authorization': 'Bearer ' + access_token,
},
)
if(response.ok):
# Loading the response data into a dict variable
# json.loads takes in only binary or string variables so using content to fetch binary content
# Loads (Load String) takes a Json file and converts into python data structure (dict or list, depending on JSON)
jData = json.loads(response.content)
print(jData)
#print("The response contains {0} properties".format(jData))
#print("\n")
#for key in jData:
#if key == 'amount':
# print (key + " : " + str(jData[key]) + "\n")
#print (jData['items'][0]['amount'])
#for item in jData['items']:
# print(item['id'])
#for idx in jData['items']:
#print (idx)
# print ()
#print jData['items']['id]']
#store token
#access_token = jData['access_token']
#print (jData['timestamp'])
#print (jData['amount'])
#print (jData['/n'])
#
else:
# If response code is not ok (200), print the resulting http error code with description
print("Failure")
response.raise_for_status()
| 29.894942
| 115
| 0.604972
| 928
| 7,683
| 4.96444
| 0.163793
| 0.054265
| 0.026047
| 0.02952
| 0.86781
| 0.834165
| 0.834165
| 0.816149
| 0.816149
| 0.816149
| 0
| 0.014567
| 0.258363
| 7,683
| 256
| 116
| 30.011719
| 0.793963
| 0.475335
| 0
| 0.574074
| 0
| 0
| 0.245856
| 0.028564
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.018519
| 0.018519
| 0
| 0.018519
| 0.148148
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b4bc32f833fef8791e3cc71fac2d219689070ee1
| 1,618
|
py
|
Python
|
malcolm/core/__init__.py
|
dls-controls/github-publish-test
|
25f6ce1af28eff9930f65e4f2c9fb0474e0b570c
|
[
"Apache-2.0"
] | null | null | null |
malcolm/core/__init__.py
|
dls-controls/github-publish-test
|
25f6ce1af28eff9930f65e4f2c9fb0474e0b570c
|
[
"Apache-2.0"
] | null | null | null |
malcolm/core/__init__.py
|
dls-controls/github-publish-test
|
25f6ce1af28eff9930f65e4f2c9fb0474e0b570c
|
[
"Apache-2.0"
] | null | null | null |
# Make a nice namespace
from malcolm.core.attribute import Attribute # noqa
from malcolm.core.block import Block # noqa
from malcolm.core.controller import Controller # noqa
from malcolm.core.clientcomms import ClientComms # noqa
from malcolm.core.clientcontroller import ClientController # noqa
from malcolm.core.elementmap import ElementMap # noqa
from malcolm.core.hook import Hook # noqa
from malcolm.core.loggable import Loggable # noqa
from malcolm.core.map import Map # noqa
from malcolm.core.methodmeta import MethodMeta, method_takes, method_returns, \
method_only_in, REQUIRED, OPTIONAL # noqa
from malcolm.core.ntscalar import NTScalar # noqa
from malcolm.core.ntscalararray import NTScalarArray # noqa
from malcolm.core.nttable import NTTable # noqa
from malcolm.core.ntunion import NTUnion # noqa
from malcolm.core.part import Part # noqa
from malcolm.core.process import Process # noqa
from malcolm.core.request import Request, Get, Put, Post, Subscribe, \
Unsubscribe # noqa
from malcolm.core.response import Response, Return, Error, Delta, Update # noqa
from malcolm.core.serializable import Serializable, serialize_object, \
deserialize_object # noqa
from malcolm.core.servercomms import ServerComms # noqa
from malcolm.core.spawnable import Spawnable # noqa
from malcolm.core.statemachine import RunnableDeviceStateMachine, \
DefaultStateMachine # noqa
from malcolm.core.syncfactory import SyncFactory # noqa
from malcolm.core.table import Table # noqa
from malcolm.core.tableelementmap import TableElementMap # noqa
from malcolm.core.task import Task # noqa
| 52.193548
| 80
| 0.800371
| 209
| 1,618
| 6.167464
| 0.267943
| 0.221877
| 0.30256
| 0.368503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137824
| 1,618
| 31
| 81
| 52.193548
| 0.924014
| 0.093325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.866667
| 0
| 0.866667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b4c71bf6c3fb7dd6ea0cbf6097a825bcc7a2ac64
| 62
|
py
|
Python
|
src/quill/scan/__init__.py
|
spin-systems/quill
|
b210f840e228295e41feaf4ff79f2cc8bd96889d
|
[
"MIT"
] | null | null | null |
src/quill/scan/__init__.py
|
spin-systems/quill
|
b210f840e228295e41feaf4ff79f2cc8bd96889d
|
[
"MIT"
] | 14
|
2020-11-22T09:59:08.000Z
|
2021-11-02T16:05:35.000Z
|
src/quill/scan/__init__.py
|
spin-systems/quill
|
b210f840e228295e41feaf4ff79f2cc8bd96889d
|
[
"MIT"
] | null | null | null |
from .io import *
from .lever import *
from .address import *
| 15.5
| 22
| 0.709677
| 9
| 62
| 4.888889
| 0.555556
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 62
| 3
| 23
| 20.666667
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2ea48275791eab4062a46e7033dcd3cf3709e92d
| 187
|
py
|
Python
|
src/lib/trains/train_factory.py
|
larryv/IP-Net
|
ab0362097c4ca7ff07763dbf2dfca028f974b014
|
[
"MIT"
] | 66
|
2020-03-31T04:24:45.000Z
|
2022-02-22T02:55:02.000Z
|
src/lib/trains/train_factory.py
|
larryv/IP-Net
|
ab0362097c4ca7ff07763dbf2dfca028f974b014
|
[
"MIT"
] | 12
|
2020-04-06T22:34:26.000Z
|
2021-11-22T07:58:53.000Z
|
src/lib/trains/train_factory.py
|
larryv/IP-Net
|
ab0362097c4ca7ff07763dbf2dfca028f974b014
|
[
"MIT"
] | 8
|
2020-09-20T09:01:45.000Z
|
2022-02-22T02:55:04.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .ctdet import CtdetTrainer
# Task-name -> trainer-class registry: callers select a trainer
# implementation by its task key.  Only the 'ctdet' task (handled by
# CtdetTrainer, imported above) is registered in this build.
train_factory = {
  'ctdet': CtdetTrainer}
| 18.7
| 38
| 0.823529
| 22
| 187
| 6.318182
| 0.5
| 0.215827
| 0.345324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139037
| 187
| 9
| 39
| 20.777778
| 0.863354
| 0
| 0
| 0
| 0
| 0
| 0.026882
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2584464703d0d9c136086920a97bbccca49fc2c3
| 7,194
|
py
|
Python
|
evodynamic/connection/cellular_automata.py
|
SocratesNFR/evodynamic
|
682b610096182bde2298cdca352e7b319a0e4c41
|
[
"Apache-2.0"
] | 9
|
2019-06-07T22:57:07.000Z
|
2022-01-17T12:35:08.000Z
|
evodynamic/connection/cellular_automata.py
|
SocratesNFR/evodynamic
|
682b610096182bde2298cdca352e7b319a0e4c41
|
[
"Apache-2.0"
] | null | null | null |
evodynamic/connection/cellular_automata.py
|
SocratesNFR/evodynamic
|
682b610096182bde2298cdca352e7b319a0e4c41
|
[
"Apache-2.0"
] | 4
|
2020-09-02T16:17:58.000Z
|
2021-12-05T21:28:32.000Z
|
""" Connections for cellular automata """
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import numpy as np
def create_count_neighbors_ca1d(width):
  """Weights that count the alive neighbors in a 1D neighborhood.

  Produces the 'neighbors' and 'center_idx' arguments for
  evodynamic.connection.cellular_automata.create_conn_matrix_ca1d(...):
  every position gets weight 1 except the center cell, which gets 0, so
  a weighted sum over the neighborhood counts the alive neighbors.

  Parameters
  ----------
  width : int
    Neighborhood size.

  Returns
  -------
  out1 : list
    Weights of the neighbors (1 everywhere, 0 at the center).
  out2 : int
    Index of the center of the neighborhood.
  """
  center = width // 2
  weights = []
  for position in range(width):
    weights.append(0 if position == center else 1)
  return weights, center
def create_pattern_neighbors_ca1d(width, n_states=2):
  """Weights that encode a 1D neighborhood pattern as a unique number.

  Produces the 'neighbors' and 'center_idx' arguments for
  evodynamic.connection.cellular_automata.create_conn_matrix_ca1d(...).
  The weights are descending powers of ``n_states``, so the weighted sum
  of a neighborhood is its base-``n_states`` value — a distinct number
  for every distinct pattern.

  Parameters
  ----------
  width : int
    Neighborhood size.
  n_states : int
    Number of discrete states in a cell.

  Returns
  -------
  out1 : list
    Weights of the neighbors (descending powers of ``n_states``).
  out2 : int
    Index of the center of the neighborhood.
  """
  center = width // 2
  # Leftmost cell is the most significant digit: n_states**(width-1) ... 1.
  weights = [n_states ** (width - 1 - position) for position in range(width)]
  return weights, center
def create_conn_matrix_ca1d(name, width,\
                            neighbors=[4,2,1],\
                            center_idx=1, is_wrapped_ca = True,\
                            is_sparse = True):
  """
  This function creates a connection matrix for
  evodynamic.connection.WeightedConnection, so it can connect the cells as
  in a 1D cellular automaton.
  Parameters
  ----------
  name : str
    Name of the Tensor.
  width : int
    Neighborhood size.
  neighbors : list of int/float
    Weights for the neighbors.
  center_idx : int
    Index of the center cell in the neighborhood.
  is_wrapped_ca : Boolean
    Activates the wrapped boundary condition.
  is_sparse : Boolean
    Defines the type of Tensor variable for the connection matrix.
  Returns
  -------
  out : Tensor
    Connection matrix for TensorFlow.
  """
  # NOTE(review): mutable default argument 'neighbors=[4,2,1]' is shared
  # across calls; harmless only because it is never mutated below.
  neighbors_arr = np.array(neighbors)
  assert (len(neighbors_arr.shape) == 1),\
    "'neighbors' must be a list or numpy.ndarray with 1 dimensions!"
  nodes = width
  idx_dict_list = []
  # Build one {neighbor_cell_index: weight} dict per cell i.
  for i in range(nodes):
    idx_dict_list.append({})
    # ii is the signed offset from the neighborhood center.
    for ii in range(-center_idx,neighbors_arr.shape[0]-center_idx):
      current_neighbor_cell = neighbors_arr[ii+center_idx]
      # Keep only non-zero weights; when not wrapped, drop offsets that
      # fall outside [0, width).  Wrapping is realized by the modulo below.
      if (current_neighbor_cell != 0 and (is_wrapped_ca or \
          (not is_wrapped_ca and (0<=(i+ii)<width)))):
        idx_dict_list[-1][(i+ii)%width] = current_neighbor_cell
  if is_sparse:
    # Flatten the per-cell dicts into COO-style indices: [cell, neighbor].
    indices = []
    values = []
    for i, idx_dict in enumerate(idx_dict_list):
      for k in idx_dict:
        indices.append([i,k])
        values.append(idx_dict[k])
    initial = tf.cast(tf.SparseTensor(indices=indices, values=values,\
      dense_shape=[nodes, nodes]), tf.float64)
  else:
    # Dense variant: same data materialized as a full (nodes, nodes) matrix.
    conn_matrix = np.zeros((nodes, nodes))
    for i, idx_dict in enumerate(idx_dict_list):
      for k in idx_dict:
        conn_matrix[i,k] = idx_dict[k]
    initial = conn_matrix
  # SparseTensor is returned as-is; a dense matrix is wrapped in a TF
  # variable registered under 'name'.
  return initial if is_sparse else tf.get_variable(name, initializer=initial)
def create_count_neighbors_ca2d(width, height):
  """Weights that count the alive neighbors in a 2D neighborhood.

  Produces the 'neighbors' and 'center_idx' arguments for
  evodynamic.connection.cellular_automata.create_conn_matrix_ca2d(...):
  every position in the width x height grid gets weight 1 except the
  center cell, which gets 0.

  Parameters
  ----------
  width : int
    Neighborhood width.
  height : int
    Neighborhood height.

  Returns
  -------
  out1 : numpy.ndarray
    Grid of neighbor weights (1 everywhere, 0 at the center).
  out2 : list
    [column, row] index of the center of the neighborhood.
  """
  # Flat index of the center cell in row-major order.
  flat_center = width * (height // 2) + width // 2
  flat_weights = np.where(np.arange(width * height) == flat_center, 0, 1)
  return flat_weights.reshape(width, height), [width // 2, height // 2]
def create_pattern_neighbors_ca2d(width, height, n_states=2):
  """Weights that encode a 2D neighborhood pattern as a unique number.

  Produces the 'neighbors' and 'center_idx' arguments for
  evodynamic.connection.cellular_automata.create_conn_matrix_ca2d(...).
  Each grid position carries a distinct power of ``n_states``, so the
  weighted sum of a neighborhood is unique per pattern.

  Parameters
  ----------
  width : int
    Neighborhood width.
  height : int
    Neighborhood height.
  n_states : int
    Number of discrete states in a cell.

  Returns
  -------
  out1 : numpy.ndarray
    Grid of weights (ascending powers of ``n_states``, row-major).
  out2 : list
    [column, row] index of the center of the neighborhood.
  """
  exponents = np.arange(width * height)
  weight_grid = (n_states ** exponents).reshape(width, height)
  return weight_grid, [width // 2, height // 2]
def create_conn_matrix_ca2d(name, width, height,\
                            neighbors=[[0,1,0],[1,0,1],[0,1,0]],\
                            center_idx=[1,1], is_wrapped_ca = True,\
                            is_sparse = True):
  """
  This function creates a connection matrix for
  evodynamic.connection.WeightedConnection, so it can connect the cells as
  in a 2D cellular automaton.
  Parameters
  ----------
  name : str
    Name of the Tensor.
  width : int
    Neighborhood width.
  height : int
    Neighborhood height.
  neighbors : matrix or 2D list of int/float.
    Weights for the neighbors.
  center_idx : list
    Index of the center cell in the neighborhood.
  is_wrapped_ca : Boolean
    Activates the wrapped boundary condition.
  is_sparse : Boolean
    Defines the type of Tensor variable for the connection matrix.
  Returns
  -------
  out : Tensor
    Connection matrix for TensorFlow.
  """
  # NOTE(review): mutable default 'neighbors=[[0,1,0],...]' is shared
  # across calls; harmless only because it is never mutated below.
  neighbors_arr = np.array(neighbors)
  assert (len(neighbors_arr.shape) == 2),\
    "'neighbors' must be a list or numpy.ndarray with 2 dimensions!"
  assert (len(center_idx) == 2),\
    "'center_idx' must be a list with 2 elements!"
  nodes = width* height
  idx_dict_list = []
  # Build one {flat_neighbor_index: weight} dict per (flattened) cell i.
  for i in range(nodes):
    idx_dict_list.append({})
    # ii iterates rows of neighbors_arr, jj its columns, both as signed
    # offsets from the neighborhood center.
    for ii in range(-center_idx[1],neighbors_arr.shape[0]-center_idx[1]):
      for jj in range(-center_idx[0],neighbors_arr.shape[1]-center_idx[0]):
        current_neighbor_cell = neighbors_arr[ii+center_idx[1], jj+center_idx[0]]
        # NOTE(review): the non-wrapped bound checks compare (i%height)+ii
        # against width and (i//width)+jj against height — the width/height
        # pairing looks swapped relative to the flat index below; confirm
        # against a non-square, non-wrapped CA before relying on it.
        if (current_neighbor_cell != 0 and (is_wrapped_ca or \
            (not is_wrapped_ca and (0 <= ((i%height)+ii) < width) and \
            (0 <= ((i//width)+jj) < height)))):
          # Flat target index: ii shifts within a row, jj shifts whole rows.
          idx_dict_list[-1][(i+ii)%width + (((i//width)+jj)%height)*width] =\
            current_neighbor_cell
  if is_sparse:
    # COO-style indices; note the order is [neighbor, cell] here, the
    # transpose of the [cell, neighbor] layout used by the 1D variant.
    indices = []
    values = []
    for i, idx_dict in enumerate(idx_dict_list):
      for k in idx_dict:
        indices.append([k,i])
        values.append(idx_dict[k])
    initial = tf.cast(tf.SparseTensor(indices=indices, values=values,\
      dense_shape=[nodes, nodes]), tf.float64)
  else:
    # Dense variant mirrors the sparse one: conn_matrix[neighbor, cell].
    conn_matrix = np.zeros((nodes, nodes))
    for i, idx_dict in enumerate(idx_dict_list):
      for k in idx_dict:
        conn_matrix[k,i] = idx_dict[k]
    initial = conn_matrix
  # SparseTensor is returned as-is; a dense matrix is wrapped in a TF
  # variable registered under 'name'.
  return initial if is_sparse else tf.get_variable(name, initializer=initial)
| 31.142857
| 85
| 0.654712
| 993
| 7,194
| 4.590131
| 0.133938
| 0.033787
| 0.024133
| 0.021062
| 0.865292
| 0.845327
| 0.83348
| 0.810882
| 0.778412
| 0.747696
| 0
| 0.014593
| 0.237976
| 7,194
| 230
| 86
| 31.278261
| 0.816855
| 0.434807
| 0
| 0.481928
| 0
| 0
| 0.044304
| 0
| 0
| 0
| 0
| 0
| 0.036145
| 1
| 0.072289
| false
| 0
| 0.024096
| 0
| 0.168675
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
25ac55048a38c2c0f0fcd80dac8858260648a954
| 46
|
py
|
Python
|
setenv.py
|
kodtanactc/kodpersonal
|
25416dd492e4bcf60510e7744a88c740b3b55c72
|
[
"Apache-2.0"
] | null | null | null |
setenv.py
|
kodtanactc/kodpersonal
|
25416dd492e4bcf60510e7744a88c740b3b55c72
|
[
"Apache-2.0"
] | null | null | null |
setenv.py
|
kodtanactc/kodpersonal
|
25416dd492e4bcf60510e7744a88c740b3b55c72
|
[
"Apache-2.0"
] | null | null | null |
import os

# SECURITY NOTE(review): this Redis credential was committed to the
# repository in plain text.  It should be rotated and supplied via the
# REDIS_PWD environment variable; the original literal is kept only as a
# backward-compatible fallback so existing deployments keep working.
RedisPwd = os.environ.get("REDIS_PWD", "7rIHpYAMRCnuE93SMTXma0pd66DQECB2")
| 23
| 45
| 0.869565
| 2
| 46
| 20
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 0.065217
| 46
| 1
| 46
| 46
| 0.767442
| 0
| 0
| 0
| 0
| 0
| 0.695652
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
25b26c9a34fa0ea8b295cf14c434f0c497c969c3
| 264
|
py
|
Python
|
Section04_Prototype/RegularPrototype/Address.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | 1
|
2020-10-20T07:41:51.000Z
|
2020-10-20T07:41:51.000Z
|
Section04_Prototype/RegularPrototype/Address.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
Section04_Prototype/RegularPrototype/Address.py
|
enriqueescobar-askida/Kinito.Python
|
e4c5521e771c4de0ceaf81776a4a61f7de01edb4
|
[
"MIT"
] | null | null | null |
class Address:
    """A simple postal address (street, city, country).

    Used as the value object in the prototype-pattern example: instances
    are meant to be copied and tweaked rather than rebuilt from scratch.
    """

    def __init__(self, street_address, city, country):
        # Store the components in declaration order for readability.
        self.street_address = street_address
        self.city = city
        self.country = country

    def __str__(self):
        # Comma-separated rendering: "street, city, country".
        return '{}, {}, {}'.format(self.street_address, self.city, self.country)
| 29.333333
| 68
| 0.651515
| 32
| 264
| 5
| 0.34375
| 0.325
| 0.31875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238636
| 264
| 8
| 69
| 33
| 0.79602
| 0
| 0
| 0
| 0
| 0
| 0.189394
| 0.083333
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
25ce56d35da32880b6b5c64fe2bccbcbb16795ab
| 80
|
py
|
Python
|
elabjournal/elabjournal/SampleMetas.py
|
matthijsbrouwer/elabjournal-python
|
4063b01993f0bf17ea2857009c1bedc5ace8b87b
|
[
"Apache-2.0"
] | 2
|
2021-06-29T11:17:27.000Z
|
2022-01-11T18:41:49.000Z
|
elabjournal/elabjournal/SampleMetas.py
|
matthijsbrouwer/elabjournal-python
|
4063b01993f0bf17ea2857009c1bedc5ace8b87b
|
[
"Apache-2.0"
] | null | null | null |
elabjournal/elabjournal/SampleMetas.py
|
matthijsbrouwer/elabjournal-python
|
4063b01993f0bf17ea2857009c1bedc5ace8b87b
|
[
"Apache-2.0"
] | 1
|
2019-06-06T13:23:11.000Z
|
2019-06-06T13:23:11.000Z
|
from .eLABJournalPager import *
class SampleMetas(eLABJournalPager):
    """Pager over sample-meta records.

    All pagination behavior is inherited unchanged from eLABJournalPager
    (brought into scope by the star import above); this subclass only
    fixes the concrete type name.  Presumably it pages eLABJournal sample
    metadata — verify against the API client that constructs it.
    """
    pass
| 13.333333
| 36
| 0.775
| 7
| 80
| 8.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1625
| 80
| 6
| 37
| 13.333333
| 0.925373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
25eba5b91cbf6c02ccdff69efd0d18aaf1b9d75e
| 138
|
py
|
Python
|
common/aist_common/grammar/qualifier_classifier.py
|
sfahad1414/AGENT
|
84069edc96b6190bb03ffd5099cbc8966061a563
|
[
"Apache-2.0"
] | 15
|
2020-05-06T16:17:56.000Z
|
2022-03-30T12:25:16.000Z
|
common/aist_common/grammar/qualifier_classifier.py
|
dionny/AGENT
|
8a833406b590e23623fcc67db99f6f964d002396
|
[
"Apache-2.0"
] | 2
|
2021-08-25T16:17:16.000Z
|
2022-02-10T06:35:58.000Z
|
common/aist_common/grammar/qualifier_classifier.py
|
dionny/AGENT
|
8a833406b590e23623fcc67db99f6f964d002396
|
[
"Apache-2.0"
] | 7
|
2020-04-07T18:47:55.000Z
|
2022-03-30T12:14:58.000Z
|
class QualifierClassifier:
    """Stub qualifier classifier.

    Currently classifies nothing: every state maps to an empty qualifier
    set.  Kept as a class so callers have a stable construction/lookup
    API when a real implementation lands.
    """

    def __init__(self):
        # No state to initialize yet.
        pass

    @staticmethod
    def get_qualifiers(act_state):
        """Return the qualifier mapping for ``act_state`` (always empty)."""
        empty_qualifiers = {}
        return empty_qualifiers
| 13.8
| 34
| 0.630435
| 13
| 138
| 6.230769
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.297101
| 138
| 9
| 35
| 15.333333
| 0.835052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
d32d57d92da53548112e980f62e724fabccdd263
| 20
|
py
|
Python
|
MLSol/__init__.py
|
nicholaslaw/MLSol
|
72e4a0195e5a2ec2a0ef5cb9c4fed949e39296e9
|
[
"Apache-2.0"
] | 1
|
2021-01-29T12:37:31.000Z
|
2021-01-29T12:37:31.000Z
|
MLSol/__init__.py
|
nicholaslaw/MLSol
|
72e4a0195e5a2ec2a0ef5cb9c4fed949e39296e9
|
[
"Apache-2.0"
] | null | null | null |
MLSol/__init__.py
|
nicholaslaw/MLSol
|
72e4a0195e5a2ec2a0ef5cb9c4fed949e39296e9
|
[
"Apache-2.0"
] | null | null | null |
from .mlsol import *
| 20
| 20
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d358dba4713d26fae0f7360f2dece69ff72e1013
| 2,841
|
py
|
Python
|
opencl/test_fitness.py
|
vyscond/sliding-puzzle-problem
|
4f79bde560abda61e3d14ba00dadef936a423926
|
[
"MIT"
] | null | null | null |
opencl/test_fitness.py
|
vyscond/sliding-puzzle-problem
|
4f79bde560abda61e3d14ba00dadef936a423926
|
[
"MIT"
] | null | null | null |
opencl/test_fitness.py
|
vyscond/sliding-puzzle-problem
|
4f79bde560abda61e3d14ba00dadef936a423926
|
[
"MIT"
] | null | null | null |
from lordran import platform_selector as ps
from lordran import sliding_puzzle_parallel as spp
def init_pop( context , random_list , puzzle ):
    """Generate the initial chromosome population on the given OpenCL context.

    Delegates to spp.InitialPopulationGeneratorEnhanced, running it
    asynchronously with the supplied random seeds and the puzzle's move
    alphabet (-1, move_top, 1, move_bot).
    """
    move_alphabet = [ -1 , puzzle.move_top , 1 , puzzle.move_bot ]
    generator = spp.InitialPopulationGeneratorEnhanced( context )
    return generator.execute( ps.Execution.ASYNC , random_list , move_alphabet )
# Benchmark entry point (Python 2 — note the print statements below):
# times the FitnessEnhanced kernel on an Intel context, then on an NVIDIA
# CUDA context, and prints the CPU/GPU timings and their ratio.
if __name__ == '__main__' :
    #~ puzzle = spp.Puzzle( range( 9 ) )
    # One-move-from-solved puzzle instance used by both benchmark passes.
    puzzle = spp.Puzzle( [1,0,2,3,4,5,6,7,8] )
    context = ps.get_intel_context()
    import time
    cpu_avg_time = 0
    # --- CPU (Intel OpenCL) pass; duplicated verbatim below for the GPU ---
    for i in range(1):
        # 10000 chromosomes, each seeded with 10000; only FitnessEnhanced
        # (between the two time.time() calls) is timed.
        population , offset_list , max_chromossome_size = init_pop( context , [ 10000 for i in range(10000)] , puzzle )
        a = time.time()
        # ( self , execution_mode , chromossome_list , chromossome_offset_list , chromossome_max_size , problem_instance ):
        #~ spp.FitnessEnhanced( context , True ).execute( 0 , population , offset_list , max_chromossome_size , [1,0,2,3,4,5,6,7,8] )
        #fitness_results , chromossome_list , chromossome_offset_list =
        spp.FitnessEnhanced( context , False ).execute( 0 , population , offset_list , max_chromossome_size , puzzle )
        a = time.time() - a
        # Elapsed seconds are converted to minutes before accumulating.
        cpu_avg_time += ( a / 60 )
        #~ for x in fitness_results :
        #~ print x
    # this is the last thing that you need to do ;)
    # implement on CPU DEBUG RP fitness, ok? VryHppy :3
    # ( invalid_positions_remaining ** 4 ) - ( valid_moves_qtt * 2 )
    # NOTE(review): divides by 1000 although the loop above runs exactly
    # once (range(1)) — the "average" is off by 1000x unless the loop count
    # is meant to be restored to 1000; confirm before trusting the numbers.
    cpu_avg_time = cpu_avg_time / 1000
    gpu_avg_time = 0
    context = ps.get_nvidia_cuda_context()
    # --- GPU (NVIDIA CUDA) pass; identical to the CPU pass above ---
    for i in range(1):
        population , offset_list , max_chromossome_size = init_pop( context , [ 10000 for i in range(10000)] , puzzle )
        a = time.time()
        # ( self , execution_mode , chromossome_list , chromossome_offset_list , chromossome_max_size , problem_instance ):
        #~ spp.FitnessEnhanced( context , True ).execute( 0 , population , offset_list , max_chromossome_size , [1,0,2,3,4,5,6,7,8] )
        #fitness_results , chromossome_list , chromossome_offset_list =
        spp.FitnessEnhanced( context , False ).execute( 0 , population , offset_list , max_chromossome_size , puzzle )
        a = time.time() - a
        gpu_avg_time += ( a / 60 )
        #~ for x in fitness_results :
        #~ print x
    # this is the last thing that you need to do ;)
    # implement on CPU DEBUG RP fitness, ok? VryHppy :3
    # ( invalid_positions_remaining ** 4 ) - ( valid_moves_qtt * 2 )
    # NOTE(review): same 1000x scaling question as the CPU pass above.
    gpu_avg_time = gpu_avg_time / 1000
    # "gain" > 1 means the GPU pass was faster.
    print 'cpu : ' , cpu_avg_time
    print 'gpu : ' , gpu_avg_time
    print 'gain: ' , (cpu_avg_time / gpu_avg_time)
| 33.821429
| 153
| 0.597677
| 355
| 2,841
| 4.509859
| 0.261972
| 0.052467
| 0.037477
| 0.086196
| 0.725172
| 0.703935
| 0.703935
| 0.703935
| 0.703935
| 0.698314
| 0
| 0.038598
| 0.306934
| 2,841
| 83
| 154
| 34.228916
| 0.774505
| 0.362196
| 0
| 0.357143
| 0
| 0
| 0.014493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.107143
| null | null | 0.107143
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d371c9a5c1360127a63b8834006342b9636e826b
| 56,702
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_ncs5500_coherent_node_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_ncs5500_coherent_node_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_ncs5500_coherent_node_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'Coherent.Nodes.Node.CoherentTimeStats.OptsEaBulkCreate' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.OptsEaBulkCreate',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'opts-ea-bulk-create',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.OptsEaBulkUpdate' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.OptsEaBulkUpdate',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'opts-ea-bulk-update',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.DspEaBulkCreate' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.DspEaBulkCreate',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'dsp-ea-bulk-create',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.DspEaBulkUpdate' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.DspEaBulkUpdate',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'dsp-ea-bulk-update',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.PortStat.LaserOnStats' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.PortStat.LaserOnStats',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'laser-on-stats',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.PortStat.LaserOffStats' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.PortStat.LaserOffStats',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'laser-off-stats',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.PortStat.WlOpStats' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.PortStat.WlOpStats',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'wl-op-stats',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.PortStat.TxpwrOpStats' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.PortStat.TxpwrOpStats',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'txpwr-op-stats',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.PortStat.CdminOpStats' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.PortStat.CdminOpStats',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'cdmin-op-stats',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.PortStat.CdmaxOpStats' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.PortStat.CdmaxOpStats',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'cdmax-op-stats',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.PortStat.TraffictypeOpStats' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.PortStat.TraffictypeOpStats',
False,
[
_MetaInfoClassMember('end', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' end
''',
'end',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('start', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' start
''',
'start',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('time-taken', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' time taken
''',
'time_taken',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('worst-time', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' worst time
''',
'worst_time',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'traffictype-op-stats',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats.PortStat' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats.PortStat',
False,
[
_MetaInfoClassMember('cd-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' cd max
''',
'cd_max',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('cd-min', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' cd min
''',
'cd_min',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('cdmax-op-stats', REFERENCE_CLASS, 'CdmaxOpStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.PortStat.CdmaxOpStats',
[], [],
''' cdmax op stats
''',
'cdmax_op_stats',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('cdmin-op-stats', REFERENCE_CLASS, 'CdminOpStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.PortStat.CdminOpStats',
[], [],
''' cdmin op stats
''',
'cdmin_op_stats',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('laser-off-stats', REFERENCE_CLASS, 'LaserOffStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.PortStat.LaserOffStats',
[], [],
''' laser off stats
''',
'laser_off_stats',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('laser-on-stats', REFERENCE_CLASS, 'LaserOnStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.PortStat.LaserOnStats',
[], [],
''' laser on stats
''',
'laser_on_stats',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('laser-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' laser state
''',
'laser_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('traffic-type', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' traffic type
''',
'traffic_type',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('traffictype-op-stats', REFERENCE_CLASS, 'TraffictypeOpStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.PortStat.TraffictypeOpStats',
[], [],
''' traffictype op stats
''',
'traffictype_op_stats',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('tx-power', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' tx power
''',
'tx_power',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('txpwr-op-stats', REFERENCE_CLASS, 'TxpwrOpStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.PortStat.TxpwrOpStats',
[], [],
''' txpwr op stats
''',
'txpwr_op_stats',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('wavelength', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' wavelength
''',
'wavelength',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('wl-op-stats', REFERENCE_CLASS, 'WlOpStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.PortStat.WlOpStats',
[], [],
''' wl op stats
''',
'wl_op_stats',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'port-stat',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.CoherentTimeStats' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.CoherentTimeStats',
False,
[
_MetaInfoClassMember('device-created', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' device created
''',
'device_created',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('driver-init', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' driver init
''',
'driver_init',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('driver-operational', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' driver operational
''',
'driver_operational',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('dsp-controllers-created', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' dsp controllers created
''',
'dsp_controllers_created',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('dsp-ea-bulk-create', REFERENCE_CLASS, 'DspEaBulkCreate' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.DspEaBulkCreate',
[], [],
''' dsp ea bulk create
''',
'dsp_ea_bulk_create',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('dsp-ea-bulk-update', REFERENCE_CLASS, 'DspEaBulkUpdate' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.DspEaBulkUpdate',
[], [],
''' dsp ea bulk update
''',
'dsp_ea_bulk_update',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('eth-intf-created', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' eth intf created
''',
'eth_intf_created',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('optics-controllers-created', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' optics controllers created
''',
'optics_controllers_created',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('opts-ea-bulk-create', REFERENCE_CLASS, 'OptsEaBulkCreate' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.OptsEaBulkCreate',
[], [],
''' opts ea bulk create
''',
'opts_ea_bulk_create',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('opts-ea-bulk-update', REFERENCE_CLASS, 'OptsEaBulkUpdate' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.OptsEaBulkUpdate',
[], [],
''' opts ea bulk update
''',
'opts_ea_bulk_update',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('port-stat', REFERENCE_LIST, 'PortStat' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats.PortStat',
[], [],
''' port stat
''',
'port_stat',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False, max_elements=6),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'coherent-time-stats',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.Devicemapping.DevMap' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.Devicemapping.DevMap',
False,
[
_MetaInfoClassMember('device-address', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Device address
''',
'device_address',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('ifhandle', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Interface handle
''',
'ifhandle',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('intf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'intf_name',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'dev-map',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.Devicemapping' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.Devicemapping',
False,
[
_MetaInfoClassMember('dev-map', REFERENCE_LIST, 'DevMap' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.Devicemapping.DevMap',
[], [],
''' dev map
''',
'dev_map',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False, max_elements=32),
_MetaInfoClassMember('num-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of dev map entries
''',
'num_entries',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'devicemapping',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.Coherenthealth.PortData.CtpInfo' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.Coherenthealth.PortData.CtpInfo',
False,
[
_MetaInfoClassMember('clei-code-number', ATTRIBUTE, 'str' , None, None,
[(0, 10)], [],
''' CLEI code number
''',
'clei_code_number',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('ctp-type', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' ctp type
''',
'ctp_type',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('date-code-number', ATTRIBUTE, 'str' , None, None,
[(0, 10)], [],
''' date code number
''',
'date_code_number',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' description
''',
'description',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('deviation', ATTRIBUTE, 'str' , None, None,
[(0, 16)], [],
''' deviation
''',
'deviation',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('module-firmware-committed-version-number', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' module firmware committed version number
''',
'module_firmware_committed_version_number',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('module-firmware-running-version-number', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' module firmware running version number
''',
'module_firmware_running_version_number',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('module-hardware-version-number', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' module hardware version number
''',
'module_hardware_version_number',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('part-number', ATTRIBUTE, 'str' , None, None,
[(0, 16)], [],
''' part number
''',
'part_number',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('pid', ATTRIBUTE, 'str' , None, None,
[(0, 16)], [],
''' pid
''',
'pid',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('serial-number', ATTRIBUTE, 'str' , None, None,
[(0, 16)], [],
''' serial number
''',
'serial_number',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('vendorname', ATTRIBUTE, 'str' , None, None,
[(0, 16)], [],
''' vendorname
''',
'vendorname',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('vid', ATTRIBUTE, 'str' , None, None,
[(0, 16)], [],
''' vid
''',
'vid',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'ctp-info',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo.EthData' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo.EthData',
False,
[
_MetaInfoClassMember('admin-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' admin state
''',
'admin_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('ifname', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' ifname
''',
'ifname',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('intf-handle', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' intf handle
''',
'intf_handle',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'eth-data',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo',
False,
[
_MetaInfoClassMember('eth-data', REFERENCE_LIST, 'EthData' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo.EthData',
[], [],
''' eth data
''',
'eth_data',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False, max_elements=2),
_MetaInfoClassMember('intf-count', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' intf count
''',
'intf_count',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'interface-info',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.Coherenthealth.PortData' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.Coherenthealth.PortData',
False,
[
_MetaInfoClassMember('ctp-info', REFERENCE_CLASS, 'CtpInfo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.Coherenthealth.PortData.CtpInfo',
[], [],
''' ctp info
''',
'ctp_info',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('dsp-admin-up', ATTRIBUTE, 'bool' , None, None,
[], [],
''' dsp admin up
''',
'dsp_admin_up',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('dsp-ctrl-created', ATTRIBUTE, 'bool' , None, None,
[], [],
''' dsp ctrl created
''',
'dsp_ctrl_created',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('fp-port-idx', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' fp port idx
''',
'fp_port_idx',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('has-pluggable', ATTRIBUTE, 'bool' , None, None,
[], [],
''' has pluggable
''',
'has_pluggable',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('interface-info', REFERENCE_CLASS, 'InterfaceInfo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo',
[], [],
''' interface info
''',
'interface_info',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('laser-op-rc', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' laser op rc
''',
'laser_op_rc',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('laser-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' laser state
''',
'laser_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('optics-admin-up', ATTRIBUTE, 'bool' , None, None,
[], [],
''' optics admin up
''',
'optics_admin_up',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('optics-ctrl-created', ATTRIBUTE, 'bool' , None, None,
[], [],
''' optics ctrl created
''',
'optics_ctrl_created',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('traffic-op-rc', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' traffic op rc
''',
'traffic_op_rc',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('traffic-type', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' traffic type
''',
'traffic_type',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('wavelength', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' wavelength
''',
'wavelength',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('wlen-op-rc', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' wlen op rc
''',
'wlen_op_rc',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'port-data',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.Coherenthealth' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.Coherenthealth',
False,
[
_MetaInfoClassMember('aipc-srvr-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' aipc srvr state
''',
'aipc_srvr_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('board-type', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' board type
''',
'board_type',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('denali-version', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' denali version
''',
'denali_version',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('dsp-ea-conn', ATTRIBUTE, 'bool' , None, None,
[], [],
''' dsp ea conn
''',
'dsp_ea_conn',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('im-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' im state
''',
'im_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('jlink-op', ATTRIBUTE, 'str' , None, None,
[(0, 1024)], [],
''' jlink op
''',
'jlink_op',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('morgoth-alive', ATTRIBUTE, 'bool' , None, None,
[], [],
''' morgoth alive
''',
'morgoth_alive',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('morgoth-downloaded-version', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' morgoth downloaded version
''',
'morgoth_downloaded_version',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('morgoth-golden-version', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' morgoth golden version
''',
'morgoth_golden_version',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('morgoth-running-version', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' morgoth running version
''',
'morgoth_running_version',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('optics-ea-conn', ATTRIBUTE, 'bool' , None, None,
[], [],
''' optics ea conn
''',
'optics_ea_conn',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('pm-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' pm state
''',
'pm_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('port-data', REFERENCE_LIST, 'PortData' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.Coherenthealth.PortData',
[], [],
''' port data
''',
'port_data',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False, max_elements=6),
_MetaInfoClassMember('prov-infra-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' prov infra state
''',
'prov_infra_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('sdk-fpga-compatible', ATTRIBUTE, 'bool' , None, None,
[], [],
''' sdk fpga compatible
''',
'sdk_fpga_compatible',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('sdk-version', ATTRIBUTE, 'str' , None, None,
[(0, 255)], [],
''' sdk version
''',
'sdk_version',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('sysdb-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' sysdb state
''',
'sysdb_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('vether-state', ATTRIBUTE, 'bool' , None, None,
[], [],
''' vether state
''',
'vether_state',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'coherenthealth',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.PortModeAllInfo.PortmodeEntry' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.PortModeAllInfo.PortmodeEntry',
False,
[
_MetaInfoClassMember('diff', ATTRIBUTE, 'str' , None, None,
[], [],
''' Optics diff
''',
'diff',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('fec', ATTRIBUTE, 'str' , None, None,
[], [],
''' Optics fec
''',
'fec',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('intf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'intf_name',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('modulation', ATTRIBUTE, 'str' , None, None,
[], [],
''' Optics modulation
''',
'modulation',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('speed', ATTRIBUTE, 'str' , None, None,
[], [],
''' Optics speed
''',
'speed',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'portmode-entry',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node.PortModeAllInfo' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node.PortModeAllInfo',
False,
[
_MetaInfoClassMember('num-entries', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of dev map entries
''',
'num_entries',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('portmode-entry', REFERENCE_LIST, 'PortmodeEntry' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.PortModeAllInfo.PortmodeEntry',
[], [],
''' portmode entry
''',
'portmode_entry',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False, max_elements=32),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'port-mode-all-info',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes.Node' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes.Node',
False,
[
_MetaInfoClassMember('node-name', ATTRIBUTE, 'str' , None, None,
[], ['([a-zA-Z0-9_]*\\d+/){1,2}([a-zA-Z0-9_]*\\d+)'],
''' The node name
''',
'node_name',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', True),
_MetaInfoClassMember('coherent-time-stats', REFERENCE_CLASS, 'CoherentTimeStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.CoherentTimeStats',
[], [],
''' Coherent driver performace information
''',
'coherent_time_stats',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('coherenthealth', REFERENCE_CLASS, 'Coherenthealth' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.Coherenthealth',
[], [],
''' Coherent node data for driver health
''',
'coherenthealth',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('devicemapping', REFERENCE_CLASS, 'Devicemapping' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.Devicemapping',
[], [],
''' Coherent node data for device _mapping
''',
'devicemapping',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
_MetaInfoClassMember('port-mode-all-info', REFERENCE_CLASS, 'PortModeAllInfo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node.PortModeAllInfo',
[], [],
''' PortMode all operational data
''',
'port_mode_all_info',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'node',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent.Nodes' : {
'meta_info' : _MetaInfoClass('Coherent.Nodes',
False,
[
_MetaInfoClassMember('node', REFERENCE_LIST, 'Node' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes.Node',
[], [],
''' Coherent discovery operational data for a
particular node
''',
'node',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'nodes',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
'Coherent' : {
'meta_info' : _MetaInfoClass('Coherent',
False,
[
_MetaInfoClassMember('nodes', REFERENCE_CLASS, 'Nodes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper', 'Coherent.Nodes',
[], [],
''' Coherent list of nodes
''',
'nodes',
'Cisco-IOS-XR-ncs5500-coherent-node-oper', False),
],
'Cisco-IOS-XR-ncs5500-coherent-node-oper',
'coherent',
_yang_ns._namespaces['Cisco-IOS-XR-ncs5500-coherent-node-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_coherent_node_oper'
),
},
}
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat.LaserOnStats']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat.LaserOffStats']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat.WlOpStats']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat.TxpwrOpStats']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat.CdminOpStats']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat.CdmaxOpStats']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat.TraffictypeOpStats']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.OptsEaBulkCreate']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.OptsEaBulkUpdate']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.DspEaBulkCreate']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.DspEaBulkUpdate']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats.PortStat']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.CoherentTimeStats']['meta_info']
_meta_table['Coherent.Nodes.Node.Devicemapping.DevMap']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.Devicemapping']['meta_info']
_meta_table['Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo.EthData']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo']['meta_info']
_meta_table['Coherent.Nodes.Node.Coherenthealth.PortData.CtpInfo']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.Coherenthealth.PortData']['meta_info']
_meta_table['Coherent.Nodes.Node.Coherenthealth.PortData.InterfaceInfo']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.Coherenthealth.PortData']['meta_info']
_meta_table['Coherent.Nodes.Node.Coherenthealth.PortData']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.Coherenthealth']['meta_info']
_meta_table['Coherent.Nodes.Node.PortModeAllInfo.PortmodeEntry']['meta_info'].parent =_meta_table['Coherent.Nodes.Node.PortModeAllInfo']['meta_info']
_meta_table['Coherent.Nodes.Node.CoherentTimeStats']['meta_info'].parent =_meta_table['Coherent.Nodes.Node']['meta_info']
_meta_table['Coherent.Nodes.Node.Devicemapping']['meta_info'].parent =_meta_table['Coherent.Nodes.Node']['meta_info']
_meta_table['Coherent.Nodes.Node.Coherenthealth']['meta_info'].parent =_meta_table['Coherent.Nodes.Node']['meta_info']
_meta_table['Coherent.Nodes.Node.PortModeAllInfo']['meta_info'].parent =_meta_table['Coherent.Nodes.Node']['meta_info']
_meta_table['Coherent.Nodes.Node']['meta_info'].parent =_meta_table['Coherent.Nodes']['meta_info']
_meta_table['Coherent.Nodes']['meta_info'].parent =_meta_table['Coherent']['meta_info']
| 49.738596
| 233
| 0.503051
| 4,886
| 56,702
| 5.633238
| 0.043185
| 0.082837
| 0.103546
| 0.145764
| 0.870767
| 0.84588
| 0.819612
| 0.78179
| 0.72573
| 0.684966
| 0
| 0.04032
| 0.357024
| 56,702
| 1,139
| 234
| 49.782265
| 0.714631
| 0
| 0
| 0.539235
| 0
| 0.001006
| 0.419841
| 0.324952
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008048
| 0
| 0.008048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d398da1a4c2daee667a87648532c8c8ca0232439
| 128
|
py
|
Python
|
vestaboard/vbUrls.py
|
Xaelias/Vestaboard
|
a1bc7b6efe6b2ad49079e851c09542c2b9452409
|
[
"MIT"
] | 19
|
2020-04-17T04:59:41.000Z
|
2022-01-27T23:21:28.000Z
|
vestaboard/vbUrls.py
|
Xaelias/Vestaboard
|
a1bc7b6efe6b2ad49079e851c09542c2b9452409
|
[
"MIT"
] | 11
|
2020-04-17T04:59:19.000Z
|
2022-01-07T00:18:34.000Z
|
vestaboard/vbUrls.py
|
Xaelias/Vestaboard
|
a1bc7b6efe6b2ad49079e851c09542c2b9452409
|
[
"MIT"
] | 10
|
2021-02-08T18:35:23.000Z
|
2022-01-27T23:21:23.000Z
|
subscription = 'https://platform.vestaboard.com/subscriptions'
post = "https://platform.vestaboard.com/subscriptions/{}/message"
| 64
| 65
| 0.789063
| 13
| 128
| 7.769231
| 0.615385
| 0.257426
| 0.455446
| 0.514851
| 0.772277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039063
| 128
| 2
| 65
| 64
| 0.821138
| 0
| 0
| 0
| 0
| 0
| 0.782946
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6ca1d9b37d35e8a105a9ff80d0e5c78f8aa49f79
| 177
|
py
|
Python
|
udiary/udiaryapp/views.py
|
michaelzadra1/udiary
|
3acaf154173971e2702307218105c8961c00771a
|
[
"MIT"
] | null | null | null |
udiary/udiaryapp/views.py
|
michaelzadra1/udiary
|
3acaf154173971e2702307218105c8961c00771a
|
[
"MIT"
] | null | null | null |
udiary/udiaryapp/views.py
|
michaelzadra1/udiary
|
3acaf154173971e2702307218105c8961c00771a
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
return HttpResponse("Hello world. Welcome to uDiary!")
| 22.125
| 55
| 0.79096
| 24
| 177
| 5.833333
| 0.833333
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 177
| 7
| 56
| 25.285714
| 0.915033
| 0.129944
| 0
| 0
| 0
| 0
| 0.203947
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
6ce4437d9f2bf91d703787cb2b60ea9bfbaa80d1
| 92
|
py
|
Python
|
memformer/__init__.py
|
lucidrains/memformer
|
c8f9a97f8b3fc7a90c21f5d3ae911499bf13e50a
|
[
"MIT"
] | 62
|
2020-10-10T04:56:17.000Z
|
2022-02-18T21:17:14.000Z
|
memformer/__init__.py
|
lucidrains/memformer
|
c8f9a97f8b3fc7a90c21f5d3ae911499bf13e50a
|
[
"MIT"
] | 3
|
2020-10-30T17:59:35.000Z
|
2021-07-09T09:43:24.000Z
|
memformer/__init__.py
|
lucidrains/memformer
|
c8f9a97f8b3fc7a90c21f5d3ae911499bf13e50a
|
[
"MIT"
] | 4
|
2020-10-30T15:31:00.000Z
|
2022-01-18T10:23:27.000Z
|
from memformer.memformer import Memformer
from memformer.mrbp import memory_replay_backprop
| 30.666667
| 49
| 0.891304
| 12
| 92
| 6.666667
| 0.583333
| 0.325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 92
| 2
| 50
| 46
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9f11818b506c169e7a9be67d144ab57cece4bef5
| 374
|
py
|
Python
|
src/hello.py
|
kabrezi/bstrip
|
9386c79434c892d0dbef1263b9f7f1adb598bb5f
|
[
"Apache-2.0"
] | null | null | null |
src/hello.py
|
kabrezi/bstrip
|
9386c79434c892d0dbef1263b9f7f1adb598bb5f
|
[
"Apache-2.0"
] | null | null | null |
src/hello.py
|
kabrezi/bstrip
|
9386c79434c892d0dbef1263b9f7f1adb598bb5f
|
[
"Apache-2.0"
] | null | null | null |
import os
OAL = 4
print('sudo /home/pi/bstrip/src/Cutwire1.py -a advance_' + str(OAL))
def main():
os.system('sudo /home/pi/bstrip/src/Cutwire1.py -a advance_' + str(OAL))
main()
#os.system('sudo /home/pi/bstrip/src/Cutwire1.py -l ' + OAL[0] + ' -s ' + Strip_A[0] + ' -c ' + Stip_B[0] + ' -n ' + Quantity[0])
#main()
#sudo /home/pi/bstrip/src/Cutwire1.py -a advance_4
| 31.166667
| 129
| 0.631016
| 66
| 374
| 3.5
| 0.409091
| 0.138528
| 0.17316
| 0.277056
| 0.761905
| 0.761905
| 0.761905
| 0.761905
| 0.761905
| 0.601732
| 0
| 0.031348
| 0.147059
| 374
| 12
| 130
| 31.166667
| 0.69279
| 0.489305
| 0
| 0
| 0
| 0
| 0.510638
| 0.329787
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.333333
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9f1d60b83d1826a4e62e9336d7e7bf4d03d90672
| 11,128
|
py
|
Python
|
kolibri/core/content/test/test_upgrade.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 545
|
2016-01-19T19:26:55.000Z
|
2022-03-20T00:13:04.000Z
|
kolibri/core/content/test/test_upgrade.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 8,329
|
2016-01-19T19:32:02.000Z
|
2022-03-31T21:23:12.000Z
|
kolibri/core/content/test/test_upgrade.py
|
MBKayro/kolibri
|
0a38a5fb665503cf8f848b2f65938e73bfaa5989
|
[
"MIT"
] | 493
|
2016-01-19T19:26:48.000Z
|
2022-03-28T14:35:05.000Z
|
import tempfile
import uuid
from django.core.management import call_command
from django.test import TransactionTestCase
from le_utils.constants import content_kinds
from mock import call
from mock import patch
from .sqlalchemytesting import django_connection_engine
from kolibri.core.content.constants.schema_versions import CONTENT_SCHEMA_VERSION
from kolibri.core.content.models import ChannelMetadata
from kolibri.core.content.models import ContentNode
from kolibri.core.content.upgrade import fix_multiple_trees_with_tree_id1
from kolibri.core.content.upgrade import update_num_coach_contents
def get_engine(connection_string):
    """Return the test SQLAlchemy engine, ignoring *connection_string*.

    Patched over kolibri's ``sqlalchemybridge.get_engine`` so that upgrade
    code operates on the Django test database connection.
    """
    engine = django_connection_engine()
    return engine
# Channel id created by the "content_test.json" fixture used below.
test_channel_id = "6199dde695db4ee4ab392222d5af1e5c"
# (fd, path) tuples; the paths stand in for content-database files in the
# @patch(..., return_value=mock_content_file[1]) decorators below.
# NOTE(review): the open file descriptors are never closed -- harmless for a
# short test run, but technically a small resource leak.
mock_content_file = tempfile.mkstemp()
mock_content_db_file = tempfile.mkstemp()
@patch("kolibri.core.content.upgrade.import_channel_from_local_db")
class FixMultipleTreesWithId1TestCase(TransactionTestCase):
    """Tests for fix_multiple_trees_with_tree_id1().

    Each test creates extra channel root nodes, forces every root onto
    tree_id=1 (an illegal state MPTT would normally prevent), then runs the
    upgrade routine and checks that channels whose content database file
    exists are re-imported, leaving only one root with tree_id=1.

    The class-level patch supplies ``import_mock`` as the last mock argument
    of every test method; the per-method patches supply ``path_mock``.
    """

    # Baseline channel/content tree shared by all tests.
    fixtures = ["content_test.json"]

    def execute(self):
        """Run the upgrade routine under test."""
        fix_multiple_trees_with_tree_id1()

    @patch(
        "kolibri.core.content.upgrade.get_content_database_file_path",
        return_value=mock_content_file[1],
    )
    def test_extra_channel_contentdb_exists(self, path_mock, import_mock):
        """One extra channel with a content DB present: it is re-imported."""
        root_node = ContentNode.objects.create(
            title="test",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=uuid.uuid4().hex,
        )
        ChannelMetadata.objects.create(
            id=root_node.channel_id,
            root=root_node,
            name="test",
            min_schema_version=CONTENT_SCHEMA_VERSION,
        )
        # Do this to side step django mptts auto tree_id code
        ContentNode.objects.filter(parent=None).update(tree_id=1)
        self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 2)
        self.execute()
        self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 1)
        import_mock.assert_called_with(root_node.channel_id)

    @patch(
        "kolibri.core.content.upgrade.get_content_database_file_path",
        return_value=mock_content_file[1],
    )
    def test_two_extra_channels_contentdb_exists(self, path_mock, import_mock):
        """Two extra channels, both with content DBs: both are re-imported."""
        root_node_1 = ContentNode.objects.create(
            title="test",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=uuid.uuid4().hex,
        )
        ChannelMetadata.objects.create(
            id=root_node_1.channel_id,
            root=root_node_1,
            name="test",
            min_schema_version=CONTENT_SCHEMA_VERSION,
        )
        root_node_2 = ContentNode.objects.create(
            title="test",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=uuid.uuid4().hex,
        )
        # Add an additional node so that root_node_1 channel is processed first.
        ContentNode.objects.create(
            title="test1",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=root_node_2.channel_id,
            parent=root_node_2,
        )
        ChannelMetadata.objects.create(
            id=root_node_2.channel_id,
            root=root_node_2,
            name="test",
            min_schema_version=CONTENT_SCHEMA_VERSION,
        )
        # Do this to side step django mptts auto tree_id code
        ContentNode.objects.filter(parent=None).update(tree_id=1)
        self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 3)
        self.execute()
        self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 1)
        import_mock.assert_has_calls(
            [call(root_node_1.channel_id), call(root_node_2.channel_id)]
        )

    @patch(
        "kolibri.core.content.upgrade.get_content_database_file_path", return_value=""
    )
    def test_extra_channel_no_contentdb_exists(self, path_mock, import_mock):
        """Extra channel but no content DB file: nothing is imported or fixed."""
        root_node = ContentNode.objects.create(
            title="test",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=uuid.uuid4().hex,
        )
        ChannelMetadata.objects.create(
            id=root_node.channel_id,
            root=root_node,
            name="test",
            min_schema_version=CONTENT_SCHEMA_VERSION,
        )
        # Do this to side step django mptts auto tree_id code
        ContentNode.objects.filter(parent=None).update(tree_id=1)
        self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 2)
        self.execute()
        # Both roots still share tree_id=1: the routine skipped the channel.
        self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 2)
        import_mock.assert_not_called()

    @patch(
        "kolibri.core.content.upgrade.get_content_database_file_path",
        side_effect=["", mock_content_file[1]],
    )
    def test_two_extra_channels_one_contentdb_exists(self, path_mock, import_mock):
        """Two extra channels; only the second has a content DB.

        The side_effect sequence returns "" for the first channel processed
        (root_node_1, forced first by its extra child node) and a real path
        for the second -- so only root_node_2's channel is re-imported.
        """
        root_node_1 = ContentNode.objects.create(
            title="test",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=uuid.uuid4().hex,
        )
        ChannelMetadata.objects.create(
            id=root_node_1.channel_id,
            root=root_node_1,
            name="test",
            min_schema_version=CONTENT_SCHEMA_VERSION,
        )
        root_node_2 = ContentNode.objects.create(
            title="test",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=uuid.uuid4().hex,
        )
        # Add an additional node so that root_node_1 channel is processed first.
        ContentNode.objects.create(
            title="test1",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=root_node_2.channel_id,
            parent=root_node_2,
        )
        ChannelMetadata.objects.create(
            id=root_node_2.channel_id,
            root=root_node_2,
            name="test",
            min_schema_version=CONTENT_SCHEMA_VERSION,
        )
        # Do this to side step django mptts auto tree_id code
        ContentNode.objects.filter(parent=None).update(tree_id=1)
        self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 3)
        self.execute()
        self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 2)
        # root_node_1's channel must NOT have been imported (no DB file).
        with self.assertRaises(AssertionError):
            import_mock.assert_called_with(root_node_1.channel_id)
        import_mock.assert_called_with(root_node_2.channel_id)
@patch("kolibri.core.content.utils.sqlalchemybridge.get_engine", new=get_engine)
class UpdateNumCoachContents(TransactionTestCase):
    """Tests for update_num_coach_contents().

    Verifies that num_coach_contents on topic/root nodes counts available,
    non-topic coach-content descendants: leaf coach content propagates up,
    while merely marking a TOPIC node as coach content does not add to the
    count.  The class-level patch routes SQLAlchemy onto the Django test DB
    via get_engine above.
    """

    fixtures = ["content_test.json"]

    def setUp(self):
        """Start every test with all fixture nodes unavailable."""
        super(UpdateNumCoachContents, self).setUp()
        ContentNode.objects.all().update(available=False)

    def test_no_content_nodes_coach_content(self):
        """No coach content anywhere -> root count is 0."""
        ContentNode.objects.all().update(available=True)
        ContentNode.objects.all().update(coach_content=False)
        update_num_coach_contents()
        root = ChannelMetadata.objects.get(id=test_channel_id).root
        self.assertEqual(root.num_coach_contents, 0)

    def test_all_root_content_nodes_coach_content(self):
        """All non-topic children of the root are coach content -> count 2.

        NOTE(review): the expected value 2 reflects the number of non-topic
        root children in the content_test.json fixture.
        """
        ContentNode.objects.all().update(available=True, coach_content=False)
        root_node = ContentNode.objects.get(parent__isnull=True)
        ContentNode.objects.filter(parent=root_node).exclude(
            kind=content_kinds.TOPIC
        ).update(coach_content=True)
        update_num_coach_contents()
        root_node.refresh_from_db()
        self.assertEqual(root_node.num_coach_contents, 2)

    def test_one_root_content_node_coach_content(self):
        """Exactly one non-topic root child is coach content -> count 1."""
        ContentNode.objects.all().update(available=True, coach_content=False)
        root_node = ContentNode.objects.get(parent__isnull=True)
        node = (
            ContentNode.objects.filter(parent=root_node)
            .exclude(kind=content_kinds.TOPIC)
            .first()
        )
        node.coach_content = True
        node.save()
        update_num_coach_contents()
        root_node.refresh_from_db()
        self.assertEqual(root_node.num_coach_contents, 1)

    def test_one_root_topic_node_coach_content(self):
        """A TOPIC node flagged as coach content does not count -> 0."""
        ContentNode.objects.all().update(available=True, coach_content=False)
        root_node = ContentNode.objects.get(parent__isnull=True)
        node = ContentNode.objects.filter(
            parent=root_node, kind=content_kinds.TOPIC
        ).first()
        node.coach_content = True
        node.save()
        update_num_coach_contents()
        root_node.refresh_from_db()
        self.assertEqual(root_node.num_coach_contents, 0)

    def test_one_child_node_coach_content(self):
        """A coach-content VIDEO deep under a topic counts on both ancestors."""
        ContentNode.objects.all().update(available=True, coach_content=False)
        root_node = ContentNode.objects.get(parent__isnull=True)
        node = ContentNode.objects.filter(
            parent=root_node, kind=content_kinds.TOPIC
        ).first()
        ContentNode.objects.create(
            title="test1",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=root_node.channel_id,
            parent=node,
            kind=content_kinds.VIDEO,
            available=True,
            coach_content=True,
        )
        update_num_coach_contents()
        root_node.refresh_from_db()
        node.refresh_from_db()
        self.assertEqual(root_node.num_coach_contents, 1)
        self.assertEqual(node.num_coach_contents, 1)

    def test_one_child_coach_content_parent_no_siblings(self):
        """A topic with one coach and one non-coach child counts exactly 1."""
        ContentNode.objects.all().update(available=True, coach_content=False)
        root_node = ContentNode.objects.get(parent__isnull=True)
        topic_node = ContentNode.objects.filter(
            parent=root_node, kind=content_kinds.TOPIC
        ).first()
        parent_node = ContentNode.objects.create(
            title="test1",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=root_node.channel_id,
            parent=topic_node,
            kind=content_kinds.TOPIC,
            available=True,
            coach_content=False,
        )
        ContentNode.objects.create(
            title="test2",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=root_node.channel_id,
            parent=parent_node,
            kind=content_kinds.VIDEO,
            available=True,
            coach_content=True,
        )
        ContentNode.objects.create(
            title="test3",
            id=uuid.uuid4().hex,
            content_id=uuid.uuid4().hex,
            channel_id=root_node.channel_id,
            parent=parent_node,
            kind=content_kinds.VIDEO,
            available=True,
            coach_content=False,
        )
        update_num_coach_contents()
        parent_node.refresh_from_db()
        self.assertEqual(parent_node.num_coach_contents, 1)

    def tearDown(self):
        """Flush the DB so created nodes don't leak into other tests."""
        call_command("flush", interactive=False)
        super(UpdateNumCoachContents, self).tearDown()
| 37.85034
| 87
| 0.654295
| 1,325
| 11,128
| 5.205283
| 0.100377
| 0.059156
| 0.047847
| 0.060896
| 0.827026
| 0.803393
| 0.749746
| 0.728868
| 0.722923
| 0.707699
| 0
| 0.013029
| 0.248203
| 11,128
| 293
| 88
| 37.979522
| 0.811379
| 0.031362
| 0
| 0.626923
| 0
| 0
| 0.046045
| 0.035184
| 0
| 0
| 0
| 0
| 0.080769
| 1
| 0.053846
| false
| 0
| 0.088462
| 0.003846
| 0.161538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9f374414105bd6faa4e6103acad61f7686817d67
| 3,085
|
py
|
Python
|
src/rating/manager/rating_instances.py
|
alterway/rating-operator-manager
|
985252dbfa300a83938d6b19b8aa93c7c865916e
|
[
"Apache-2.0"
] | 11
|
2021-02-16T17:11:36.000Z
|
2021-11-09T16:28:15.000Z
|
src/rating/manager/rating_instances.py
|
alterway/rating-operator-manager
|
985252dbfa300a83938d6b19b8aa93c7c865916e
|
[
"Apache-2.0"
] | null | null | null |
src/rating/manager/rating_instances.py
|
alterway/rating-operator-manager
|
985252dbfa300a83938d6b19b8aa93c7c865916e
|
[
"Apache-2.0"
] | null | null | null |
from logging import Logger
from typing import Dict
import kopf
import requests
from datetime import datetime as dt
from rating.manager import utils
@kopf.on.create('rating.alterway.fr', 'v1', 'ratingruleinstances')
@kopf.on.update('rating.alterway.fr', 'v1', 'ratingruleinstances')
@utils.assert_rating_namespace
def rating_instances_creation(body: Dict,
                              spec: Dict,
                              logger: Logger,
                              **kwargs: Dict):
    """
    Create values of RatingRuleInstances through rating-api after creation in kubernetes.

    :body (Dict) A dictionary containing the created kubernetes object.
    :spec (Dict) A smaller version of body.
    :logger (Logger) A Logger object to log information.
    :kwargs (Dict) A dictionary holding unused parameters.
    """
    # Only act on fully-specified instances; partial/irrelevant specs are
    # silently ignored.  ('k' in spec is the idiomatic form of 'k' in spec.keys().)
    if 'cpu' in spec and 'memory' in spec and 'price' in spec:
        rules_name = body['metadata']['name']
        # NOTE(review): {} as the .get() default for scalar fields looks odd --
        # presumably the API tolerates it; confirm against rating-api schema.
        data = {
            'metric_name': spec.get('name', {}),
            'timeframe': spec.get('timeframe', {}),
            'cpu': spec.get('cpu', {}),
            'memory': spec.get('memory', {}),
            'price': spec.get('price', {}),
        }
        try:
            utils.post_for_rating_api(endpoint='/templates/metric/add',
                                      payload=data)
        except utils.ConfigurationExceptionError as exc:
            logger.error(f'RatingRulesInstance {rules_name} is invalid. Reason: {exc}')
        except requests.exceptions.RequestException:
            logger.error(f'Request for RatingRulesInstance {rules_name} update failed')
        else:
            # Success path: the rule template was accepted by the rating-api.
            logger.info(
                f'RatingRule {rules_name} created/updated.')
@kopf.on.delete('rating.alterway.fr', 'v1', 'ratingruleinstances')
@utils.assert_rating_namespace
def rating_instances_deletion(body: Dict,
                              spec: Dict,
                              logger: Logger,
                              **kwargs: Dict):
    """
    Delete values of RatingRuleInstances through rating-api after creation in kubernetes.

    :body (Dict) A dictionary containing the deleted kubernetes object.
    :spec (Dict) A smaller version of body.
    :logger (Logger) A Logger object to log information.
    :kwargs (Dict) A dictionary holding unused parameters.
    """
    # Mirror the creation handler's guard so only fully-specified instances
    # trigger a delete.  ('k' in spec is the idiomatic form of 'k' in spec.keys().)
    if 'cpu' in spec and 'memory' in spec and 'price' in spec:
        rules_name = body['metadata']['name']
        # Deletion is keyed by metric name only.
        data = {
            'metric_name': spec.get('name', {}),
        }
        try:
            utils.post_for_rating_api(endpoint='/templates/metric/delete',
                                      payload=data)
        except utils.ConfigurationExceptionError as exc:
            logger.error(f'RatingRulesInstance {rules_name} is invalid. Reason: {exc}')
        except requests.exceptions.RequestException:
            logger.error(f'Request for RatingRulesInstance {rules_name} delete failed')
        else:
            logger.info(
                f'RatingRule {rules_name} deleted.')
| 41.133333
| 89
| 0.603566
| 330
| 3,085
| 5.569697
| 0.272727
| 0.039173
| 0.032644
| 0.028292
| 0.826442
| 0.806311
| 0.806311
| 0.806311
| 0.721436
| 0.670294
| 0
| 0.001363
| 0.286548
| 3,085
| 75
| 90
| 41.133333
| 0.833712
| 0.196759
| 0
| 0.566038
| 0
| 0
| 0.245861
| 0.018626
| 0
| 0
| 0
| 0
| 0.037736
| 1
| 0.037736
| false
| 0
| 0.113208
| 0
| 0.150943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9f6619f5e5b2722b43c1c5853bc91f32ba40b670
| 145
|
py
|
Python
|
models/__init__.py
|
zhangsiyu1103/ESNAC
|
ecec9860d1fb9f82a61c61dc2bcd1b17d58352dd
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
zhangsiyu1103/ESNAC
|
ecec9860d1fb9f82a61c61dc2bcd1b17d58352dd
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
zhangsiyu1103/ESNAC
|
ecec9860d1fb9f82a61c61dc2bcd1b17d58352dd
|
[
"MIT"
] | null | null | null |
from .extension import *
from .resnet_m import *
from .vgg_m import *
from .shufflenet_m import *
from .alexnet_m import *
from .sample import *
| 20.714286
| 27
| 0.751724
| 22
| 145
| 4.772727
| 0.409091
| 0.47619
| 0.419048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165517
| 145
| 6
| 28
| 24.166667
| 0.867769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
9f7d81551ac880094346429c772ed657c20f5206
| 99
|
py
|
Python
|
pystiche/ops/__init__.py
|
jbueltemeier/pystiche
|
0d0707121e63c4355303446e62a4894e86a7b763
|
[
"BSD-3-Clause"
] | null | null | null |
pystiche/ops/__init__.py
|
jbueltemeier/pystiche
|
0d0707121e63c4355303446e62a4894e86a7b763
|
[
"BSD-3-Clause"
] | null | null | null |
pystiche/ops/__init__.py
|
jbueltemeier/pystiche
|
0d0707121e63c4355303446e62a4894e86a7b763
|
[
"BSD-3-Clause"
] | null | null | null |
from .comparison import *
from .container import *
from .op import *
from .regularization import *
| 19.8
| 29
| 0.757576
| 12
| 99
| 6.25
| 0.5
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161616
| 99
| 4
| 30
| 24.75
| 0.903614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9f8c7efbd20aec3199f8b0ef0af080be35192260
| 193
|
py
|
Python
|
theremini_player/setup.py
|
civerachb-cpr/theremini_ros
|
162aabd0993b782869e3fbe69e090c8c8c9ca042
|
[
"BSD-3-Clause"
] | null | null | null |
theremini_player/setup.py
|
civerachb-cpr/theremini_ros
|
162aabd0993b782869e3fbe69e090c8c8c9ca042
|
[
"BSD-3-Clause"
] | null | null | null |
theremini_player/setup.py
|
civerachb-cpr/theremini_ros
|
162aabd0993b782869e3fbe69e090c8c8c9ca042
|
[
"BSD-3-Clause"
] | null | null | null |
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup

# Catkin install script: package metadata comes from package.xml via
# generate_distutils_setup; Python sources live under src/.
setup_args = generate_distutils_setup(
    packages=['theremini_player'],
    package_dir={'': 'src'},
)
setup(**setup_args)
| 24.125
| 60
| 0.792746
| 26
| 193
| 5.576923
| 0.615385
| 0.234483
| 0.303448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088083
| 193
| 7
| 61
| 27.571429
| 0.823864
| 0
| 0
| 0
| 1
| 0
| 0.098446
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
9fbed254e77abddc91ffe591cfeb37ab31231580
| 22,311
|
py
|
Python
|
mkhockeydata.py
|
GameMaker2k/Neo-Hockey-Test
|
5737bfedf0d83f69964e85ac1dbf7e6a93c13f44
|
[
"BSD-3-Clause"
] | 1
|
2020-04-04T10:25:42.000Z
|
2020-04-04T10:25:42.000Z
|
mkhockeydata.py
|
GameMaker2k/Neo-Hockey-Test
|
5737bfedf0d83f69964e85ac1dbf7e6a93c13f44
|
[
"BSD-3-Clause"
] | null | null | null |
mkhockeydata.py
|
GameMaker2k/Neo-Hockey-Test
|
5737bfedf0d83f69964e85ac1dbf7e6a93c13f44
|
[
"BSD-3-Clause"
] | 3
|
2021-09-07T08:44:33.000Z
|
2021-12-07T23:49:39.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
This program is free software; you can redistribute it and/or modify
it under the terms of the Revised BSD License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Revised BSD License for more details.
Copyright 2015-2021 Game Maker 2k - https://github.com/GameMaker2k
Copyright 2015-2021 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
$FileInfo: mkhockeydata.py - Last Update: 3/17/2021 Ver. 0.6.0 RC 1 - Author: cooldude2k $
'''
from __future__ import absolute_import, division, print_function, unicode_literals;
import sys, os, libhockeydata, argparse, logging, shutil;
# Re-export identity/version metadata from libhockeydata so this CLI
# reports the same name and version as the library it drives.
__project__ = libhockeydata.__project__;
__program_name__ = libhockeydata.__program_name__;
__project_url__ = libhockeydata.__project_url__;
__version_info__ = libhockeydata.__version_info__;
__version_date_info__ = libhockeydata.__version_date_info__;
__version_date__ = libhockeydata.__version_date__;
__version_date_plusrc__ = libhockeydata.__version_date_plusrc__;
__version__ = libhockeydata.__version__;
# Every action name this CLI recognizes, including short aliases
# ("h", "ver", "v", "mksymlink").  Positions matter: the dispatch code
# below addresses actions as getactlist[<index>].
getactlist = ["mkhockeyxmlfile", "mkhockeyxmlfromolddatabase", "mkhockeyxmlfromsql", "mkhockeydatabase", "mkhockeydatabasefromsql", "mkhockeypyfromdatabase", "mkhockeypyfromxmlfile", "mkhockeypyaltfromdatabase", "mkhockeypyaltfromxmlfile", "mkhockeysqlfromdatabase", "mkhockeysqlfromxmlfile", "mkhockeyjsonfromxml", "mkhockeyxmlfromjson", "mkhockeyxmlfileclean", "help", "h", "version", "ver", "v", "mksymlinks", "mksymlink"];
# One human-readable description per entry of gethelplist, in the same
# order (the argparse sections below also index into this list).
getactdesc = ["convert hockey sqlite database to hockey xml file", "convert old hockey sqlite database to hockey xml file", "convert hockey sql dump file to hockey xml file", "convert hockey xml file to hockey sqlite database", "convert hockey sql dump file to sqlite database", "convert hockey sqlite database to hockey python file", "convert hockey xml file to hockey python file", "convert hockey sqlite database to hockey python alt file", "convert hockey xml file to hockey python alt file", "convert hockey sqlite database to hockey sql dump file", "convert hockey xml file to hockey sql dump file", "convert hockey xml file to hockey json file", "convert hockey json file to hockey xml file", "cleanup hockey xml files", "show this help page", "get version number of "+__project__, "make symbolic links"];
# Actions listed on the help page (aliases omitted).
gethelplist = ["mkhockeyxmlfile", "mkhockeyxmlfromolddatabase", "mkhockeyxmlfromsql", "mkhockeydatabase", "mkhockeydatabasefromsql", "mkhockeypyfromdatabase", "mkhockeypyfromxmlfile", "mkhockeypyaltfromdatabase", "mkhockeypyaltfromxmlfile", "mkhockeysqlfromdatabase", "mkhockeysqlfromxmlfile", "mkhockeyjsonfromxml", "mkhockeyxmlfromjson", "mkhockeyxmlfileclean", "help", "version", "mksymlinks"];
# Actions that get a symlink when the "mksymlinks" action runs.
getsymlist = ["mkhockeyxmlfile", "mkhockeyxmlfromolddatabase", "mkhockeyxmlfromsql", "mkhockeydatabase", "mkhockeydatabasefromsql", "mkhockeypyfromdatabase", "mkhockeypyfromxmlfile", "mkhockeypyaltfromdatabase", "mkhockeypyaltfromxmlfile", "mkhockeysqlfromdatabase", "mkhockeysqlfromxmlfile", "mkhockeyjsonfromxml", "mkhockeyxmlfromjson", "mkhockeyxmlfileclean"];
defaction = getactlist[14];  # default action is "help"
# Default input/output file names come from the library configuration.
defxmlfile = libhockeydata.defaultxmlfile;
defsdbfile = libhockeydata.defaultsdbfile;
defoldsdbfile = libhockeydata.defaultoldsdbfile;
defpyfile = libhockeydata.defaultpyfile;
defsqlfile = libhockeydata.defaultsqlfile;
defjsonfile = libhockeydata.defaultjsonfile;
# Assemble the help text: one "<action>: <description>" line per action.
getactstr = "Actions: ";
getverstr = __project__+" "+__version__;
for getactsublist, getactsubdesc in zip(gethelplist, getactdesc):
 getactstr = getactstr+"\n"+getactsublist+": "+getactsubdesc+" ";
getactstr = getactstr.strip();
# Work out which action to run.  Priority order (later steps win):
#   1) the default action ("help");
#   2) an action embedded in this script's own file name, so a symlinked
#      copy named after an action behaves as that action;
#   3) an action named anywhere on the command line.
curaction = defaction;
cursaction = os.path.splitext(os.path.basename(sys.argv[0]))[0];
cursactionspt = list(cursaction.split("-"));
if(len(cursactionspt)<=1):
 cursaction = cursactionspt[0];
if(len(cursactionspt)>1):
 # Hyphen-separated script name: take the first part that is a known action.
 for cursactionact in cursactionspt:
  if(cursactionact in getactlist):
   cursaction = cursactionact;
   break;
if(cursaction in getactlist):
 curaction = cursaction;
if(len(sys.argv)>=2):
 if(sys.argv[1] not in getactlist):
  # First CLI argument is not an action: fall back to the default, or to
  # the action derived from the script name if that one was valid.
  curaction = defaction;
  if(cursaction in getactlist):
   curaction = cursaction;
if(len(sys.argv)>1):
 # Any argument that names a known action selects it (first match wins).
 for curargact in sys.argv:
  if(curargact in getactlist):
   curaction = curargact;
   break;
# "help"/"h": print the generated action list.
if(curaction==getactlist[14] or curaction==getactlist[15]):
 print(getactstr);
# "version"/"ver"/"v": print project name and version.
if(curaction==getactlist[16] or curaction==getactlist[17] or curaction==getactlist[18]):
 print(getverstr);
# "mksymlinks"/"mksymlink": create one symlink per convert action next to
# this script so each action can be invoked under its own name; falls back
# to copying the script where symlinks are unavailable.
if((curaction==getactlist[19] or curaction==getactlist[20])):
 for cursymact in getsymlist:
  curscrpath = os.path.dirname(sys.argv[0]);
  infilename = sys.argv[0];
  infilenameinfo = os.path.splitext(sys.argv[0]);
  if(curscrpath==""):
   curscrpath = ".";
  if(os.sep=="\\"):
   # Normalize Windows path separators so printed paths use "/".
   curscrpath = curscrpath.replace(os.sep, "/");
   infilename = infilename.replace(os.sep, "/");
  curscrpath = curscrpath+"/";
  outfilename = curscrpath+cursymact;
  outfileext = str(infilenameinfo[1]).rstrip(".");
  outfilefull = outfilename+outfileext;
  try:
   os.symlink(infilename, outfilefull);
   print("'"+outfilefull+"' -> '"+infilename+"'");
  except OSError:
   # Symlink creation failed (permissions/filesystem): copy the file instead.
   shutil.copy2(infilename, outfilefull);
   print("'"+outfilefull+"' -> '"+infilename+"'");
  except AttributeError:
   # os.symlink not available on this platform: copy the file instead.
   shutil.copy2(infilename, outfilefull);
   print("'"+outfilefull+"' -> '"+infilename+"'");
# Action 0: mkhockeyxmlfile - convert a hockey sqlite database to an xml file.
if(curaction==getactlist[0]):
 argparser = argparse.ArgumentParser(description=getactdesc[0], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defsdbfile), help="sqlite database to convert");
 # Consistency fix: honor the OUTFILE environment variable like every other
 # action does (this default was previously a bare None).
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="xml file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyXMLFileFromHockeyDatabase(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 1: mkhockeyxmlfromolddatabase - old sqlite database -> xml file.
if(curaction==getactlist[1]):
 argparser = argparse.ArgumentParser(description=getactdesc[1], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defoldsdbfile), help="sqlite database to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="xml file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyXMLFileFromOldHockeyDatabase(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 2: mkhockeyxmlfromsql - sql dump file -> xml file.
if(curaction==getactlist[2]):
 argparser = argparse.ArgumentParser(description=getactdesc[2], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defsqlfile), help="sql dump file to import");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="xml file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 # Second positional argument (sdbfile) is None: the library uses a
 # temporary/in-memory database for the intermediate step.
 libhockeydata.MakeHockeyXMLFileFromHockeySQL(getargs.infile, None, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 3: mkhockeydatabase - xml file -> sqlite database.
if(curaction==getactlist[3]):
 argparser = argparse.ArgumentParser(description=getactdesc[3], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defxmlfile), help="xml file to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="sqlite database to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyDatabaseFromHockeyXML(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 4: mkhockeydatabasefromsql - sql dump file -> sqlite database.
if(curaction==getactlist[4]):
 argparser = argparse.ArgumentParser(description=getactdesc[4], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defsqlfile), help="sql dump file to import");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="sqlite database to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyDatabaseFromHockeySQL(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 5: mkhockeypyfromdatabase - sqlite database -> python file.
if(curaction==getactlist[5]):
 argparser = argparse.ArgumentParser(description=getactdesc[5], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defsdbfile), help="sqlite database to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="python file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyPythonFileFromHockeyDatabase(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 6: mkhockeypyfromxmlfile - xml file -> python file.
if(curaction==getactlist[6]):
 argparser = argparse.ArgumentParser(description=getactdesc[6], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defxmlfile), help="xml file to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="python file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyPythonFileFromHockeyXML(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 7: mkhockeypyaltfromdatabase - sqlite database -> python alt file.
if(curaction==getactlist[7]):
 # BUG FIX: the description previously reused getactdesc[5] (the plain
 # python-file action); getactdesc[7] is the entry for this action.
 argparser = argparse.ArgumentParser(description=getactdesc[7], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defsdbfile), help="sqlite database to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="python file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyPythonAltFileFromHockeyDatabase(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 8: mkhockeypyaltfromxmlfile - xml file -> python alt file.
if(curaction==getactlist[8]):
 # BUG FIX: the description previously reused getactdesc[6] (xml ->
 # plain python); getactdesc[8] is the entry for this action.
 argparser = argparse.ArgumentParser(description=getactdesc[8], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defxmlfile), help="xml file to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="python file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyPythonAltFileFromHockeyXML(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 9: mkhockeysqlfromdatabase - sqlite database -> sql dump file.
if(curaction==getactlist[9]):
 # BUG FIX: the description previously reused getactdesc[7] (python alt
 # from database); getactdesc[9] is the entry for this action.
 argparser = argparse.ArgumentParser(description=getactdesc[9], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defsdbfile), help="sqlite database to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="sql dump file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeySQLFileFromHockeyDatabase(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 10: mkhockeysqlfromxmlfile - xml file -> sql dump file.
if(curaction==getactlist[10]):
 # BUG FIX: the description previously reused getactdesc[8] (python alt
 # from xml); getactdesc[10] is the entry for this action.
 argparser = argparse.ArgumentParser(description=getactdesc[10], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defxmlfile), help="xml file to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="sql dump file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeySQLFileFromHockeyXML(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 11: mkhockeyjsonfromxml - xml file -> json file (or stdout).
if(curaction==getactlist[11]):
 # BUG FIX: the description previously reused getactdesc[6] (xml ->
 # python); getactdesc[11] is the json-from-xml entry.
 argparser = argparse.ArgumentParser(description=getactdesc[11], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defxmlfile), help="xml file to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', defjsonfile), help="json file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 if(getargs.outfile is None):
  # No output path given: emit the JSON directly instead of writing a file.
  # NOTE(review): this branch only triggers when defjsonfile is None and
  # OUTFILE is unset - confirm defaultjsonfile's value in libhockeydata.
  libhockeydata.MakeHockeyJSONFromHockeyXML(getargs.infile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
 else:
  libhockeydata.MakeHockeyJSONFileFromHockeyXML(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 12: mkhockeyxmlfromjson - json file -> xml file.
if(curaction==getactlist[12]):
 # BUG FIX: this section previously reused getactdesc[0] (database -> xml)
 # as its description, defaulted --infile to the sqlite database path
 # (defsdbfile) even though its help text says "json file to convert",
 # and defaulted --outfile to the json path even though it writes xml.
 # Use the json default for input and the xml default for output.
 argparser = argparse.ArgumentParser(description=getactdesc[12], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defjsonfile), help="json file to convert");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', defxmlfile), help="xml file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 libhockeydata.MakeHockeyXMLFileFromHockeyJSON(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
# Action 13: mkhockeyxmlfileclean - clean up a hockey xml file.
if(curaction==getactlist[13]):
 # BUG FIX: the description previously reused getactdesc[9] (database ->
 # sql dump); getactdesc[13] is the cleanup entry.
 argparser = argparse.ArgumentParser(description=getactdesc[13], conflict_handler="resolve", add_help=True);
 argparser.add_argument("-v", "--ver", "--version", action="version", version=__program_name__+" "+__version__);
 argparser.add_argument('action', nargs='?', default=curaction);
 argparser.add_argument("-i", "-f", "--infile", default=os.environ.get('INFILE', defxmlfile), help="xml file to clean");
 argparser.add_argument("-o", "-t", "--outfile", default=os.environ.get('OUTFILE', None), help="clean xml file to output");
 argparser.add_argument("-V", "-d", "--verbose", action="store_true", help="print various debugging information");
 argparser.add_argument("-j", "-s", "--jsonverbose", action="store_true", help="print various debugging information in json");
 getargs = argparser.parse_args();
 verboseon = getargs.verbose;
 # VERBOSE/DEBUG environment variables force verbose output.
 if('VERBOSE' in os.environ or 'DEBUG' in os.environ):
  verboseon = True;
 if(verboseon):
  logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG);
 if(getargs.outfile is None):
  # No output path given: emit the cleaned XML directly.
  libhockeydata.MakeHockeyXMLFromHockeyXML(getargs.infile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
 else:
  libhockeydata.MakeHockeyXMLFileFromHockeyXML(getargs.infile, getargs.outfile, verbose=verboseon, jsonverbose=getargs.jsonverbose);
| 67.81459
| 812
| 0.745955
| 2,591
| 22,311
| 6.277113
| 0.103821
| 0.061977
| 0.103296
| 0.036153
| 0.81376
| 0.790212
| 0.782833
| 0.766171
| 0.756333
| 0.742683
| 0
| 0.004565
| 0.086863
| 22,311
| 328
| 813
| 68.021341
| 0.793747
| 0.028058
| 0
| 0.635739
| 0
| 0
| 0.252957
| 0.025786
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013746
| 0
| 0.013746
| 0.116838
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e2425c2cb6764fde1d5472552b81250dc65c17f8
| 1,391
|
py
|
Python
|
databroker_pack/commandline/_utils.py
|
gwbischof/databroker-pack
|
361d4948f495973cc2992904d9db4cff385f9815
|
[
"BSD-3-Clause"
] | 1
|
2021-06-17T04:20:47.000Z
|
2021-06-17T04:20:47.000Z
|
databroker_pack/commandline/_utils.py
|
gwbischof/databroker-pack
|
361d4948f495973cc2992904d9db4cff385f9815
|
[
"BSD-3-Clause"
] | 22
|
2020-04-03T02:14:15.000Z
|
2021-03-12T18:14:40.000Z
|
databroker_pack/commandline/_utils.py
|
gwbischof/databroker-pack
|
361d4948f495973cc2992904d9db4cff385f9815
|
[
"BSD-3-Clause"
] | 7
|
2020-04-01T01:24:12.000Z
|
2020-08-31T20:15:03.000Z
|
import argparse
from .._version import get_versions
class ShowVersionAction(argparse.Action):
    """Argparse action that prints the package version and exits.

    Like ``--help``, this bypasses any "required argument" constraints,
    so ``--version`` works even when mandatory arguments are omitted.
    """

    def __init__(self, option_strings, dest=argparse.SUPPRESS,
                 default=argparse.SUPPRESS, help=None):
        # nargs=0: the option consumes no value; SUPPRESS keeps it out of
        # the parsed namespace entirely.
        super().__init__(option_strings=option_strings, dest=dest,
                         default=default, nargs=0, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        print(get_versions()["version"])
        parser.exit()
class ListCatalogsAction(argparse.Action):
    """Argparse action that prints every databroker catalog name and exits.

    Like ``--help``, this bypasses any "required argument" constraints,
    so ``--list-catalogs`` works on its own.
    """

    def __init__(self, option_strings, dest=argparse.SUPPRESS,
                 default=argparse.SUPPRESS, help=None):
        # nargs=0: the option consumes no value; SUPPRESS keeps it out of
        # the parsed namespace entirely.
        super().__init__(option_strings=option_strings, dest=dest,
                         default=default, nargs=0, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        # Imported lazily: databroker is heavyweight and only needed here.
        import databroker

        for catalog_name in databroker.catalog:
            print(catalog_name)
        parser.exit()
| 25.759259
| 72
| 0.59885
| 145
| 1,391
| 5.503448
| 0.358621
| 0.097744
| 0.085213
| 0.055138
| 0.749373
| 0.749373
| 0.749373
| 0.749373
| 0.749373
| 0.634085
| 0
| 0.002101
| 0.3156
| 1,391
| 53
| 73
| 26.245283
| 0.836134
| 0.183321
| 0
| 0.75
| 0
| 0
| 0.006189
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.075
| 0
| 0.225
| 0.05
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e2958d1ef1431702155d88e34dcd6aef3658220b
| 190
|
py
|
Python
|
backend/server/assets/tokens.py
|
dkubatko/BubbleText
|
0910ddabe5eda9aa6f6c9254e42411c9b01a6516
|
[
"MIT"
] | null | null | null |
backend/server/assets/tokens.py
|
dkubatko/BubbleText
|
0910ddabe5eda9aa6f6c9254e42411c9b01a6516
|
[
"MIT"
] | null | null | null |
backend/server/assets/tokens.py
|
dkubatko/BubbleText
|
0910ddabe5eda9aa6f6c9254e42411c9b01a6516
|
[
"MIT"
] | null | null | null |
import secrets
import settings.token_settings as local_settings
class Tokens:
    """Namespace for producing random access tokens."""

    @classmethod
    def generate_token(cls):
        """Return a random hex string sized by the project token settings.

        NOTE(review): secrets.token_hex() takes a *byte* count, while the
        setting is named TOKEN_BITS_USED - confirm the intended unit.
        """
        size = local_settings.TOKEN_BITS_USED
        return secrets.token_hex(size)
| 23.75
| 64
| 0.789474
| 25
| 190
| 5.72
| 0.64
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 190
| 7
| 65
| 27.142857
| 0.89375
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
2c3c22ca18dfc22b019f99c12d12105b68f75774
| 24
|
py
|
Python
|
populy/__init__.py
|
R-mario/simuPy
|
ff5498c340851cbdc4fedd32c296ee9b50bbc06a
|
[
"MIT"
] | null | null | null |
populy/__init__.py
|
R-mario/simuPy
|
ff5498c340851cbdc4fedd32c296ee9b50bbc06a
|
[
"MIT"
] | null | null | null |
populy/__init__.py
|
R-mario/simuPy
|
ff5498c340851cbdc4fedd32c296ee9b50bbc06a
|
[
"MIT"
] | null | null | null |
from . import population
| 24
| 24
| 0.833333
| 3
| 24
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2c7fbd5fc36b5a936cb0a897b203b9ff0560a954
| 105
|
py
|
Python
|
codigo/Live165/exemplo_01.py
|
BrunoPontesLira/live-de-python
|
da6e463a89ed90d9efaa1c34088ab6460e949de1
|
[
"MIT"
] | 572
|
2018-04-03T03:17:08.000Z
|
2022-03-31T19:05:32.000Z
|
codigo/Live165/exemplo_01.py
|
BrunoPontesLira/live-de-python
|
da6e463a89ed90d9efaa1c34088ab6460e949de1
|
[
"MIT"
] | 176
|
2018-05-18T15:56:16.000Z
|
2022-03-28T20:39:07.000Z
|
codigo/Live165/exemplo_01.py
|
BrunoPontesLira/live-de-python
|
da6e463a89ed90d9efaa1c34088ab6460e949de1
|
[
"MIT"
] | 140
|
2018-04-18T13:59:11.000Z
|
2022-03-29T00:43:49.000Z
|
from pydantic import validate_arguments
@validate_arguments
def soma(x: int, y: int):
    """Return the sum of ``x`` and ``y``.

    pydantic's ``validate_arguments`` coerces/validates both arguments
    against the ``int`` annotations before the body runs.
    """
    total = x + y
    return total
| 15
| 39
| 0.733333
| 16
| 105
| 4.6875
| 0.6875
| 0.453333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 105
| 6
| 40
| 17.5
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
2cbaaa9cb2c1f9c240ff43e15a551596128bfdc2
| 22
|
py
|
Python
|
examples/example_open_mini1.py
|
ivan-kulikov-dev/pytib
|
3be8c12071548944628cb517ff0bc41b7fd4d9cd
|
[
"MIT"
] | null | null | null |
examples/example_open_mini1.py
|
ivan-kulikov-dev/pytib
|
3be8c12071548944628cb517ff0bc41b7fd4d9cd
|
[
"MIT"
] | null | null | null |
examples/example_open_mini1.py
|
ivan-kulikov-dev/pytib
|
3be8c12071548944628cb517ff0bc41b7fd4d9cd
|
[
"MIT"
] | null | null | null |
from .. import pytib
| 7.333333
| 20
| 0.681818
| 3
| 22
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 22
| 2
| 21
| 11
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e2c27fcb493c0083d1c0bea4dd6c21fb4d8dfc02
| 7,688
|
py
|
Python
|
tasks/tcn_call_test.py
|
evanharwin/keras-tcn
|
81706eaabc069ad27a1786625bbf731b247bf2f0
|
[
"MIT"
] | 1,473
|
2018-03-22T11:57:25.000Z
|
2022-03-31T02:49:58.000Z
|
tasks/tcn_call_test.py
|
gf9619/keras-tcn
|
078901b98e8538b85b5d40953ca8fc52ad9274e0
|
[
"MIT"
] | 177
|
2018-04-05T04:28:55.000Z
|
2022-03-19T13:26:57.000Z
|
tasks/tcn_call_test.py
|
gf9619/keras-tcn
|
078901b98e8538b85b5d40953ca8fc52ad9274e0
|
[
"MIT"
] | 414
|
2018-03-23T06:36:44.000Z
|
2022-03-29T11:07:00.000Z
|
import unittest
import numpy as np
from tensorflow.keras import Input
from tensorflow.keras import Model
from tensorflow.keras.models import Sequential
from tcn import TCN
# Shared fixtures for the TCN call tests below.
NB_FILTERS = 16  # filter count used by every TCN instance under test
TIME_STEPS = 20  # sequence length for the "time dimension known" cases
SEQ_LEN_1 = 5  # assorted lengths exercised when time_steps is None
SEQ_LEN_2 = 1
SEQ_LEN_3 = 10
def predict_with_tcn(time_steps=None, padding='causal', return_sequences=True) -> list:
    """Build a small TCN model and run it on seeded random input.

    When ``time_steps`` is None the model accepts variable-length input
    and is evaluated on three sequences of different lengths; otherwise
    it is evaluated on a single sequence of that fixed length.  Returns
    the list of model outputs.
    """
    feature_dim = 4
    inp = Input(batch_shape=(None, time_steps, feature_dim))
    out = TCN(nb_filters=NB_FILTERS, return_sequences=return_sequences, padding=padding)(inp)
    model = Model(inputs=[inp], outputs=[out])
    model.compile(optimizer='adam', loss='mse')
    # Seed before generating input so results are reproducible.
    np.random.seed(123)
    if time_steps is not None:
        return [model(np.random.rand(1, time_steps, feature_dim))]
    return [
        model(np.random.rand(1, SEQ_LEN_1, feature_dim)),
        model(np.random.rand(1, SEQ_LEN_2, feature_dim)),
        model(np.random.rand(1, SEQ_LEN_3, feature_dim)),
    ]
class TCNCallTest(unittest.TestCase):
def test_compute_output_for_multiple_config(self):
# with time steps None.
o1 = TCN(nb_filters=NB_FILTERS, return_sequences=True, padding='same').compute_output_shape((None, None, 4))
self.assertListEqual(list(o1), [None, None, NB_FILTERS])
o2 = TCN(nb_filters=NB_FILTERS, return_sequences=True, padding='causal').compute_output_shape((None, None, 4))
self.assertListEqual(list(o2), [None, None, NB_FILTERS])
o3 = TCN(nb_filters=NB_FILTERS, return_sequences=False, padding='same').compute_output_shape((None, None, 4))
self.assertListEqual(list(o3), [None, NB_FILTERS])
o4 = TCN(nb_filters=NB_FILTERS, return_sequences=False, padding='causal').compute_output_shape((None, None, 4))
self.assertListEqual(list(o4), [None, NB_FILTERS])
# with time steps known.
o5 = TCN(nb_filters=NB_FILTERS, return_sequences=True, padding='same').compute_output_shape((None, 5, 4))
self.assertListEqual(list(o5), [None, 5, NB_FILTERS])
o6 = TCN(nb_filters=NB_FILTERS, return_sequences=True, padding='causal').compute_output_shape((None, 5, 4))
self.assertListEqual(list(o6), [None, 5, NB_FILTERS])
o7 = TCN(nb_filters=NB_FILTERS, return_sequences=False, padding='same').compute_output_shape((None, 5, 4))
self.assertListEqual(list(o7), [None, NB_FILTERS])
o8 = TCN(nb_filters=NB_FILTERS, return_sequences=False, padding='causal').compute_output_shape((None, 5, 4))
self.assertListEqual(list(o8), [None, NB_FILTERS])
def test_causal_time_dim_known_return_sequences(self):
r = predict_with_tcn(time_steps=TIME_STEPS, padding='causal', return_sequences=True)
self.assertListEqual([list(b.shape) for b in r], [[1, TIME_STEPS, NB_FILTERS]])
def test_causal_time_dim_unknown_return_sequences(self):
r = predict_with_tcn(time_steps=None, padding='causal', return_sequences=True)
self.assertListEqual([list(b.shape) for b in r],
[[1, SEQ_LEN_1, NB_FILTERS],
[1, SEQ_LEN_2, NB_FILTERS],
[1, SEQ_LEN_3, NB_FILTERS]])
def test_non_causal_time_dim_known_return_sequences(self):
r = predict_with_tcn(time_steps=TIME_STEPS, padding='same', return_sequences=True)
self.assertListEqual([list(b.shape) for b in r], [[1, TIME_STEPS, NB_FILTERS]])
def test_non_causal_time_dim_unknown_return_sequences(self):
r = predict_with_tcn(time_steps=None, padding='same', return_sequences=True)
self.assertListEqual([list(b.shape) for b in r],
[[1, SEQ_LEN_1, NB_FILTERS],
[1, SEQ_LEN_2, NB_FILTERS],
[1, SEQ_LEN_3, NB_FILTERS]])
def test_causal_time_dim_known_return_no_sequences(self):
r = predict_with_tcn(time_steps=TIME_STEPS, padding='causal', return_sequences=False)
self.assertListEqual([list(b.shape) for b in r], [[1, NB_FILTERS]])
def test_causal_time_dim_unknown_return_no_sequences(self):
r = predict_with_tcn(time_steps=None, padding='causal', return_sequences=False)
self.assertListEqual([list(b.shape) for b in r], [[1, NB_FILTERS], [1, NB_FILTERS], [1, NB_FILTERS]])
def test_non_causal_time_dim_known_return_no_sequences(self):
    """Without return_sequences, a 'same'-padded TCN collapses the time axis to (1, NB_FILTERS)."""
    batches = predict_with_tcn(time_steps=TIME_STEPS, padding='same', return_sequences=False)
    self.assertListEqual([list(batch.shape) for batch in batches], [[1, NB_FILTERS]])
def test_non_causal_time_dim_unknown_return_no_sequences(self):
    """Variable-length 'same'-padded inputs all collapse to (1, NB_FILTERS) without sequences."""
    batches = predict_with_tcn(time_steps=None, padding='same', return_sequences=False)
    expected_shapes = [[1, NB_FILTERS] for _ in range(3)]
    self.assertListEqual([list(batch.shape) for batch in batches], expected_shapes)
def test_norms(self):
    """Each normalization flag compiles on its own; combining more than one raises ValueError.

    Fix: the original used manual ``try: ...; raise AssertionError`` / ``except
    ValueError: pass`` blocks, which is the non-idiomatic (and error-masking)
    form of ``assertRaises``; the six single-flag compile calls were also
    copy-pasted verbatim.
    """
    # Every single normalization flag, enabled or disabled, must compile cleanly.
    for norm_kwargs in ({'use_weight_norm': True}, {'use_weight_norm': False},
                        {'use_layer_norm': True}, {'use_layer_norm': False},
                        {'use_batch_norm': True}, {'use_batch_norm': False}):
        Sequential(layers=[TCN(input_shape=(20, 2), **norm_kwargs)]).compile(optimizer='adam', loss='mse')
    # Enabling more than one normalization at a time is invalid and must be rejected.
    with self.assertRaises(ValueError):
        Sequential(layers=[TCN(input_shape=(20, 2), use_batch_norm=True,
                               use_weight_norm=True)]).compile(optimizer='adam', loss='mse')
    with self.assertRaises(ValueError):
        Sequential(layers=[TCN(input_shape=(20, 2), use_batch_norm=True, use_weight_norm=True,
                               use_layer_norm=True)]).compile(optimizer='adam', loss='mse')
def test_receptive_field(self):
    """Receptive field follows 1 + (kernel_size - 1) * nb_stacks * sum(dilations) * 2.

    e.g. kernel_size=3, dilations=(1, 3, 5), nb_stacks=1:
    1 + (3 - 1) * 1 * (1 + 3 + 5) * 2 = 37.
    """
    cases = [
        # (expected, kernel_size, dilations, nb_stacks)
        (37, 3, (1, 3, 5), 1),
        (379, 4, (1, 2, 4, 8, 16, 32), 1),
        (253, 3, (1, 2, 4, 8, 16, 32), 1),
        (125, 3, (1, 2, 4, 8, 16), 1),
        (61, 3, (1, 2, 4, 8), 1),
        (29, 3, (1, 2, 4), 1),
        (57, 3, (1, 2, 4), 2),
        (121, 3, (1, 2, 4, 8), 2),
        (91, 4, (1, 2, 4, 8), 1),
        (25, 5, (1, 2), 1),
        (31, 6, (1, 2), 1),
    ]
    for expected, kernel_size, dilations, nb_stacks in cases:
        layer = TCN(kernel_size=kernel_size, dilations=dilations, nb_stacks=nb_stacks)
        self.assertEqual(expected, layer.receptive_field)
# Run the full test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 50.913907
| 119
| 0.653356
| 1,150
| 7,688
| 4.123478
| 0.105217
| 0.081611
| 0.077604
| 0.010122
| 0.840574
| 0.831295
| 0.826234
| 0.767609
| 0.749473
| 0.719528
| 0
| 0.052555
| 0.198101
| 7,688
| 150
| 120
| 51.253333
| 0.716626
| 0.045005
| 0
| 0.238532
| 0
| 0
| 0.0247
| 0
| 0
| 0
| 0
| 0
| 0.266055
| 1
| 0.110092
| false
| 0.018349
| 0.055046
| 0
| 0.192661
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1a8eaf223b1cd6517bfafcfcc9785ab80786e4b5
| 18
|
py
|
Python
|
lib/wombatwiki/__init__.py
|
jimeggleston/wombatwiki
|
5e0d7c8e6e4037de4040585b91fd5f8db7293b36
|
[
"MIT"
] | null | null | null |
lib/wombatwiki/__init__.py
|
jimeggleston/wombatwiki
|
5e0d7c8e6e4037de4040585b91fd5f8db7293b36
|
[
"MIT"
] | null | null | null |
lib/wombatwiki/__init__.py
|
jimeggleston/wombatwiki
|
5e0d7c8e6e4037de4040585b91fd5f8db7293b36
|
[
"MIT"
] | null | null | null |
import os, sys, re
| 18
| 18
| 0.722222
| 4
| 18
| 3.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 18
| 1
| 18
| 18
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
46d13a57e062b83dfd9097293233a5e9d6f88680
| 74
|
py
|
Python
|
build/lib/amap/__init__.py
|
commerceblock/asset-mapping
|
ba33f2698ba7a9f7f1c59fea531cad919ed34fa3
|
[
"MIT"
] | 3
|
2019-06-12T16:43:10.000Z
|
2021-09-20T06:46:20.000Z
|
build/lib/amap/__init__.py
|
cryptopoly/asset-mapping
|
d61b196066fadc54f1a1600fb72ef7c5627db3e4
|
[
"MIT"
] | 41
|
2019-05-03T14:52:45.000Z
|
2019-10-25T09:07:15.000Z
|
build/lib/amap/__init__.py
|
cryptopoly/asset-mapping
|
d61b196066fadc54f1a1600fb72ef7c5627db3e4
|
[
"MIT"
] | 3
|
2019-06-12T16:44:14.000Z
|
2021-11-09T20:32:18.000Z
|
from ._version import __version__
import amap.mapping
import amap.rpchost
| 18.5
| 33
| 0.851351
| 10
| 74
| 5.8
| 0.6
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 74
| 3
| 34
| 24.666667
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
46f71c943050902ec31441de0602aa6bdfc7b6ce
| 84
|
py
|
Python
|
src/visualization/recall.py
|
FilipCvetko/clinical_notes
|
5fa0d26898e0083fcf424cdd61d2190b32a495be
|
[
"MIT"
] | 2
|
2021-11-17T16:51:47.000Z
|
2021-12-16T23:00:26.000Z
|
src/visualization/recall.py
|
FilipCvetko/clinical_notes
|
5fa0d26898e0083fcf424cdd61d2190b32a495be
|
[
"MIT"
] | null | null | null |
src/visualization/recall.py
|
FilipCvetko/clinical_notes
|
5fa0d26898e0083fcf424cdd61d2190b32a495be
|
[
"MIT"
] | null | null | null |
import streamlit as st
def recall_app():
    """Render the recall page by setting its Streamlit title."""
    page_title = "Patient notes collection"
    st.title(page_title)
| 14
| 40
| 0.72619
| 12
| 84
| 5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 84
| 5
| 41
| 16.8
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2037a9e902af27e73c40842b62da9f0262ffbba5
| 126
|
py
|
Python
|
ProteoPy/__init__.py
|
cossio/ProteoPy
|
a3569dcef34e4d416863c812e90498d749dc288f
|
[
"MIT"
] | null | null | null |
ProteoPy/__init__.py
|
cossio/ProteoPy
|
a3569dcef34e4d416863c812e90498d749dc288f
|
[
"MIT"
] | null | null | null |
ProteoPy/__init__.py
|
cossio/ProteoPy
|
a3569dcef34e4d416863c812e90498d749dc288f
|
[
"MIT"
] | null | null | null |
"""
Simple tools to handle proteomics data.
"""
from ProteoPy.Services import Services
import ProteoPy.io
import ProteoPy.util
| 21
| 39
| 0.801587
| 17
| 126
| 5.941176
| 0.705882
| 0.277228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 126
| 6
| 40
| 21
| 0.90991
| 0.309524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
64562b5b8cc5301460a6fc567e4440a83f9331b0
| 158
|
py
|
Python
|
backend/apps/sampleapp/admin.py
|
domasx2/django-angular-docker-seed
|
5c1ad6d62d179c9cb5cdbf7b1254576efa63b2fb
|
[
"Unlicense"
] | 32
|
2015-04-27T02:01:59.000Z
|
2021-04-06T10:19:42.000Z
|
backend/apps/sampleapp/admin.py
|
domasx2/django-angular-docker-seed
|
5c1ad6d62d179c9cb5cdbf7b1254576efa63b2fb
|
[
"Unlicense"
] | 14
|
2015-03-21T08:20:34.000Z
|
2016-02-15T07:07:39.000Z
|
backend/apps/sampleapp/admin.py
|
domasx2/django-angular-docker-seed
|
5c1ad6d62d179c9cb5cdbf7b1254576efa63b2fb
|
[
"Unlicense"
] | 21
|
2015-03-18T18:40:12.000Z
|
2021-03-16T22:12:44.000Z
|
from django.contrib import admin
from .models import Upload
# Register your models here.
@admin.register(Upload)
class UploadAdmin(admin.ModelAdmin):
    """Django admin for the Upload model; default ModelAdmin behaviour, registration only."""
    pass
| 26.333333
| 36
| 0.797468
| 21
| 158
| 6
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126582
| 158
| 6
| 37
| 26.333333
| 0.913043
| 0.164557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
645ecd2ccaa20cd884d58417b03f4c6851420cdb
| 11,305
|
py
|
Python
|
tests/mock_decorators/test_function_mock.py
|
fhuertas/mock_decorator
|
bb8971398aa80f555089db0ca57fff5a648f5405
|
[
"Apache-2.0"
] | null | null | null |
tests/mock_decorators/test_function_mock.py
|
fhuertas/mock_decorator
|
bb8971398aa80f555089db0ca57fff5a648f5405
|
[
"Apache-2.0"
] | null | null | null |
tests/mock_decorators/test_function_mock.py
|
fhuertas/mock_decorator
|
bb8971398aa80f555089db0ca57fff5a648f5405
|
[
"Apache-2.0"
] | null | null | null |
import sys
import unittest
from mock_decorators.function_mock import FunctionMock
from mock_decorators.function_mock import FunctionMockResult
from mock_decorators.function_mock import FunctionMockChangeResult
from mock_decorators.function_mock import FunctionMockCheckCall
from tests.mock_decorators import module_test
from tests.mock_decorators.module_test import TestClass
class TestFunctionMock(unittest.TestCase):
    """Tests for the FunctionMock decorator, which swaps a module/class function
    for a mock only while the decorated function runs."""

    # Operands reused across tests: module_test.function_sum adds them,
    # the mocks subtract them, so mocked vs. unmocked results differ.
    p1 = 10
    p2 = 2

    def test_function_mock_correct(self):
        """Inside the decorated scope the mock (subtraction) runs; outside it the real sum runs."""
        def function_mocked(param_a, param_b):
            return param_a - param_b

        @FunctionMock(entity=module_test, function_name='function_sum', mocked_function=function_mocked,
                      check_signature=True)
        def inner_test():
            return module_test.function_sum(self.p1, self.p2)

        # Called outside inner_test, so the real implementation is used.
        result_no_mocked = module_test.function_sum(self.p1, self.p2)
        result_mocked = inner_test()
        expected_mocked = self.p1 - self.p2
        expected_no_mocked = self.p1 + self.p2
        self.assertEqual(result_mocked, expected_mocked, "The FunctionMock has failed")
        self.assertEqual(result_no_mocked, expected_no_mocked, "The FunctionMock has failed")

    def test_function_mock_correct_a_class(self):
        """FunctionMock also patches methods on a class, and restores them afterwards."""
        result_of_the_function_mock = "mock mock mock"

        def function_mocked(self, *args, **kwargs):
            return result_of_the_function_mock

        test_class = TestClass()

        @FunctionMock(entity=TestClass, function_name='function_echo', mocked_function=function_mocked,
                      check_signature=True)
        def inner_test():
            return test_class.function_echo("No echo")

        result_mocked = inner_test()
        self.assertEqual(result_mocked, result_of_the_function_mock, "The FunctionMock has failed")
        # After inner_test returns, the original method must be back in place.
        self.assertNotEqual(test_class.function_echo("No echo"),
                            result_of_the_function_mock, "The FunctionMock has failed")

    def test_function_mock_bad_signature(self):
        """A mock whose parameter names differ is rejected when check_signature=True."""
        def function_mocked(param, param_b):
            return param - param_b

        @FunctionMock(entity=module_test, function_name='function_sum', mocked_function=function_mocked,
                      check_signature=True)
        def inner_test():
            module_test.function_sum(self.p1, self.p2)

        # assertRaisesRegexp was renamed to assertRaisesRegex in Python 3.4.
        if sys.version_info < (3, 4):
            self.assertRaisesRegexp(TypeError, "signature", inner_test)
        else:
            self.assertRaisesRegex(TypeError, "signature", inner_test)

    def test_function_mock_not_exists(self):
        """Mocking a function name missing from the module raises TypeError."""
        def function_mocked(param, param_b):
            return param - param_b

        @FunctionMock(entity=module_test, function_name='function_suma', mocked_function=function_mocked,
                      check_signature=True)
        def inner_test():
            module_test.function_suma(self.p1, self.p2)

        if sys.version_info < (3, 4):
            self.assertRaises(TypeError, inner_test)
        else:
            self.assertRaisesRegex(TypeError, "unsupported callable", inner_test)

    def test_function_mock_bad_signature_no_checked(self):
        """With check_signature=False a signature mismatch is tolerated and the mock runs."""
        def function_mocked(param, param_b):
            return param - param_b

        @FunctionMock(entity=module_test, function_name='function_sum', mocked_function=function_mocked,
                      check_signature=False)
        def inner_test():
            return module_test.function_sum(self.p1, self.p2)

        result_no_mocked = module_test.function_sum(self.p1, self.p2)
        result_mocked = inner_test()
        expected_mocked = self.p1 - self.p2
        expected_no_mocked = self.p1 + self.p2
        self.assertEqual(result_mocked, expected_mocked, "The FunctionMock has failed")
        self.assertEqual(result_no_mocked, expected_no_mocked, "The FunctionMock has failed")

    def test_function_mock_no_exists(self):
        """Even a never-invoked decorated function fails when the mocked name does not exist."""
        def function_mocked(param, param_b):
            return param - param_b

        @FunctionMock(entity=module_test, function_name='function_summ', mocked_function=function_mocked,
                      check_signature=True)
        def inner_test():
            pass

        if sys.version_info < (3, 4):
            self.assertRaises(TypeError, inner_test)
        else:
            self.assertRaisesRegex(TypeError, 'unsupported callable', inner_test)
class TestFunctionMockResult(unittest.TestCase):
    """Tests for FunctionMockResult: replace a function so it returns a canned value.

    Fix: two tests called ``self.assertTrue(result, expected)`` — the second
    argument of ``assertTrue`` is the failure *message*, so they only checked
    truthiness (and -21231 is always truthy). Replaced with real equality
    assertions.
    """

    def test_function_mock_result_correct(self):
        """Inside the decorated scope the mocked function returns the canned result."""
        result_returned = -21231

        @FunctionMockResult(module_test, 'function_sum', result_returned, True)
        def inner_test():
            return module_test.function_sum(1, 1)

        result_value_mocked = inner_test()
        # BUG FIX: was assertTrue(result_value_mocked, result_returned), which
        # never compared the two values.
        self.assertEqual(result_value_mocked, result_returned)

    def test_function_mock_result_correct_no_exist(self):
        """Mocking a missing function with checking enabled raises TypeError."""
        result_returned = -21231

        @FunctionMockResult(module_test, 'function_suma', result_returned, True)
        def inner_test():
            return module_test.function_sum(1, 1)

        # assertRaisesRegexp was renamed to assertRaisesRegex in Python 3.
        if sys.version_info < (3, 3):
            self.assertRaisesRegexp(TypeError, "the function don't exist", inner_test)
        else:
            self.assertRaisesRegex(TypeError, "the function don't exist", inner_test)

    def test_function_mock_result_correct_no_exist_no_checked(self):
        """With checking disabled a missing target name is tolerated; the real sum runs."""
        result_returned = -21231

        @FunctionMockResult(module_test, 'function_suma', result_returned, False)
        def inner_test():
            return module_test.function_sum(1, 1)

        result_value_mocked = inner_test()
        # 'function_suma' does not shadow function_sum, so the real
        # function_sum(1, 1) executes and returns 2.
        self.assertEqual(result_value_mocked, 2)

    def test_function_mock_no_function(self):
        """Decorating with a name absent from the module raises AttributeError."""
        result_returned = -21231
        invalid_function_name = 'invalid_function'

        def inner_test():
            @FunctionMockResult(module_test, invalid_function_name, result_returned)
            def call_test():
                module_test.function_sum(1, 1)

            call_test()

        if sys.version_info < (3, 0):
            self.assertRaisesRegexp(AttributeError, invalid_function_name, inner_test)
        else:
            self.assertRaisesRegex(AttributeError, invalid_function_name, inner_test)
class TestFunctionMockChangeResult(unittest.TestCase):
    """Tests for FunctionMockChangeResult: post-process a function's real result
    through a change function while the decorated scope runs."""

    # Operands for module_test.function_sum; large enough that +2 is visible.
    first_parameter = 500
    second_parameter = 10

    def test_function_mock_change_result_correct(self):
        """Inside the scope the real sum is computed and then passed through function_change."""
        def function_change(value):
            return value + 2

        @FunctionMockChangeResult(module_test, 'function_sum', function_change)
        def inner_test():
            return module_test.function_sum(self.first_parameter, self.second_parameter)

        # Outside the decorated scope the unmodified sum is returned.
        result_no_mocked = module_test.function_sum(self.first_parameter, self.second_parameter)
        result_mocked = inner_test()
        expected_mocked = self.first_parameter + self.second_parameter + 2
        expected_no_mocked = self.first_parameter + self.second_parameter
        self.assertEqual(result_mocked, expected_mocked, "The FunctionMock has failed")
        self.assertEqual(result_no_mocked, expected_no_mocked, "The FunctionMock has failed")

    def test_function_mock_change_result_incorrect_function(self):
        """A change function taking no arguments fails when the wrapped result is passed in."""
        def function_change():
            return 2

        @FunctionMockChangeResult(module_test, 'function_sum', function_change)
        def inner_test():
            # Error wording differs between Python 2 and 3.
            if sys.version_info < (3, 0):
                self.assertRaisesRegexp(TypeError, 'takes no arguments',
                                        module_test.function_sum, self.first_parameter, self.second_parameter)
            else:
                self.assertRaisesRegex(TypeError, 'positional arguments but 1 was given',
                                       module_test.function_sum, self.first_parameter, self.second_parameter)

        inner_test()

    def test_function_mock_change_result_no_function(self):
        """Decorating with a name absent from the module raises AttributeError."""
        def function_change(value):
            return value + 2

        invalid_function_name = 'invalid_function'

        def inner_test():
            @FunctionMockChangeResult(module_test, invalid_function_name, function_change)
            def call_test():
                module_test.function_sum(1, 1)

            call_test()

        if sys.version_info < (3, 0):
            self.assertRaisesRegexp(AttributeError, invalid_function_name, inner_test)
        else:
            self.assertRaisesRegex(AttributeError, invalid_function_name, inner_test)
class TestFunctionMockCheckCall(unittest.TestCase):
    """Tests for FunctionMockCheckCall: verify a function is invoked (optionally an
    exact number of times) inside the decorated scope, optionally overriding its
    return value."""

    def test_no_called(self):
        """Never calling the watched function inside the scope raises ValueError."""
        def inner_test():
            @FunctionMockCheckCall(module_test, 'function_sum')
            def call_test():
                pass

            call_test()

        self.assertRaises(ValueError, inner_test)

    def test_called(self):
        """A single call satisfies the check and the real result passes through."""
        @FunctionMockCheckCall(module_test, 'function_sum')
        def inner_test():
            return module_test.function_sum(2, 2)

        result = inner_test()
        self.assertEqual(result, 4, "The function result has been modified")

    def test_call_check_invocations_ok(self):
        """Exactly expected_times calls inside the scope pass; later calls don't count."""
        @FunctionMockCheckCall(module_test, 'function_sum', expected_times=3)
        def inner_test():
            module_test.function_sum(2, 2)
            module_test.function_sum(2, 2)
            return module_test.function_sum(2, 2)

        result = inner_test()
        # Outside the scope, so this call is not counted toward expected_times.
        module_test.function_sum(2, 2)
        self.assertEqual(result, 4, "The function result has been modified")

    def test_call_check_invocations_ko(self):
        """Three calls against expected_times=2 raise ValueError."""
        @FunctionMockCheckCall(module_test, 'function_sum', expected_times=2)
        def inner_test():
            module_test.function_sum(2, 2)
            module_test.function_sum(2, 2)
            return module_test.function_sum(2, 2)

        self.assertRaises(ValueError, inner_test)
        result = module_test.function_sum(2, 2)
        self.assertEqual(result, 4, "The function result has been modified")

    def test_call_change_return(self):
        """return_value overrides the result inside the scope only."""
        @FunctionMockCheckCall(module_test, 'function_sum', return_value=3)
        def inner_test():
            return module_test.function_sum(2, 2)

        result_change = inner_test()
        result_no_change = module_test.function_sum(2, 2)
        self.assertEqual(result_change, 3, "The function result has been modified")
        self.assertEqual(result_no_change, 4, "The function result has been modified")

    def test_call_change_return_0(self):
        """A falsy return_value (0) must still override the real result."""
        @FunctionMockCheckCall(module_test, 'function_sum', expected_times=1, return_value=0)
        def inner_test():
            return module_test.function_sum(2, 2)

        result_change = inner_test()
        result_no_change = module_test.function_sum(2, 2)
        self.assertEqual(result_change, 0, "The function result has been modified")
        self.assertEqual(result_no_change, 4, "The function result has been modified")

    def test_check_no_call_ok(self):
        """expected_times=0 passes when the function is never called inside the scope."""
        @FunctionMockCheckCall(module_test, 'function_sum', expected_times=0)
        def inner_test():
            return 3

        result_no_change = module_test.function_sum(2, 2)
        self.assertEqual(result_no_change, 4, "The function result has been modified")

    def test_check_no_call_ko(self):
        """expected_times=0 fails when the function IS called inside the scope."""
        @FunctionMockCheckCall(module_test, 'function_sum', expected_times=0)
        def inner_test():
            return module_test.function_sum(2, 2)

        self.assertRaises(ValueError, inner_test)
| 38.715753
| 110
| 0.681203
| 1,326
| 11,305
| 5.474359
| 0.078431
| 0.10084
| 0.119025
| 0.115718
| 0.862516
| 0.822565
| 0.748863
| 0.725582
| 0.66359
| 0.625568
| 0
| 0.014661
| 0.239805
| 11,305
| 291
| 111
| 38.848797
| 0.829998
| 0
| 0
| 0.623853
| 0
| 0
| 0.08536
| 0
| 0
| 0
| 0
| 0
| 0.16055
| 1
| 0.247706
| false
| 0.009174
| 0.036697
| 0.09633
| 0.426606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
64b5391c7258de0feed045a9012767481dbf3a5e
| 836
|
py
|
Python
|
tests/test_preloaders.py
|
rohits2/iterlib
|
c239bb8f8485e01e281943bb6e4feea25b8e13d9
|
[
"MIT"
] | null | null | null |
tests/test_preloaders.py
|
rohits2/iterlib
|
c239bb8f8485e01e281943bb6e4feea25b8e13d9
|
[
"MIT"
] | null | null | null |
tests/test_preloaders.py
|
rohits2/iterlib
|
c239bb8f8485e01e281943bb6e4feea25b8e13d9
|
[
"MIT"
] | null | null | null |
import pytest
import iterlib
import random
@pytest.mark.timeout(10)
def test_thread_loader_equality():
    """thread_preload must yield exactly the wrapped generator's values, in order."""
    source_values = [random.randint(0, 100) for _ in range(10000)]
    squares_gen = (value ** 2 for value in source_values)
    expected_squares = [value ** 2 for value in source_values]
    preloaded = iterlib.thread_preload(squares_gen)
    for index, produced in enumerate(preloaded):
        assert expected_squares[index] == produced
@pytest.mark.timeout(10)
def test_process_loader_equality():
    """process_preload must yield exactly the wrapped generator's values, in order."""
    source_values = [random.randint(0, 100) for _ in range(10000)]
    squares_gen = (value ** 2 for value in source_values)
    expected_squares = [value ** 2 for value in source_values]
    preloaded = iterlib.process_preload(squares_gen)
    for index, produced in enumerate(preloaded):
        assert expected_squares[index] == produced
| 36.347826
| 70
| 0.740431
| 138
| 836
| 4.137681
| 0.224638
| 0.315236
| 0.168126
| 0.042032
| 0.809107
| 0.809107
| 0.718039
| 0.718039
| 0.61296
| 0.61296
| 0
| 0.037303
| 0.166268
| 836
| 22
| 71
| 38
| 0.781923
| 0
| 0
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 1
| 0.105263
| false
| 0
| 0.157895
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b3b415dd17be406cf74fa5b61a7523d349898dea
| 139
|
py
|
Python
|
hbmqtt/mqtt/puback.py
|
sjlongland/amqtt
|
3db7c9a3501be503beaded052006b4190538dc0e
|
[
"MIT"
] | null | null | null |
hbmqtt/mqtt/puback.py
|
sjlongland/amqtt
|
3db7c9a3501be503beaded052006b4190538dc0e
|
[
"MIT"
] | null | null | null |
hbmqtt/mqtt/puback.py
|
sjlongland/amqtt
|
3db7c9a3501be503beaded052006b4190538dc0e
|
[
"MIT"
] | null | null | null |
# Backwards-compatibility shim: the hbmqtt project was renamed to amqtt.
# Re-export the new module's public names and warn importers to migrate.
import warnings
from amqtt.mqtt.puback import *
warnings.warn("importing hbmqtt is deprecated. Please import amqtt", DeprecationWarning)
| 23.166667
| 88
| 0.81295
| 17
| 139
| 6.647059
| 0.764706
| 0.247788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115108
| 139
| 5
| 89
| 27.8
| 0.918699
| 0
| 0
| 0
| 0
| 0
| 0.366906
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b3c112802497ca70e219de9cbfbac98c0b4a7a6e
| 132
|
py
|
Python
|
aas_core_codegen/csharp/reporting/__init__.py
|
aas-core-works/aas-core-codegen
|
afec2cf363b6cb69816e7724a2b58626e2165869
|
[
"MIT"
] | 5
|
2021-12-29T12:55:34.000Z
|
2022-03-01T17:57:21.000Z
|
aas_core_codegen/csharp/reporting/__init__.py
|
aas-core-works/aas-core-codegen
|
afec2cf363b6cb69816e7724a2b58626e2165869
|
[
"MIT"
] | 10
|
2021-12-29T02:15:55.000Z
|
2022-03-09T11:04:22.000Z
|
aas_core_codegen/csharp/reporting/__init__.py
|
aas-core-works/aas-core-codegen
|
afec2cf363b6cb69816e7724a2b58626e2165869
|
[
"MIT"
] | 2
|
2021-12-29T01:42:12.000Z
|
2022-02-15T13:46:33.000Z
|
"""Generate C# code for reporting errors."""
from aas_core_codegen.csharp.reporting import _generate
generate = _generate.generate
| 26.4
| 55
| 0.80303
| 17
| 132
| 6
| 0.705882
| 0.470588
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106061
| 132
| 4
| 56
| 33
| 0.864407
| 0.287879
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b3d31cd0fdbbf2bb53c2c1b21e03c863e0f7bb7a
| 22
|
py
|
Python
|
examples/optics/__init__.py
|
abbasegbeyemi/pyqtgraph
|
6aeafce477d1d7eebb9d2fe824d4c5573ef9ceed
|
[
"MIT"
] | 150
|
2018-03-27T16:45:37.000Z
|
2022-03-30T03:47:56.000Z
|
examples/optics/__init__.py
|
abbasegbeyemi/pyqtgraph
|
6aeafce477d1d7eebb9d2fe824d4c5573ef9ceed
|
[
"MIT"
] | 67
|
2019-11-30T14:45:05.000Z
|
2022-03-14T20:26:06.000Z
|
examples/optics/__init__.py
|
abbasegbeyemi/pyqtgraph
|
6aeafce477d1d7eebb9d2fe824d4c5573ef9ceed
|
[
"MIT"
] | 40
|
2018-04-06T19:42:21.000Z
|
2022-01-11T00:34:17.000Z
|
from .pyoptic import *
| 22
| 22
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b3e040649ebde9c07b60b7aa6faade509d18de1c
| 39,099
|
py
|
Python
|
tests/unit/cluster_test.py
|
king7997/rubrik-sdk-for-python
|
6a691ec7e1e7ff57812512dcfc549d9052d55f7a
|
[
"MIT"
] | null | null | null |
tests/unit/cluster_test.py
|
king7997/rubrik-sdk-for-python
|
6a691ec7e1e7ff57812512dcfc549d9052d55f7a
|
[
"MIT"
] | null | null | null |
tests/unit/cluster_test.py
|
king7997/rubrik-sdk-for-python
|
6a691ec7e1e7ff57812512dcfc549d9052d55f7a
|
[
"MIT"
] | null | null | null |
import pytest
from rubrik_cdm.exceptions import InvalidParameterException, CDMVersionException, InvalidTypeException
from rubrik_cdm import Connect
def test_cluster_version(rubrik, mocker):
    """cluster_version() returns the version string reported by the cluster API."""
    api_response = {'version': '5.0.1-1280'}
    patched_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
    patched_get.return_value = api_response
    assert rubrik.cluster_version() == "5.0.1-1280"
def test_minimum_installed_cdm_version_met(rubrik, mocker):
    """An installed 5.0.1-1280 satisfies a 5.0 minimum requirement."""
    installed_version = "5.0.1-1280"
    patched_version = mocker.patch('rubrik_cdm.Connect.cluster_version', autospec=True, spec_set=True)
    patched_version.return_value = installed_version
    assert rubrik.minimum_installed_cdm_version("5.0") is True
def test_minimum_installed_cdm_version_not_met(rubrik, mocker):
    """An installed 5.0.1-1280 does not satisfy a 5.2 minimum requirement."""
    installed_version = "5.0.1-1280"
    patched_version = mocker.patch('rubrik_cdm.Connect.cluster_version', autospec=True, spec_set=True)
    patched_version.return_value = installed_version
    assert rubrik.minimum_installed_cdm_version("5.2") is False
def test_cluster_node_ip(rubrik, mocker):
    """cluster_node_ip() extracts every node's ipAddress from the node-listing API.

    Fix: the original repeated the same ~15-line node dict three times verbatim,
    differing only in ipAddress; a local builder removes the triplication.
    """

    def _node(ip_address):
        # One entry of the API response's `data` list; only ipAddress varies.
        return {
            "id": "string",
            "brikId": "string",
            "status": "string",
            "ipAddress": ip_address,
            "supportTunnel": {
                "isTunnelEnabled": True,
                "port": 0,
                "enabledTime": "2019-04-16T14:16:15.573Z",
                "lastActivityTime": "2019-04-16T14:16:15.573Z",
                "inactivityTimeoutInSeconds": 0
            }
        }

    def mock_internal_cluster_me_node():
        return {
            "hasMore": True,
            "data": [_node("192.168.1.1"), _node("192.168.1.2"), _node("192.168.1.3")],
            "total": 0
        }

    mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
    mock_get.return_value = mock_internal_cluster_me_node()
    assert rubrik.cluster_node_ip() == ["192.168.1.1", "192.168.1.2", "192.168.1.3"]
def test_cluster_node_name(rubrik, mocker):
    """cluster_node_name() extracts every node's id from the node-listing API.

    Fix: the original repeated the same ~15-line node dict three times verbatim,
    differing only in id; a local builder removes the triplication.
    """

    def _node(node_id):
        # One entry of the API response's `data` list; only id varies.
        return {
            "id": node_id,
            "brikId": "string",
            "status": "string",
            "ipAddress": "string",
            "supportTunnel": {
                "isTunnelEnabled": True,
                "port": 0,
                "enabledTime": "2019-04-16T14:16:15.573Z",
                "lastActivityTime": "2019-04-16T14:16:15.573Z",
                "inactivityTimeoutInSeconds": 0
            }
        }

    def mock_internal_cluster_me_node():
        return {
            "hasMore": True,
            "data": [_node("RVM000A000001"), _node("RVM000A000002"), _node("RVM000A000003")],
            "total": 0
        }

    mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
    mock_get.return_value = mock_internal_cluster_me_node()
    assert rubrik.cluster_node_name() == ["RVM000A000001", "RVM000A000002", "RVM000A000003"]
def test_end_user_authorization_invalid_object(rubrik):
    """An unsupported object type is rejected with InvalidParameterException."""
    unsupported_type = "not_a_supported_object_type"
    with pytest.raises(InvalidParameterException):
        rubrik.end_user_authorization("object_name", "end_user", unsupported_type)
def test_end_user_authorization_invalid_end_user(rubrik, mocker):
    """If the username lookup returns no users, InvalidParameterException is raised."""

    def mock_self_object_id():
        return "VirtualMachine:::e6a7e6f1-6050-1ee33-9ba6-8e284e2801de-vm-38297"

    def mock_internal_user_username():
        # Empty list: the API found no user matching "end_user".
        return []

    mock_object_id = mocker.patch('rubrik_cdm.Connect.object_id', autospec=True, spec_set=True)
    # BUG FIX: the original assigned the function object itself (missing call
    # parentheses), so object_id returned a function instead of the ID string.
    # Call it, matching every sibling test in this module.
    mock_object_id.return_value = mock_self_object_id()
    mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
    mock_get.return_value = mock_internal_user_username()
    with pytest.raises(InvalidParameterException):
        rubrik.end_user_authorization("object_name", "end_user", "vmware")
def test_end_user_authorization_idempotence(rubrik, mocker):
    """When the end user already has restore rights on the VM, no API change is made."""

    def mock_self_object_id():
        # Canned object ID for the "object_name" VM.
        return "VirtualMachine:::e6a7e6f1-6050-1ee33-9ba6-8e284e2801de-vm-38297"

    def mock_internal_user_username():
        # Username lookup: exactly one matching user.
        return [
            {
                "id": "User:::119283ae-22ea-13f3-bfe2-9387cdf1d4a",
                "authDomainId": "string",
                "username": "string",
                "firstName": "string",
                "lastName": "string",
                "emailAddress": "string",
                "contactNumber": "string",
                "mfaServerId": "string"
            }
        ]

    def mock_internal_authorization_role_end_user_principals():
        # The user's restore privileges already contain the VM's object ID,
        # which is what makes the call a no-op.
        return {
            "hasMore": True,
            "data": [
                {
                    "principal": "string",
                    "privileges": {
                        "destructiveRestore": [
                            "string"
                        ],
                        "restore": [
                            "VirtualMachine:::e6a7e6f1-6050-1ee33-9ba6-8e284e2801de-vm-38297"
                        ],
                        "onDemandSnapshot": [
                            "string"
                        ],
                        "restoreWithoutDownload": [
                            "string"
                        ],
                        "viewEvent": [
                            "string"
                        ],
                        "provisionOnInfra": [
                            "string"
                        ],
                        "viewReport": [
                            "string"
                        ]
                    },
                    "organizationId": "string"
                }
            ],
            "total": 0
        }

    mock_object_id = mocker.patch('rubrik_cdm.Connect.object_id', autospec=True, spec_set=True)
    mock_object_id.return_value = mock_self_object_id()
    mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
    # Two GETs in order: user lookup, then the user's current authorizations.
    mock_get.side_effect = [mock_internal_user_username(), mock_internal_authorization_role_end_user_principals()]
    assert rubrik.end_user_authorization("object_name", "end_user", "vmware", 1) \
        == 'No change required. The End User "end_user" is already authorized to interact with the "object_name" VM.'
def test_end_user_authorization(rubrik, mocker):
    """When the end user lacks restore rights, the authorization POST is issued
    and its response is returned."""

    def mock_self_object_id():
        # Canned object ID for the "object_name" VM.
        return "VirtualMachine:::e6a7e6f1-6050-1ee33-9ba6-8e284e2801de-vm-38297"

    def mock_internal_user_username():
        # Username lookup: exactly one matching user.
        return [
            {
                "id": "User:::119283ae-22ea-13f3-bfe2-9387cdf1d4a",
                "authDomainId": "string",
                "username": "string",
                "firstName": "string",
                "lastName": "string",
                "emailAddress": "string",
                "contactNumber": "string",
                "mfaServerId": "string"
            }
        ]

    def mock_internal_authorization_role_end_user_principals():
        # Current privileges: the restore list holds a DIFFERENT object ID, so
        # the SDK must proceed to grant access.
        return {
            "hasMore": True,
            "data": [
                {
                    "principal": "string",
                    "privileges": {
                        "destructiveRestore": [
                            "string"
                        ],
                        "restore": [
                            "VirtualMachine:::e6a7e6r3-6050-1ee33-9ba6-8e284e2801de"
                        ],
                        "onDemandSnapshot": [
                            "string"
                        ],
                        "restoreWithoutDownload": [
                            "string"
                        ],
                        "viewEvent": [
                            "string"
                        ],
                        "provisionOnInfra": [
                            "string"
                        ],
                        "viewReport": [
                            "string"
                        ]
                    },
                    "organizationId": "string"
                }
            ],
            "total": 0
        }

    def mock_internal_authorization_role_end_user():
        # Response of the authorization POST, passed through verbatim.
        return {
            "hasMore": False,
            "data": [
                {
                    "principal": "User:::119283ae-22ea-13f3-bfe2-9387cdf1d4a",
                    "privileges": {
                        "destructiveRestore": [],
                        "restore": [
                            "VirtualMachine:::e6a7e6f1-6050-1ee33-9ba6-8e284e2801de-vm-38297-not-present"
                        ],
                        "onDemandSnapshot": [],
                        "restoreWithoutDownload": [],
                        "viewEvent": [],
                        "provisionOnInfra": [],
                        "viewReport": []
                    },
                    "organizationId": "Organization:::05e3ee0b-5ec1-e33b-88a5-d916855aff5f"
                }
            ],
            "total": 1
        }

    mock_object_id = mocker.patch('rubrik_cdm.Connect.object_id', autospec=True, spec_set=True)
    mock_object_id.return_value = mock_self_object_id()
    mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
    # Two GETs in order: user lookup, then the user's current authorizations.
    mock_get.side_effect = [mock_internal_user_username(), mock_internal_authorization_role_end_user_principals()]
    mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
    mock_post.return_value = mock_internal_authorization_role_end_user()
    assert rubrik.end_user_authorization("object_name", "end_user", "vmware") \
        == mock_internal_authorization_role_end_user()
def test_add_vcenter_idempotence(rubrik, mocker):
    """add_vcenter() is a no-op when the hostname is already registered on the cluster."""

    def mock_v1_vmware_vcenter_primary_cluster_id():
        # Existing vCenter listing whose hostname matches the one being added.
        return {
            "hasMore": True,
            "data": [
                {
                    "caCerts": "string",
                    "configuredSlaDomainId": "string",
                    "id": "string",
                    "name": "string",
                    "configuredSlaDomainName": "string",
                    "primaryClusterId": "string",
                    "hostname": "vCenter-Hostname",
                    "username": "string",
                    "conflictResolutionAuthz": "AllowAutoConflictResolution",
                    "configuredSlaDomainPolarisManagedId": "string"
                }
            ],
            "total": 1
        }

    mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
    mock_get.return_value = mock_v1_vmware_vcenter_primary_cluster_id()
    assert rubrik.add_vcenter("vCenter-Hostname", "vcenter_username", "vcenter_password") == \
        "No change required. The vCenter 'vCenter-Hostname' has already been added to the Rubrik cluster."
def test_add_vcenter(rubrik, mocker):
def mock_v1_vmware_vcenter_primary_cluster_id():
return {
"hasMore": True,
"data": [
{
"caCerts": "string",
"configuredSlaDomainId": "string",
"id": "string",
"name": "string",
"configuredSlaDomainName": "string",
"primaryClusterId": "string",
"hostname": "string",
"username": "string",
"conflictResolutionAuthz": "AllowAutoConflictResolution",
"configuredSlaDomainPolarisManagedId": "string"
}
],
"total": 1
}
def mock_v1_vmware_vcenter():
return {
"id": "string",
"status": "string",
"progress": 0,
"startTime": "2019-04-17T02:46:12.097Z",
"endTime": "2019-04-17T02:46:12.097Z",
"nodeId": "string",
"error": {
"message": "string"
},
"links": [
{
"href": "www.example.com",
"rel": "string"
}
]
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_v1_vmware_vcenter_primary_cluster_id()
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_v1_vmware_vcenter()
assert rubrik.add_vcenter("vCenter-Hostname", "vcenter_username", "vcenter_password") == \
(mock_v1_vmware_vcenter(), "www.example.com")
def test_configure_timezone_invalid_timezone(rubrik):
with pytest.raises(InvalidParameterException):
rubrik.configure_timezone("not_a_supported_timezone")
def test_configure_timezone_idempotence(rubrik, mocker):
def mock_get_v1_cluster_me():
return {
"id": "string",
"version": "string",
"apiVersion": "string",
"name": "string",
"timezone": {
"timezone": "America/Chicago"
},
"geolocation": {
"address": "string"
},
"acceptedEulaVersion": "string",
"latestEulaVersion": "string"
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_v1_cluster_me()
assert rubrik.configure_timezone("America/Chicago") \
== "No change required. The Rubrik cluster is already configured with 'America/Chicago' as it's timezone."
def test_configure_timezone(rubrik, mocker):
def mock_get_v1_cluster_me():
return {
"id": "string",
"version": "string",
"apiVersion": "string",
"name": "string",
"timezone": {
"timezone": "America/Denver"
},
"geolocation": {
"address": "string"
},
"acceptedEulaVersion": "string",
"latestEulaVersion": "string"
}
def mock_patch_v1_cluster_me():
return {
"id": "string",
"version": "string",
"apiVersion": "string",
"name": "string",
"timezone": {
"timezone": "America/Denver"
},
"geolocation": {
"address": "string"
},
"acceptedEulaVersion": "string",
"latestEulaVersion": "string"
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_v1_cluster_me()
mock_patch = mocker.patch('rubrik_cdm.Connect.patch', autospec=True, spec_set=True)
mock_patch.return_value = mock_patch_v1_cluster_me()
assert rubrik.configure_timezone("America/Chicago") == mock_patch_v1_cluster_me()
def test_configure_ntp_invalid_type(rubrik):
with pytest.raises(InvalidTypeException):
rubrik.configure_ntp("not_a_list")
def test_configure_syslog_invalid_protocol(rubrik):
with pytest.raises(InvalidParameterException):
rubrik.configure_syslog("syslog_ip", "not_a_valid_protocol")
def test_configure_syslog_invalid_idempotence(rubrik, mocker):
def mock_get_internal_syslog():
return {
"hasMore": True,
"data": [
{
"hostname": "syslog_ip",
"port": 514,
"protocol": "TCP",
"id": "string"
}
],
"total": 1
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_syslog()
assert rubrik.configure_syslog("syslog_ip", "TCP") == \
"No change required. The Rubrik cluster is already configured to use the syslog server 'syslog_ip' on port '514' using the 'TCP' protocol."
def test_configure_syslog(rubrik, mocker):
def mock_get_internal_syslog():
return {
"hasMore": True,
"data": [
{
"hostname": "syslog_ip",
"port": 514,
"protocol": "TCP",
"id": "string"
}
],
"total": 1
}
def mock_delete_internal_syslog_id():
return {'status_code': '204'}
def mock_post_internal_syslog():
return {
"hostname": "syslog_ip_new",
"port": 514,
"protocol": "TCP",
"id": "string"
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_syslog()
mock_delete = mocker.patch('rubrik_cdm.Connect.delete', autospec=True, spec_set=True)
mock_delete.return_value = mock_delete_internal_syslog_id()
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_post_internal_syslog()
assert rubrik.configure_syslog("syslog_ip_new", "TCP") == mock_post_internal_syslog()
def test_configure_vlan_invalid_ip(rubrik):
with pytest.raises(InvalidParameterException):
rubrik.configure_vlan("vlan", "netmask", "not_valid_a_list_or_dict")
def test_configure_vlan_invalid_number_of_vlans(rubrik, mocker):
def mock_internal_cluster_me_node():
return {
"hasMore": True,
"data": [
{
"id": "RVM000A000001",
"brikId": "string",
"status": "string",
"ipAddress": "string",
"supportTunnel": {
"isTunnelEnabled": True,
"port": 0,
"enabledTime": "2019-04-16T14:16:15.573Z",
"lastActivityTime": "2019-04-16T14:16:15.573Z",
"inactivityTimeoutInSeconds": 0
}
},
{
"id": "RVM000A000002",
"brikId": "string",
"status": "string",
"ipAddress": "string",
"supportTunnel": {
"isTunnelEnabled": True,
"port": 0,
"enabledTime": "2019-04-16T14:16:15.573Z",
"lastActivityTime": "2019-04-16T14:16:15.573Z",
"inactivityTimeoutInSeconds": 0
}
},
{
"id": "RVM000A000003",
"brikId": "string",
"status": "string",
"ipAddress": "string",
"supportTunnel": {
"isTunnelEnabled": True,
"port": 0,
"enabledTime": "2019-04-16T14:16:15.573Z",
"lastActivityTime": "2019-04-16T14:16:15.573Z",
"inactivityTimeoutInSeconds": 0
}
}
],
"total": 0
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_internal_cluster_me_node()
with pytest.raises(InvalidParameterException):
rubrik.configure_vlan("vlan", "netmask", ["IP_1", "IP_2"])
def test_configure_vlan(rubrik, mocker):
def mock_internal_cluster_me_node():
return {
"hasMore": True,
"data": [
{
"id": "RVM000A000001",
"brikId": "string",
"status": "string",
"ipAddress": "string",
"supportTunnel": {
"isTunnelEnabled": True,
"port": 0,
"enabledTime": "2019-04-16T14:16:15.573Z",
"lastActivityTime": "2019-04-16T14:16:15.573Z",
"inactivityTimeoutInSeconds": 0
}
},
{
"id": "RVM000A000002",
"brikId": "string",
"status": "string",
"ipAddress": "string",
"supportTunnel": {
"isTunnelEnabled": True,
"port": 0,
"enabledTime": "2019-04-16T14:16:15.573Z",
"lastActivityTime": "2019-04-16T14:16:15.573Z",
"inactivityTimeoutInSeconds": 0
}
},
{
"id": "RVM000A000003",
"brikId": "string",
"status": "string",
"ipAddress": "string",
"supportTunnel": {
"isTunnelEnabled": True,
"port": 0,
"enabledTime": "2019-04-16T14:16:15.573Z",
"lastActivityTime": "2019-04-16T14:16:15.573Z",
"inactivityTimeoutInSeconds": 0
}
}
],
"total": 0
}
def mock_internal_cluster_me_vlan():
return {
"hasMore": True,
"data": [
{
"vlan": 0,
"netmask": "string",
"interfaces": [
{
"node": "string",
"ip": "string"
}
]
}
],
"total": 0
}
def mock_post_internal_cluster_me_vlan():
return {'status_code': '204'}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.side_effect = [mock_internal_cluster_me_node(), mock_internal_cluster_me_vlan()]
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_post_internal_cluster_me_vlan()
assert rubrik.configure_vlan("100", "netmask", ["IP_1", "IP_2", "IP_3"]) == mock_post_internal_cluster_me_vlan()
def test_configure_dns_servers_invalid_server_ip(rubrik):
with pytest.raises(InvalidTypeException):
rubrik.configure_dns_servers("not_a_valid_server_ip_type")
def test_configure_dns_servers_idempotence(rubrik, mocker):
def mock_get_internal_cluster_me_dns_nameserver():
return [
"server_1"
]
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_cluster_me_dns_nameserver()
assert rubrik.configure_dns_servers(["server_1"]) == \
"No change required. The Rubrik cluster is already configured with the provided DNS servers."
def test_configure_dns_servers(rubrik, mocker):
def mock_get_internal_cluster_me_dns_nameserver():
return [
"server_1",
"server_2"
]
def mock_post_internal_cluster_me_dns_nameserver():
return {'status_code': '204'}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_cluster_me_dns_nameserver()
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_post_internal_cluster_me_dns_nameserver()
assert rubrik.configure_dns_servers(["server_1"]) == mock_post_internal_cluster_me_dns_nameserver()
def test_configure_search_domain_invalid_search_domain(rubrik):
with pytest.raises(InvalidTypeException):
rubrik.configure_search_domain("not_a_valid_search_domain_type")
def test_configure_search_domain_idempotence(rubrik, mocker):
def mock_get_internal_cluster_me_dns_search_domain():
return [
"domain.1",
]
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_cluster_me_dns_search_domain()
assert rubrik.configure_search_domain(["domain.1"]) == \
"No change required. The Rubrik cluster is already configured with the provided DNS Search Domains."
def test_configure_search_domain(rubrik, mocker):
def mock_get_internal_cluster_me_dns_search_domain():
return [
"server_1",
]
def mock_post_internal_cluster_me_dns_search_domain():
return {'status_code': '204'}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_cluster_me_dns_search_domain()
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_post_internal_cluster_me_dns_search_domain()
assert rubrik.configure_search_domain(["domain.1"]) == mock_post_internal_cluster_me_dns_search_domain()
def test_configure_smtp_settings_invalid_encryption(rubrik):
with pytest.raises(InvalidParameterException):
rubrik.configure_smtp_settings("hostname", "port", "from_email", "smtp_username",
"smtp_password", "not_a_valid_encryption_value")
def test_configure_smtp_settings_idempotence(rubrik, mocker):
def mock_get_internal_smtp_instance():
return {
"hasMore": True,
"data": [
{
"id": "string",
"smtpHostname": "hostname",
"smtpPort": 0,
"smtpSecurity": "NONE",
"smtpUsername": "smtp_username",
"fromEmailId": "from_email"
}
],
"total": 1
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_smtp_instance()
assert rubrik.configure_smtp_settings("hostname", "0", "from_email", "smtp_username", "smtp_password", "NONE") == \
"No change required. The Rubrik cluster is already configured with the provided SMTP settings."
def test_configure_smtp_settings_new(rubrik, mocker):
def mock_get_internal_smtp_instance():
return {
"hasMore": True,
"data": [
{
"id": "string",
"smtpHostname": "string",
"smtpPort": 0,
"smtpSecurity": "string",
"smtpUsername": "string",
"fromEmailId": "string"
}
],
"total": 0
}
def mock_post_internal_smtp_instance():
return {
"id": "string",
"smtpHostname": "string",
"smtpPort": 0,
"smtpSecurity": "string",
"smtpUsername": "string",
"fromEmailId": "string"
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_smtp_instance()
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_post_internal_smtp_instance()
assert rubrik.configure_smtp_settings("hostname", "0", "from_email", "smtp_username", "smtp_password", "NONE") == \
mock_post_internal_smtp_instance()
def test_configure_smtp_settings_update(rubrik, mocker):
def mock_get_internal_smtp_instance():
return {
"hasMore": True,
"data": [
{
"id": "string",
"smtpHostname": "string",
"smtpPort": 0,
"smtpSecurity": "string",
"smtpUsername": "string",
"fromEmailId": "string"
}
],
"total": 1
}
def mock_patch_internal_smtp_instance_id():
return {
"id": "string",
"smtpHostname": "string",
"smtpPort": 0,
"smtpSecurity": "string",
"smtpUsername": "string",
"fromEmailId": "string"
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_smtp_instance()
mock_patch = mocker.patch('rubrik_cdm.Connect.patch', autospec=True, spec_set=True)
mock_patch.return_value = mock_patch_internal_smtp_instance_id()
assert rubrik.configure_smtp_settings("hostname", "0", "from_email", "smtp_username", "smtp_password", "NONE") == \
mock_patch_internal_smtp_instance_id()
def test_refresh_vcenter_no_wait(rubrik, mocker):
def mock_object_id():
return "vCenter:::eeeb7c90-a074-1233-e6e6-90386f8c3d70"
def mock_post_v1_vmware_vcenter_id_refresh():
return {
"id": "string",
"status": "string",
"progress": 0,
"startTime": "2019-04-17T21:22:14.214Z",
"endTime": "2019-04-17T21:22:14.214Z",
"nodeId": "string",
"error": {
"message": "string"
},
"links": [
{
"href": "build.rubrik.com",
"rel": "string"
}
]
}
mock_get_object_id = mocker.patch('rubrik_cdm.Connect.object_id', autospec=True, spec_set=True)
mock_get_object_id.return_value = mock_object_id()
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_post_v1_vmware_vcenter_id_refresh()
assert rubrik.refresh_vcenter("vcenter_ip", False) == mock_post_v1_vmware_vcenter_id_refresh()
def test_refresh_vcenter_wait_for_completion(rubrik, mocker):
def mock_object_id():
return "vCenter:::eeeb7c90-a074-1233-e6e6-90386f8c3d70"
def mock_job_status():
return {
"id": "REFRESH_METADATA_eeeeb7c81-a074-4525-b5b5-81796f8c3d70_0d3e10b0-6741-4c1a-e208-c661325c1efd:::0",
"status": "SUCCEEDED",
"startTime": "2019-04-17T21:31:17.785Z",
"endTime": "2019-04-17T21:31:39.056Z",
"nodeId": "cluster:::RVM189S019012",
"links": [
{
"href": "REFRESH_METADATA_eeeeb7c81-a074-4525-b5b5-81796f8c3d70_0d3e10b0-6741-4c1a-e208-c661325c1efd:::0",
"rel": "self"
}
]
}
mock_get_object_id = mocker.patch('rubrik_cdm.Connect.object_id', autospec=True, spec_set=True)
mock_get_object_id.return_value = mock_object_id()
mock_common_api = mocker.patch('rubrik_cdm.Connect._common_api', autospec=True, spec_set=True)
mock_common_api.return_value = mock_job_status()
assert rubrik.refresh_vcenter("vcenter_ip", True) == mock_job_status()
def test_create_user_idempotence(rubrik, mocker):
def mock_get_internal_user():
return [
{
"id": "string",
"authDomainId": "string",
"username": "string",
"firstName": "string",
"lastName": "string",
"emailAddress": "string",
"contactNumber": "string",
"mfaServerId": "string"
}
]
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_user()
assert rubrik.create_user("username", "password") == \
"No change required. The user 'username' already exists on the Rubrik cluster."
def test_create_user(rubrik, mocker):
def mock_get_internal_user():
return []
def mock_post_internal_user():
return {
"id": "string",
"authDomainId": "string",
"username": "username",
"firstName": "string",
"lastName": "string",
"emailAddress": "string",
"contactNumber": "string",
"createdById": "string",
"createTime": "string",
"mfaServerId": "string"
}
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_user()
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_post_internal_user()
assert rubrik.create_user("username", "password") == mock_post_internal_user()
def test_read_only_authorization_minimum_installed_cdm_version(rubrik, mocker):
mock_cluster_version = mocker.patch('rubrik_cdm.Connect.cluster_version', autospec=True, spec_set=True)
mock_cluster_version.return_value = "4.1.2"
with pytest.raises(CDMVersionException):
rubrik.read_only_authorization("username")
def test_read_only_authorization_invalid_user(rubrik, mocker):
def mock_get_internal_user():
return []
mock_cluster_version = mocker.patch('rubrik_cdm.Connect.cluster_version', autospec=True, spec_set=True)
mock_cluster_version.return_value = "5.0.1"
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.return_value = mock_get_internal_user()
with pytest.raises(InvalidParameterException):
rubrik.read_only_authorization("username")
def test_read_only_authorization_idempotence(rubrik, mocker):
def mock_get_internal_user():
return [
{
"id": "string",
"authDomainId": "string",
"username": "string",
"firstName": "string",
"lastName": "string",
"emailAddress": "string",
"contactNumber": "string",
"mfaServerId": "string"
}
]
def mock_get_internal_authorization_role_read_only_admin_principals():
return {
"hasMore": True,
"data": [
{
"principal": "string",
"privileges": {
"basic": [
"Global:::All"
]
},
"organizationId": "string"
}
],
"total": 0
}
mock_cluster_version = mocker.patch('rubrik_cdm.Connect.cluster_version', autospec=True, spec_set=True)
mock_cluster_version.return_value = "5.0.1"
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.side_effect = [mock_get_internal_user(),
mock_get_internal_authorization_role_read_only_admin_principals()]
assert rubrik.read_only_authorization("username") == \
"No change required. The user 'username' already has read-only permissions."
def test_read_only_authorization(rubrik, mocker):
def mock_get_internal_user():
return [
{
"id": "string",
"authDomainId": "string",
"username": "string",
"firstName": "string",
"lastName": "string",
"emailAddress": "string",
"contactNumber": "string",
"mfaServerId": "string"
}
]
def mock_get_internal_authorization_role_read_only_admin_principals():
return {
"hasMore": True,
"data": [
{
"principal": "string",
"privileges": {
"basic": [
"string"
]
},
"organizationId": "string"
}
],
"total": 0
}
def mock_post_internal_authorization_role_read_only_admin():
return {
"hasMore": True,
"data": [
{
"principal": "string",
"privileges": {
"basic": [
"string"
]
},
"organizationId": "string"
}
],
"total": 1
}
mock_cluster_version = mocker.patch('rubrik_cdm.Connect.cluster_version', autospec=True, spec_set=True)
mock_cluster_version.return_value = "5.0.1"
mock_get = mocker.patch('rubrik_cdm.Connect.get', autospec=True, spec_set=True)
mock_get.side_effect = [mock_get_internal_user(),
mock_get_internal_authorization_role_read_only_admin_principals()]
mock_post = mocker.patch('rubrik_cdm.Connect.post', autospec=True, spec_set=True)
mock_post.return_value = mock_post_internal_authorization_role_read_only_admin()
assert rubrik.read_only_authorization("username") == mock_post_internal_authorization_role_read_only_admin()
| 34.631532
| 147
| 0.532034
| 3,548
| 39,099
| 5.549324
| 0.074972
| 0.03342
| 0.044035
| 0.051806
| 0.89644
| 0.839301
| 0.800904
| 0.760323
| 0.723805
| 0.699172
| 0
| 0.04698
| 0.355431
| 39,099
| 1,128
| 148
| 34.662234
| 0.734267
| 0
| 0
| 0.649336
| 0
| 0.003319
| 0.254252
| 0.092765
| 0
| 0
| 0
| 0
| 0.029867
| 1
| 0.100664
| false
| 0.00885
| 0.003319
| 0.057522
| 0.161504
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
374d747e117617bc056bc2c94da7f6a24dce3269
| 25
|
py
|
Python
|
Morton3D/__init__.py
|
Jianningli/Morton3D
|
7a58e067f23cbd1ff433219a4a6e4b0b0b3f8cdd
|
[
"MIT"
] | 1
|
2020-08-24T08:13:01.000Z
|
2020-08-24T08:13:01.000Z
|
Morton3D/__init__.py
|
Jianningli/Morton3D
|
7a58e067f23cbd1ff433219a4a6e4b0b0b3f8cdd
|
[
"MIT"
] | null | null | null |
Morton3D/__init__.py
|
Jianningli/Morton3D
|
7a58e067f23cbd1ff433219a4a6e4b0b0b3f8cdd
|
[
"MIT"
] | null | null | null |
from .Morton3D import *
| 8.333333
| 23
| 0.72
| 3
| 25
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.2
| 25
| 2
| 24
| 12.5
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
377760793eb22d4545e17687b9ee614a361a9a5e
| 20
|
py
|
Python
|
cantera_tools/__init__.py
|
amarkpayne/ampayne_tools
|
a47e9baa70edaedfb655ab51f79bf5035eaacd83
|
[
"MIT"
] | null | null | null |
cantera_tools/__init__.py
|
amarkpayne/ampayne_tools
|
a47e9baa70edaedfb655ab51f79bf5035eaacd83
|
[
"MIT"
] | null | null | null |
cantera_tools/__init__.py
|
amarkpayne/ampayne_tools
|
a47e9baa70edaedfb655ab51f79bf5035eaacd83
|
[
"MIT"
] | null | null | null |
from tools import *
| 10
| 19
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
378226377e34e449760fd9cbac470ce87823480e
| 2,316
|
py
|
Python
|
tests/backend/metadata/test_hatch.py
|
ashemedai/hatch
|
9ec00d5e027c992efbc16dd777b1f6926368b6bf
|
[
"MIT"
] | null | null | null |
tests/backend/metadata/test_hatch.py
|
ashemedai/hatch
|
9ec00d5e027c992efbc16dd777b1f6926368b6bf
|
[
"MIT"
] | null | null | null |
tests/backend/metadata/test_hatch.py
|
ashemedai/hatch
|
9ec00d5e027c992efbc16dd777b1f6926368b6bf
|
[
"MIT"
] | null | null | null |
import pytest
from hatchling.metadata.core import HatchMetadata
class TestMetadataConfig:
def test_default(self, isolation):
config = {}
metadata = HatchMetadata(str(isolation), config, None)
assert metadata.metadata_config == metadata.metadata_config == {}
def test_not_table(self, isolation):
config = {'metadata': 0}
metadata = HatchMetadata(str(isolation), config, None)
with pytest.raises(TypeError, match='Field `tool.hatch.metadata` must be a table'):
_ = metadata.metadata_config
def test_correct(self, isolation):
config = {'metadata': {'option': True}}
metadata = HatchMetadata(str(isolation), config, None)
assert metadata.metadata_config == metadata.metadata_config == {'option': True}
class TestBuildConfig:
def test_default(self, isolation):
config = {}
metadata = HatchMetadata(str(isolation), config, None)
assert metadata.build_config == metadata.build_config == {}
def test_not_table(self, isolation):
config = {'build': 0}
metadata = HatchMetadata(str(isolation), config, None)
with pytest.raises(TypeError, match='Field `tool.hatch.build` must be a table'):
_ = metadata.build_config
def test_correct(self, isolation):
config = {'build': {'reproducible': True}}
metadata = HatchMetadata(str(isolation), config, None)
assert metadata.build_config == metadata.build_config == {'reproducible': True}
class TestBuildTargets:
def test_default(self, isolation):
config = {}
metadata = HatchMetadata(str(isolation), config, None)
assert metadata.build_targets == metadata.build_targets == {}
def test_not_table(self, isolation):
config = {'build': {'targets': 0}}
metadata = HatchMetadata(str(isolation), config, None)
with pytest.raises(TypeError, match='Field `tool.hatch.build.targets` must be a table'):
_ = metadata.build_targets
def test_correct(self, isolation):
config = {'build': {'targets': {'wheel': {'versions': ['standard']}}}}
metadata = HatchMetadata(str(isolation), config, None)
assert metadata.build_targets == metadata.build_targets == {'wheel': {'versions': ['standard']}}
| 34.567164
| 104
| 0.65544
| 241
| 2,316
| 6.174274
| 0.174274
| 0.181452
| 0.114919
| 0.199597
| 0.868952
| 0.807796
| 0.774194
| 0.692876
| 0.617608
| 0.612231
| 0
| 0.001657
| 0.21848
| 2,316
| 66
| 105
| 35.090909
| 0.820442
| 0
| 0
| 0.477273
| 0
| 0
| 0.111831
| 0.020294
| 0
| 0
| 0
| 0
| 0.136364
| 1
| 0.204545
| false
| 0
| 0.045455
| 0
| 0.318182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
378e5ecda54ca6d9324a219f8fdbd4b8dca2abcf
| 16,269
|
py
|
Python
|
src/ckanext-userdatasets/ckanext/userdatasets/tests/test_auth_actions_unit.py
|
CI-WATER/portal
|
c61660c8389c7af82517cbd0154bc83f9737c4d1
|
[
"BSD-3-Clause"
] | 1
|
2017-12-22T04:53:06.000Z
|
2017-12-22T04:53:06.000Z
|
src/ckanext-userdatasets/ckanext/userdatasets/tests/test_auth_actions_unit.py
|
CI-WATER/portal
|
c61660c8389c7af82517cbd0154bc83f9737c4d1
|
[
"BSD-3-Clause"
] | 4
|
2015-04-16T19:41:17.000Z
|
2020-10-25T18:02:14.000Z
|
src/ckanext-userdatasets/ckanext/userdatasets/tests/test_auth_actions_unit.py
|
CI-WATER/portal
|
c61660c8389c7af82517cbd0154bc83f9737c4d1
|
[
"BSD-3-Clause"
] | null | null | null |
"""Unit tests on the auth functions in ckanext.userdatasets.logic.auth
These tests test the auth functions' implementation independently of CKAN.
For that reason the CKAN calls are patched.
The first two tests check ckanext.userdatasets.auth.auth.user_owns_package_as_member and
ckanext.userdatasets.auth.auth.user_is_member_of_package_org extensively;
these function are then patched when testing the auth functions to ensure that each function's
logic is tested rather than re-testing user_owns_package_as_member each time.
"""
from mock import patch, Mock
from nose import SkipTest
from nose.tools import assert_equal
from ckanext.userdatasets.logic.auth.auth import user_owns_package_as_member, user_is_member_of_package_org
from ckanext.userdatasets.logic.auth.create import package_create, resource_create, resource_view_create
from ckanext.userdatasets.logic.auth.update import package_update, resource_update, resource_view_update
from ckanext.userdatasets.logic.auth.delete import package_delete, resource_delete, resource_view_delete
class SMock:
def __init__(self, **k):
for i in k:
setattr(self, i, k[i])
class TestAuthActionsUnit:
"""Perform unit tests on the auth functions in ckanext.userdatasets.logic.auth"""
@classmethod
def setup_class(cls):
# Check whether this version of CKAN has resource views. Remove this test when branch 1251 gets merged into CKAN master.
try:
from ckan.logic.action.create import resource_view_create
cls.has_resource_views = True
except ImportError:
cls.has_resource_views = False
@patch('ckanext.userdatasets.logic.auth.auth.users_role_for_group_or_org')
def test_user_is_member_of_package_org(self, mock_users_role):
"""Test ckanext.userdatasets.logic.auth.auth.user_is_member_of_package_org
Ensure all the possible combination of parameters always lead to the expected
result.
"""
tests = [
{
'package': SMock(owner_org='carrot'),
'user': SMock(name='turtle'),
'role': 'member',
'result': True
},
{
'package': SMock(owner_org='carrot'),
'user': SMock(name='turtle'),
'role': 'editor',
'result': False
},
{
'package': SMock(owner_org=None),
'user': SMock(name='turtle'),
'role': 'member',
'result': False
},
]
for t in tests:
mock_users_role.return_value = t['role']
assert_equal(user_is_member_of_package_org(t['user'], t['package']), t['result'])
@patch('ckanext.userdatasets.logic.auth.auth.users_role_for_group_or_org')
def test_user_owns_package_as_member(self, mock_users_role):
"""Test ckanext.userdatasets.logic.auth.auth.user_owns_package_as_member
Ensure all the possible combination of parameters always lead to the expected
result.
"""
tests = [
{
'user': SMock(id=444, name='turtle'),
'package': SMock(creator_user_id=444, owner_org='carrot'),
'role': 'member',
'result': True
},
{
'user': SMock(id=445, name='turtle'),
'package': SMock(creator_user_id=444, owner_org='carrot'),
'role': 'member',
'result': False
},
{
'user': SMock(id=444, name='turtle'),
'package': SMock(creator_user_id=444, owner_org=False),
'role': 'member',
'result': False
},
{
'user': SMock(id=444, name='turtle'),
'package': SMock(creator_user_id=444, owner_org='carrot'),
'role': 'editor',
'result': False
}
]
for t in tests:
mock_users_role.return_value = t['role']
assert_equal(user_owns_package_as_member(t['user'], t['package']), t['result'])
@patch('ckanext.userdatasets.logic.auth.create.users_role_for_group_or_org')
@patch('ckanext.userdatasets.logic.auth.create.has_user_permission_for_some_org')
def test_package_create(self, mock_has_perm, mock_users_role):
"""Test ckanext.userdatasets.logic.auth.create.package_create.
Ensure all the possible combination of parameters always lead to the expected
result.
"""
tests = [
{
'context': {'auth_user_obj': SMock(name='turtle')},
'data_dict': {'owner_org': 'carrot'},
'role': 'member',
'has_perm': True,
'result': {'success': True},
},
{
'context': {'auth_user_obj': SMock(name='turtle')},
'data_dict': {'owner_org': 'carrot'},
'role': 'editor',
'has_perm': True,
'result': 'fallback',
},
{
'context': {'auth_user_obj': SMock(name='turtle')},
'data_dict': False,
'role': 'member',
'has_perm': True,
'result': {'success': True},
},
{
'context': {'auth_user_obj': SMock(name='turtle')},
'data_dict': {'other':'value'},
'role': 'member',
'has_perm': True,
'result': {'success': True},
},
{
'context': {'auth_user_obj': SMock(name='turtle')},
'data_dict': False,
'role': 'member',
'has_perm': False,
'result': 'fallback',
},
{
'context': {'auth_user_obj': SMock(name='turtle')},
'data_dict': {'other':'value'},
'role': 'member',
'has_perm': False,
'result': 'fallback',
},
]
for t in tests:
mock_users_role.return_value = t['role']
mock_has_perm.return_value = t['has_perm']
fb = Mock(return_value='fallback')
assert_equal(package_create(fb, t['context'], t['data_dict']), t['result'])
@patch('ckanext.userdatasets.logic.auth.create.user_is_member_of_package_org')
@patch('ckanext.userdatasets.logic.auth.create.user_owns_package_as_member')
@patch('ckanext.userdatasets.logic.auth.create.get_package_object')
def test_resource_create(self, mock_get_package, mock_user_owns, mock_user_is_member):
"""Test ckanext.userdatasets.logic.auth.create.resource_create.
Ensure all routes are tested.
"""
tests = [
{
'user_owns': True,
'user_is_member': True,
'result': {'success': True}
},
{
'user_owns': False,
'user_is_member': True,
'result': {'success': False}
},
{
'user_owns': False,
'user_is_member': False,
'result': 'fallback'
},
]
mock_get_package.return_value = 1
fb = Mock(return_value='fallback')
for t in tests:
mock_user_owns.return_value = t['user_owns']
mock_user_is_member.return_value = t['user_is_member']
assert_equal(resource_create(fb, {'auth_user_obj': 1}, {}), t['result'])
@patch('ckanext.userdatasets.logic.auth.create.user_is_member_of_package_org')
@patch('ckanext.userdatasets.logic.auth.create.user_owns_package_as_member')
@patch('ckanext.userdatasets.logic.auth.create.get_resource_object')
def test_resource_view_create(self, mock_get_resource, mock_user_owns, mock_user_is_member):
"""Test ckanext.userdatasets.logic.auth.create.resource_view_create.
Ensure all routes are tested.
"""
if not self.has_resource_views:
raise SkipTest("This version of CKAN does not have resource views")
tests = [
{
'user_owns': True,
'user_is_member': True,
'result': {'success': True}
},
{
'user_owns': False,
'user_is_member': True,
'result': {'success': False}
},
{
'user_owns': False,
'user_is_member': False,
'result': 'fallback'
},
]
mock_get_resource.return_value = SMock(resource_group=SMock(package=1))
fb = Mock(return_value='fallback')
for t in tests:
mock_user_owns.return_value = t['user_owns']
mock_user_is_member.return_value = t['user_is_member']
assert_equal(resource_view_create(fb, {'auth_user_obj': 1}, {'resource_id':1}), t['result'])
@patch('ckanext.userdatasets.logic.auth.update.user_owns_package_as_member')
@patch('ckanext.userdatasets.logic.auth.update.get_package_object')
def test_package_update(self, mock_get_package, mock_user_owns):
"""Test ckanext.userdatasets.logic.auth.update.package_update.
Ensure both success and failure routes are tested.
"""
mock_get_package.return_value = 1
mock_user_owns.return_value = True
assert_equal(package_update(None, {'auth_user_obj': 1}, {}), {'success': True})
mock_user_owns.return_value = False
fb = Mock(return_value='fallback')
assert_equal(package_update(fb, {'auth_user_obj': 1}, {}), 'fallback')
@patch('ckanext.userdatasets.logic.auth.update.user_is_member_of_package_org')
@patch('ckanext.userdatasets.logic.auth.update.user_owns_package_as_member')
@patch('ckanext.userdatasets.logic.auth.update.get_resource_object')
def test_resource_update(self, mock_get_resource, mock_user_owns, mock_user_is_member):
"""Test ckanext.userdatasets.logic.auth.create.resource_update.
Ensure all routes are tested.
"""
tests = [
{
'user_owns': True,
'user_is_member': True,
'result': {'success': True}
},
{
'user_owns': False,
'user_is_member': True,
'result': {'success': False}
},
{
'user_owns': False,
'user_is_member': False,
'result': 'fallback'
},
]
mock_get_resource.return_value = SMock(resource_group=SMock(package=1))
fb = Mock(return_value='fallback')
for t in tests:
mock_user_owns.return_value = t['user_owns']
mock_user_is_member.return_value = t['user_is_member']
assert_equal(resource_update(fb, {'auth_user_obj': 1}, {}), t['result'])
@patch('ckanext.userdatasets.logic.auth.update.user_is_member_of_package_org')
@patch('ckanext.userdatasets.logic.auth.update.user_owns_package_as_member')
@patch('ckanext.userdatasets.logic.auth.update.get_resource_object')
@patch('ckanext.userdatasets.logic.auth.update.get_resource_view_object')
def test_resource_view_update(self, mock_get_resource_view, mock_get_resource, mock_user_owns, mock_user_is_member):
"""Test ckanext.userdatasets.logic.auth.create.resource_view_update.
Ensure all routes are tested.
"""
if not self.has_resource_views:
raise SkipTest("This version of CKAN does not have resource views")
tests = [
{
'user_owns': True,
'user_is_member': True,
'result': {'success': True}
},
{
'user_owns': False,
'user_is_member': True,
'result': {'success': False}
},
{
'user_owns': False,
'user_is_member': False,
'result': 'fallback'
},
]
mock_get_resource_view.return_value = SMock(resource_id=1)
mock_get_resource.return_value = SMock(resource_group=SMock(package=1))
fb = Mock(return_value='fallback')
for t in tests:
mock_user_owns.return_value = t['user_owns']
mock_user_is_member.return_value = t['user_is_member']
assert_equal(resource_view_update(fb, {'auth_user_obj': 1}, {'resource_id':1}), t['result'])
@patch('ckanext.userdatasets.logic.auth.delete.user_owns_package_as_member')
@patch('ckanext.userdatasets.logic.auth.delete.get_package_object')
def test_package_delete(self, mock_get_package, mock_user_owns):
"""Test ckanext.userdatasets.logic.auth.delete.package_delete.
Ensure both success and failure routes are tested.
"""
mock_get_package.return_value = 1
mock_user_owns.return_value = True
assert_equal(package_delete(None, {'auth_user_obj': 1}, {}), {'success': True})
mock_user_owns.return_value = False
fb = Mock(return_value='fallback')
assert_equal(package_delete(fb, {'auth_user_obj': 1}, {}), 'fallback')
@patch('ckanext.userdatasets.logic.auth.delete.user_is_member_of_package_org')
@patch('ckanext.userdatasets.logic.auth.delete.user_owns_package_as_member')
@patch('ckanext.userdatasets.logic.auth.delete.get_resource_object')
def test_resource_delete(self, mock_get_resource, mock_user_owns, mock_user_is_member):
"""Test ckanext.userdatasets.logic.auth.create.resource_delete.
Ensure all routes are tested.
"""
tests = [
{
'user_owns': True,
'user_is_member': True,
'result': {'success': True}
},
{
'user_owns': False,
'user_is_member': True,
'result': {'success': False}
},
{
'user_owns': False,
'user_is_member': False,
'result': 'fallback'
},
]
mock_get_resource.return_value = SMock(resource_group=SMock(package=1))
fb = Mock(return_value='fallback')
for t in tests:
mock_user_owns.return_value = t['user_owns']
mock_user_is_member.return_value = t['user_is_member']
assert_equal(resource_delete(fb, {'auth_user_obj': 1}, {}), t['result'])
@patch('ckanext.userdatasets.logic.auth.delete.user_is_member_of_package_org')
@patch('ckanext.userdatasets.logic.auth.delete.user_owns_package_as_member')
@patch('ckanext.userdatasets.logic.auth.delete.get_resource_object')
@patch('ckanext.userdatasets.logic.auth.delete.get_resource_view_object')
def test_resource_view_delete(self, mock_get_resource_view, mock_get_resource, mock_user_owns, mock_user_is_member):
"""Test ckanext.userdatasets.logic.auth.create.resource_view_delete.
Ensure all routes are tested.
"""
if not self.has_resource_views:
raise SkipTest("This version of CKAN does not have resource views")
tests = [
{
'user_owns': True,
'user_is_member': True,
'result': {'success': True}
},
{
'user_owns': False,
'user_is_member': True,
'result': {'success': False}
},
{
'user_owns': False,
'user_is_member': False,
'result': 'fallback'
},
]
mock_get_resource_view.return_value = SMock(resource_id=1)
mock_get_resource.return_value = SMock(resource_group=SMock(package=1))
fb = Mock(return_value='fallback')
for t in tests:
mock_user_owns.return_value = t['user_owns']
mock_user_is_member.return_value = t['user_is_member']
assert_equal(resource_view_delete(fb, {'auth_user_obj': 1}, {'resource_id':1}), t['result'])
| 40.979849
| 129
| 0.583687
| 1,830
| 16,269
| 4.882514
| 0.075956
| 0.05014
| 0.063123
| 0.141018
| 0.879239
| 0.840739
| 0.806715
| 0.787129
| 0.753106
| 0.753106
| 0
| 0.004481
| 0.300449
| 16,269
| 396
| 130
| 41.083333
| 0.780599
| 0.121704
| 0
| 0.602524
| 0
| 0
| 0.272565
| 0.127778
| 0
| 0
| 0
| 0
| 0.044164
| 1
| 0.041009
| false
| 0
| 0.028391
| 0
| 0.07571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
37bb3e96c31b694bb15ea7891fdc2993bbe46013
| 39
|
py
|
Python
|
src/python/qeqiskit/simulator/__init__.py
|
TimWeaving/qe-qiskit
|
fe280835971f5c0203a5143f768cbf61fdef722c
|
[
"Apache-2.0"
] | 8
|
2020-10-06T13:54:36.000Z
|
2022-03-03T23:22:31.000Z
|
src/python/qeqiskit/simulator/__init__.py
|
TimWeaving/qe-qiskit
|
fe280835971f5c0203a5143f768cbf61fdef722c
|
[
"Apache-2.0"
] | 39
|
2020-11-26T14:46:52.000Z
|
2022-03-31T12:39:09.000Z
|
src/python/qeqiskit/simulator/__init__.py
|
TimWeaving/qe-qiskit
|
fe280835971f5c0203a5143f768cbf61fdef722c
|
[
"Apache-2.0"
] | 2
|
2021-01-14T22:26:46.000Z
|
2021-09-30T13:40:59.000Z
|
from .simulator import QiskitSimulator
| 19.5
| 38
| 0.871795
| 4
| 39
| 8.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
805c7abf41e06accdf55af22ddf647cecb31459f
| 91
|
py
|
Python
|
oldstuff/meh/api.py
|
miusuarioamigo/python-Le
|
dbb653255dab7d11b87f25eec94bcce63a86aa42
|
[
"MIT"
] | null | null | null |
oldstuff/meh/api.py
|
miusuarioamigo/python-Le
|
dbb653255dab7d11b87f25eec94bcce63a86aa42
|
[
"MIT"
] | null | null | null |
oldstuff/meh/api.py
|
miusuarioamigo/python-Le
|
dbb653255dab7d11b87f25eec94bcce63a86aa42
|
[
"MIT"
] | null | null | null |
def say_hello():
return {"pepe": "Regresame lo que sea y mata a un pato!", "otra":"2"}
| 30.333333
| 73
| 0.615385
| 16
| 91
| 3.4375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013699
| 0.197802
| 91
| 2
| 74
| 45.5
| 0.739726
| 0
| 0
| 0
| 0
| 0
| 0.516484
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
806fa604249be504d1e65d95b95c658fc5262e6e
| 166
|
py
|
Python
|
tests/unit/test_dwave_trng.py
|
sea212/qpu
|
2ac23e78aed1b358640d6848406427cb2bf2b73d
|
[
"MIT"
] | 1
|
2020-10-20T07:04:15.000Z
|
2020-10-20T07:04:15.000Z
|
tests/unit/test_dwave_trng.py
|
sea212/qpu
|
2ac23e78aed1b358640d6848406427cb2bf2b73d
|
[
"MIT"
] | null | null | null |
tests/unit/test_dwave_trng.py
|
sea212/qpu
|
2ac23e78aed1b358640d6848406427cb2bf2b73d
|
[
"MIT"
] | null | null | null |
import pytest # noqa: F401
import qpu.dwave.cryptography.trng as dwave_trng # noqa: F401
def test_random_deviation():
# TODO: implement
assert(42 == 42)
| 18.444444
| 62
| 0.704819
| 23
| 166
| 4.956522
| 0.73913
| 0.140351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075188
| 0.198795
| 166
| 8
| 63
| 20.75
| 0.781955
| 0.222892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
807fd672b02e1d171dc0dda168327902a516fca4
| 167
|
py
|
Python
|
thinc/tests/mypy/modules/fail_no_plugin.py
|
TheVinhLuong102/thinc
|
7b54f728ddec7765a1d8a5e553d4b4b90b9edaec
|
[
"MIT"
] | 2,542
|
2016-10-20T07:02:59.000Z
|
2022-03-30T20:18:35.000Z
|
thinc/tests/mypy/modules/fail_no_plugin.py
|
TheVinhLuong102/thinc
|
7b54f728ddec7765a1d8a5e553d4b4b90b9edaec
|
[
"MIT"
] | 453
|
2016-10-19T21:09:35.000Z
|
2022-03-31T11:01:15.000Z
|
thinc/tests/mypy/modules/fail_no_plugin.py
|
TheVinhLuong102/thinc
|
7b54f728ddec7765a1d8a5e553d4b4b90b9edaec
|
[
"MIT"
] | 265
|
2016-11-14T14:53:58.000Z
|
2022-03-31T02:25:24.000Z
|
from thinc.api import chain, Relu, reduce_max, Softmax, add
bad_model = chain(Relu(10), reduce_max(), Softmax())
bad_model2 = add(Relu(10), reduce_max(), Softmax())
| 27.833333
| 59
| 0.718563
| 26
| 167
| 4.423077
| 0.538462
| 0.234783
| 0.417391
| 0.26087
| 0.382609
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034014
| 0.11976
| 167
| 5
| 60
| 33.4
| 0.748299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
80b6f78f4b0cbf9d47ea87a56e926f72362e2693
| 180
|
py
|
Python
|
tests/test_input_thermo_elasticity_ess.py
|
Gkdnz/SfePy
|
a3a39d4e087705e9e0e8884cbf63513a2ded2108
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_input_thermo_elasticity_ess.py
|
Gkdnz/SfePy
|
a3a39d4e087705e9e0e8884cbf63513a2ded2108
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_input_thermo_elasticity_ess.py
|
Gkdnz/SfePy
|
a3a39d4e087705e9e0e8884cbf63513a2ded2108
|
[
"BSD-3-Clause"
] | null | null | null |
input_name = '../examples/multi_physics/thermo_elasticity_ess.py'
output_name = 'test_thermo_elasticity_ess.vtk'
from tests_basic import TestInput
class Test(TestInput):
pass
| 25.714286
| 65
| 0.811111
| 25
| 180
| 5.48
| 0.76
| 0.233577
| 0.277372
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 180
| 6
| 66
| 30
| 0.845679
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
80bf9522d06da3ca0067493a14d9e8c1ae06aa83
| 39
|
py
|
Python
|
easyDataverse/__init__.py
|
JR-1991/easyDataverse
|
f4ed6c4b84193207247cd28375f9dac47c77f7d8
|
[
"MIT"
] | 1
|
2022-01-19T14:47:14.000Z
|
2022-01-19T14:47:14.000Z
|
easyDataverse/__init__.py
|
JR-1991/easyDataverse
|
f4ed6c4b84193207247cd28375f9dac47c77f7d8
|
[
"MIT"
] | 1
|
2022-02-22T15:33:11.000Z
|
2022-02-22T15:34:08.000Z
|
easyDataverse/__init__.py
|
JR-1991/easyDataverse
|
f4ed6c4b84193207247cd28375f9dac47c77f7d8
|
[
"MIT"
] | null | null | null |
from easyDataverse.core import Dataset
| 19.5
| 38
| 0.871795
| 5
| 39
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
80ebe605592dc8cb9ae71ae594f35e113b7c9caa
| 139
|
py
|
Python
|
tests/zenko_tests/python_tests/zenko_e2e/fixtures/__init__.py
|
Public-Cloud-Projects/Zenko
|
d62a7bad90b69108bbf9ea1cbd2f9b983f1235b7
|
[
"Apache-2.0"
] | 453
|
2017-07-11T13:14:05.000Z
|
2022-03-31T20:50:01.000Z
|
tests/zenko_tests/python_tests/zenko_e2e/fixtures/__init__.py
|
Public-Cloud-Projects/Zenko
|
d62a7bad90b69108bbf9ea1cbd2f9b983f1235b7
|
[
"Apache-2.0"
] | 987
|
2017-07-11T16:56:45.000Z
|
2022-03-31T19:36:10.000Z
|
tests/zenko_tests/python_tests/zenko_e2e/fixtures/__init__.py
|
Public-Cloud-Projects/Zenko
|
d62a7bad90b69108bbf9ea1cbd2f9b983f1235b7
|
[
"Apache-2.0"
] | 100
|
2017-07-11T16:16:29.000Z
|
2022-03-11T15:07:54.000Z
|
from .backend import *
from .bucket import *
from .data import *
from .replication import *
from .object import *
from .lifecycle import *
| 19.857143
| 26
| 0.741007
| 18
| 139
| 5.722222
| 0.444444
| 0.485437
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172662
| 139
| 6
| 27
| 23.166667
| 0.895652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
03e69e14f24dd5c65abb80e9220c910dddf91c08
| 40
|
py
|
Python
|
tests/__init__.py
|
KSanthanam/RaspberryPiMovementDetector
|
a415e5aca22fa41ce0eba0b61cf56d3035f92f64
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
KSanthanam/RaspberryPiMovementDetector
|
a415e5aca22fa41ce0eba0b61cf56d3035f92f64
|
[
"MIT"
] | 1
|
2020-05-21T01:40:11.000Z
|
2020-05-26T03:44:08.000Z
|
tests/__init__.py
|
KSanthanam/RaspberryPiMovementDetector
|
a415e5aca22fa41ce0eba0b61cf56d3035f92f64
|
[
"MIT"
] | 1
|
2020-05-21T17:33:04.000Z
|
2020-05-21T17:33:04.000Z
|
from MovementDetector.Watch import Watch
| 40
| 40
| 0.9
| 5
| 40
| 7.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 40
| 1
| 40
| 40
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
206ee6dce1d82fde791de6be693e2f6f71dbce96
| 29
|
py
|
Python
|
python/app/__init__.py
|
manneohrstrom/fashion
|
1bbafdc88446fd12ee8ec4fcbf00a28762c9f67f
|
[
"Apache-2.0"
] | null | null | null |
python/app/__init__.py
|
manneohrstrom/fashion
|
1bbafdc88446fd12ee8ec4fcbf00a28762c9f67f
|
[
"Apache-2.0"
] | null | null | null |
python/app/__init__.py
|
manneohrstrom/fashion
|
1bbafdc88446fd12ee8ec4fcbf00a28762c9f67f
|
[
"Apache-2.0"
] | null | null | null |
from .dialog import AppDialog
| 29
| 29
| 0.862069
| 4
| 29
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2086aac577497edb949c911b9284f13b72ab9c3d
| 39
|
py
|
Python
|
hello.py
|
KamishDamani/MB215Lab1
|
e775cb40de55f9cf96b911748a49be31c891f540
|
[
"MIT"
] | null | null | null |
hello.py
|
KamishDamani/MB215Lab1
|
e775cb40de55f9cf96b911748a49be31c891f540
|
[
"MIT"
] | null | null | null |
hello.py
|
KamishDamani/MB215Lab1
|
e775cb40de55f9cf96b911748a49be31c891f540
|
[
"MIT"
] | null | null | null |
print("hello world from Kamish Damani")
| 39
| 39
| 0.794872
| 6
| 39
| 5.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
456b83c4bfcfe854878e47a58151bf0cfb62f611
| 256
|
py
|
Python
|
pastpy/impl/dbf/dbf_database_configuration.py
|
minorg/pastpy
|
81b26442046e11c6bd8f36586cf7db16ac9d8322
|
[
"BSD-2-Clause"
] | null | null | null |
pastpy/impl/dbf/dbf_database_configuration.py
|
minorg/pastpy
|
81b26442046e11c6bd8f36586cf7db16ac9d8322
|
[
"BSD-2-Clause"
] | 38
|
2018-03-18T00:48:26.000Z
|
2018-12-16T00:47:30.000Z
|
pastpy/impl/dbf/dbf_database_configuration.py
|
minorg/pastpy
|
81b26442046e11c6bd8f36586cf7db16ac9d8322
|
[
"BSD-2-Clause"
] | null | null | null |
from pathlib import Path
from typing import Optional, NamedTuple
class DbfDatabaseConfiguration(NamedTuple):
pp_images_dir_path: Optional[Path] = None
pp_install_dir_path: Optional[Path] = None
pp_objects_dbf_file_path: Optional[Path] = None
| 28.444444
| 51
| 0.792969
| 34
| 256
| 5.676471
| 0.5
| 0.186529
| 0.248705
| 0.310881
| 0.259067
| 0.259067
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144531
| 256
| 8
| 52
| 32
| 0.881279
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
45a5b92a04834f5e804a15204b391a8d4dc8028b
| 117
|
py
|
Python
|
okta/models/schema/UserProfileCustomSubschema.py
|
CloudRunnerInc/oktasdk-python
|
b2d19418d2bd0a8c1bbf3764a69e83c148e42a41
|
[
"Apache-2.0"
] | null | null | null |
okta/models/schema/UserProfileCustomSubschema.py
|
CloudRunnerInc/oktasdk-python
|
b2d19418d2bd0a8c1bbf3764a69e83c148e42a41
|
[
"Apache-2.0"
] | 3
|
2018-01-05T20:23:12.000Z
|
2019-03-06T12:00:58.000Z
|
okta/models/schema/UserProfileCustomSubschema.py
|
CloudRunnerInc/oktasdk-python
|
b2d19418d2bd0a8c1bbf3764a69e83c148e42a41
|
[
"Apache-2.0"
] | 1
|
2019-10-23T04:24:49.000Z
|
2019-10-23T04:24:49.000Z
|
from okta.models.schema import BaseCustomSubschema
class UserProfileCustomSubschema(BaseCustomSubschema):
pass
| 19.5
| 54
| 0.846154
| 10
| 117
| 9.9
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 117
| 5
| 55
| 23.4
| 0.951923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
afdabf1bd5812c511114258c48c9cb62a968d358
| 98
|
py
|
Python
|
cogs/__init__.py
|
gio101046/pyvis
|
db2a8525e687689c189d08f601a6e759e6c3be54
|
[
"MIT"
] | null | null | null |
cogs/__init__.py
|
gio101046/pyvis
|
db2a8525e687689c189d08f601a6e759e6c3be54
|
[
"MIT"
] | null | null | null |
cogs/__init__.py
|
gio101046/pyvis
|
db2a8525e687689c189d08f601a6e759e6c3be54
|
[
"MIT"
] | null | null | null |
from cogs.finance import Finance
from cogs.fun import Fun
from cogs.programming import Programming
| 32.666667
| 40
| 0.857143
| 15
| 98
| 5.6
| 0.4
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112245
| 98
| 3
| 40
| 32.666667
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
afe8e488807e1a95ce4277ee56afffc8240706ad
| 127
|
py
|
Python
|
quadpy/enr2/tools.py
|
gdmcbain/quadpy
|
c083d500027d7c1b2187ae06ff2b7fbdd360ccc7
|
[
"MIT"
] | 1
|
2019-01-02T19:04:42.000Z
|
2019-01-02T19:04:42.000Z
|
quadpy/enr2/tools.py
|
gdmcbain/quadpy
|
c083d500027d7c1b2187ae06ff2b7fbdd360ccc7
|
[
"MIT"
] | null | null | null |
quadpy/enr2/tools.py
|
gdmcbain/quadpy
|
c083d500027d7c1b2187ae06ff2b7fbdd360ccc7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
import numpy
def integrate(f, rule, dot=numpy.dot):
return dot(f(rule.points.T), rule.weights)
| 15.875
| 46
| 0.637795
| 20
| 127
| 4.05
| 0.7
| 0.123457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009434
| 0.165354
| 127
| 7
| 47
| 18.142857
| 0.754717
| 0.165354
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
b310f84b8a46ae7012911b45c9a3290489853c1d
| 55
|
py
|
Python
|
ProteinFeatureAnalyzer/__init__.py
|
Kortemme-Lab/protein_feature_analysis
|
fa2ae8bc6eb7ecf17e8bf802ab30814461868114
|
[
"MIT"
] | 6
|
2018-08-26T21:38:23.000Z
|
2021-08-13T02:43:38.000Z
|
ProteinFeatureAnalyzer/__init__.py
|
Kortemme-Lab/protein_feature_analysis
|
fa2ae8bc6eb7ecf17e8bf802ab30814461868114
|
[
"MIT"
] | null | null | null |
ProteinFeatureAnalyzer/__init__.py
|
Kortemme-Lab/protein_feature_analysis
|
fa2ae8bc6eb7ecf17e8bf802ab30814461868114
|
[
"MIT"
] | 1
|
2018-01-06T05:46:55.000Z
|
2018-01-06T05:46:55.000Z
|
from . import job_distributors
from . import features
| 13.75
| 30
| 0.8
| 7
| 55
| 6.142857
| 0.714286
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163636
| 55
| 3
| 31
| 18.333333
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b32e75e32c5538011f10b24468fbf72688da8173
| 106
|
py
|
Python
|
goprocam/exceptions.py
|
KonradIT/gopropy
|
57385c84e86b8c47bec504eae9f90756b5187286
|
[
"MIT"
] | null | null | null |
goprocam/exceptions.py
|
KonradIT/gopropy
|
57385c84e86b8c47bec504eae9f90756b5187286
|
[
"MIT"
] | null | null | null |
goprocam/exceptions.py
|
KonradIT/gopropy
|
57385c84e86b8c47bec504eae9f90756b5187286
|
[
"MIT"
] | null | null | null |
class CameraNotConnected(Exception):
pass
class WiredControlAlreadyEstablished(Exception):
pass
| 15.142857
| 48
| 0.792453
| 8
| 106
| 10.5
| 0.625
| 0.309524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150943
| 106
| 6
| 49
| 17.666667
| 0.933333
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
b3379bd83710258fa07d478b65882a28ceb8e9e7
| 58
|
py
|
Python
|
podpac/core/managers/test/test_aws.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 46
|
2018-04-06T19:54:32.000Z
|
2022-02-08T02:00:02.000Z
|
podpac/core/managers/test/test_aws.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 474
|
2018-04-05T22:21:09.000Z
|
2022-02-24T14:21:16.000Z
|
podpac/core/managers/test/test_aws.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 4
|
2019-04-11T17:49:53.000Z
|
2020-11-29T22:36:53.000Z
|
import pytest
import os
class TestAWS(object):
pass
| 8.285714
| 22
| 0.724138
| 8
| 58
| 5.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224138
| 58
| 6
| 23
| 9.666667
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
2fe8af559be65e24a9c277f2611aa5d3c071240c
| 1,040
|
py
|
Python
|
code-challanges/401/multi_bracket_validation/test_multi_bracket_validation.py
|
schoentr/data-structures-and-algorithms
|
535ac617a2ab32293014946b043bdb40a647d43b
|
[
"MIT"
] | null | null | null |
code-challanges/401/multi_bracket_validation/test_multi_bracket_validation.py
|
schoentr/data-structures-and-algorithms
|
535ac617a2ab32293014946b043bdb40a647d43b
|
[
"MIT"
] | 1
|
2019-03-11T02:13:58.000Z
|
2019-03-11T02:13:58.000Z
|
code-challanges/401_code_challenges/multi_bracket_validation/test_multi_bracket_validation.py
|
schoentr/data-structures-and-algorithms
|
535ac617a2ab32293014946b043bdb40a647d43b
|
[
"MIT"
] | null | null | null |
from stacks_and_queues.stacks_and_queues import Queue
from multi_bracket_validation.multi_bracket_validation import bracket_validation
def test_one():
assert bracket_validation('th{his(hlie}v[') == False
def test_two():
assert bracket_validation('{}') == True
def test_three():
assert bracket_validation('()') == True
def test_four():
assert bracket_validation('(') == False
def test_five():
assert bracket_validation(')') == False
def test_six():
assert bracket_validation('[]') == True
def test_seve():
assert bracket_validation('[') == False
def test_eight():
assert bracket_validation(']') == False
def test_nine():
assert bracket_validation('([dog]{cat})') == True
def test_ten():
assert bracket_validation('This(should} not ) pass') == False
def test_eleven():
assert bracket_validation('[this(sdf{sdf(sdfsdf)})]') == True
def test_twelve():
assert bracket_validation('{sdfsdf ()sdfsdf') == False
def test_thirteen():
assert bracket_validation('') == True
| 18.909091
| 80
| 0.691346
| 126
| 1,040
| 5.428571
| 0.309524
| 0.397661
| 0.437135
| 0.157895
| 0.353801
| 0.353801
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161538
| 1,040
| 54
| 81
| 19.259259
| 0.784404
| 0
| 0
| 0
| 0
| 0
| 0.095192
| 0.023077
| 0
| 0
| 0
| 0
| 0.464286
| 1
| 0.464286
| true
| 0.035714
| 0.071429
| 0
| 0.535714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.