hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7762969e468e630cab9912c9e39aa8105f563ce4 | 327 | py | Python | setup.py | zhangalbert/a | 466b10ded2c85e7fceea60a95f081cb0b11b5222 | [
"Apache-2.0"
] | null | null | null | setup.py | zhangalbert/a | 466b10ded2c85e7fceea60a95f081cb0b11b5222 | [
"Apache-2.0"
] | null | null | null | setup.py | zhangalbert/a | 466b10ded2c85e7fceea60a95f081cb0b11b5222 | [
"Apache-2.0"
] | null | null | null | from distutils.core import setup
setup(name='a',
version='0.1.0',
packages=['a', 'a.security'],
install_requires=['WebOb>=1.6.1'],
author = "albert.zhang",
author_email = "longbao.zhang@gmail.com",
description = "This is a very light web framework",
license = "Apache License 2.0",
)
| 27.25 | 57 | 0.611621 | 44 | 327 | 4.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.031496 | 0.223242 | 327 | 11 | 58 | 29.727273 | 0.748032 | 0 | 0 | 0 | 0 | 0 | 0.35474 | 0.070336 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.1 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
7762eadf98a84b5ce9283433a79e9899783c2908 | 4,833 | py | Python | Lectures/DeepLearningClass/chapter2/p01_perceptron.py | Tim232/Python-Things | 05f0f373a4cf298e70d9668c88a6e3a9d1cd8146 | [
"MIT"
] | 2 | 2020-12-05T07:42:55.000Z | 2021-01-06T23:23:18.000Z | Lectures/DeepLearningClass/chapter2/p01_perceptron.py | Tim232/Python-Things | 05f0f373a4cf298e70d9668c88a6e3a9d1cd8146 | [
"MIT"
] | null | null | null | Lectures/DeepLearningClass/chapter2/p01_perceptron.py | Tim232/Python-Things | 05f0f373a4cf298e70d9668c88a6e3a9d1cd8146 | [
"MIT"
] | null | null | null | print('====================================================================================================')
print('== ๋ฌธ์ 27. ์๋์ ์์ ํ์ด์ฌ์ผ๋ก ๊ตฌํํ์์ค.')
print('====================================================================================================\n')
import numpy as np
x = np.array([0, 1])
w = np.array([0.5, 0.5])
print(np.sum(w*x))
print('====================================================================================================')
print('== ๋ฌธ์ 28. ์์ ์์ ์ฑ
52์ชฝ์ ๋์ค๋ ํธํฅ์ ๋ํด์ ์์ฑํ ์๋์ ์์ ํ์ด์ฌ์ผ๋ก ๊ตฌํํ์์ค.')
print('====================================================================================================\n')
x = np.array([0, 1])
w = np.array([0.5, 0.5])
b = np.array([-0.7])
print(np.sum(w*x) + b)
print('====================================================================================================')
print('== ๋ฌธ์ 29. and ๊ฒ์ดํธ๋ฅผ ํ์ด์ฌ์ผ๋ก ๊ตฌํํ์์ค!')
print('====================================================================================================\n')
def AND(x1, x2):
x = np.array([x1, x2])
w = np.array([0.5, 0.5])
return 1 if np.sum(x*w)+b == 1 else 0
print('====================================================================================================')
print('== ๋ฌธ์ 30. ๋ฌธ์ 29๋ฒ์ ํธํฅ์ ํฌํจํด์ AND ๊ฒ์ดํธ ํจ์๋ฅผ ๊ตฌํํ์์ค!')
print('====================================================================================================\n')
def AND(x1, x2):
x = np.array([x1, x2])
w = np.array([0.5, 0.5])
b = -0.7
return 1 if np.sum(x*w)+b > 0 else 0
print('x1: 0, x2: 0 -> ', AND(0, 0))
print('x1: 0, x2: 1 -> ', AND(0, 1))
print('x1: 1, x2: 0 -> ', AND(1, 0))
print('x1: 1, x2: 1 -> ', AND(1, 1))
print('====================================================================================================')
print('== ๋ฌธ์ 32. OR ํจ์๋ฅผ ํ์ด์ฌ์ผ๋ก ๊ตฌํ์์ค!')
print('====================================================================================================\n')
def OR(x1, x2):
x = np.array([x1, x2])
w = np.array([0.5, 0.5])
b = -0.4
return 1 if np.sum(x*w)+b > 0 else 0
print('====================================================================================================')
print('== ๋ฌธ์ 33. XOR ํจ์๋ฅผ ํ์ด์ฌ์ผ๋ก ๊ตฌํ์์ค!')
print('====================================================================================================\n')
def NAND(x1, x2):
x = np.array([x1, x2])
w = np.array([-0.5, -0.5])
b = 0.7
return 1 if np.sum(x*w)+b > 0 else 0
def XOR(x1, x2):
return AND(OR(x1, x2), NAND(x1, x2))
print('====================================================================================================')
print('== ๋ฌธ์ 1. NCS ํ๊ฐ๋ฌธ์ ')
print('====================================================================================================\n')
import numpy as np
x = np.array([1, 2])
y = np.array([3, 4])
print(2*x + y)
print('====================================================================================================')
print('== ๋ฌธ์ 2. NCS ํ๊ฐ๋ฌธ์ ')
print('====================================================================================================\n')
def AND(x1, x2):
x = np.array([x1, x2])
w = np.array([0.5, 0.5])
b = -0.7
return 1 if np.sum(x*w)+b > 0 else 0
def NAND(x1, x2):
x = np.array([x1, x2])
w = np.array([-0.5, -0.5])
b = 0.7
return 1 if np.sum(x*w)+b > 0 else 0
def OR(x1, x2):
x = np.array([x1, x2])
w = np.array([0.5, 0.5])
b = -0.4
return 1 if np.sum(x*w)+b > 0 else 0
def XOR(x1, x2):
return AND(OR(x1, x2), NAND(x1, x2))
print('x1: 0, x2: 0 -> ', XOR(0, 0))
print('x1: 0, x2: 1 -> ', XOR(0, 1))
print('x1: 1, x2: 0 -> ', XOR(1, 0))
print('x1: 1, x2: 1 -> ', XOR(1, 1))
import numpy as np
def andPerceptron(x1, x2):
w1, w2, theta = 0.5, 0.5, 0.7
netInput = x1*w1 + x2*w2
if netInput <= theta:
return 0
elif netInput > theta:
return 1
def nandPerceptron(x1, x2):
w1, w2, theta = -0.5, -0.5, -0.7
netInput = x1*w1 + x2*w2
if netInput <= theta:
return 0
elif netInput > theta:
return 1
def orPerceptron(x1, x2):
w1, w2, bias = 0.5, 0.5, -0.2
netInput = x1*w1 + x2*w2 + bias
if netInput <= 0:
return 0
else:
return 1
def xorPerceptron(x1, x2):
return andPerceptron(orPerceptron(x1, x2), nandPerceptron(x1, x2))
inputData = np.array([[0,0],[0,1],[1,0],[1,1]])
print("---And Perceptron---")
for xs1 in inputData:
print(str(xs1) + " ==> " + str(andPerceptron(xs1[0], xs1[1])))
print("---Nand Perceptron---")
for xs2 in inputData:
print(str(xs2) + " ==> " + str(nandPerceptron(xs2[0], xs2[1])))
print("---Or Perceptron---")
for xs3 in inputData:
print(str(xs3) + " ==> " + str(orPerceptron(xs3[0], xs3[1])))
print("---XOr Perceptron---")
for xs3 in inputData:
print(str(xs3) + " ==> " + str(xorPerceptron(xs3[0], xs3[1]))) | 34.276596 | 111 | 0.354438 | 619 | 4,833 | 2.767367 | 0.119548 | 0.060712 | 0.02627 | 0.028021 | 0.642732 | 0.595447 | 0.553415 | 0.507881 | 0.497373 | 0.420315 | 0 | 0.066977 | 0.156631 | 4,833 | 141 | 112 | 34.276596 | 0.353288 | 0 | 0 | 0.621849 | 0 | 0 | 0.434009 | 0.334299 | 0 | 0 | 0 | 0 | 0 | 1 | 0.109244 | false | 0 | 0.02521 | 0.02521 | 0.268908 | 0.361345 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
77639b8b9fd4e3c9cd0c8b422d0c68a3e323eb36 | 239 | py | Python | Python/Transpose and Flatten/Solution.py | chessmastersan/HackerRank | 850319e6f79e7473afbb847d28edde7b2cdfc37d | [
"MIT"
] | 2 | 2019-08-07T19:58:20.000Z | 2019-08-27T00:06:09.000Z | Python/Transpose and Flatten/Solution.py | chessmastersan/HackerRank | 850319e6f79e7473afbb847d28edde7b2cdfc37d | [
"MIT"
] | 1 | 2020-06-11T19:09:48.000Z | 2020-06-11T19:09:48.000Z | Python/Transpose and Flatten/Solution.py | chessmastersan/HackerRank | 850319e6f79e7473afbb847d28edde7b2cdfc37d | [
"MIT"
] | 7 | 2019-08-27T00:06:11.000Z | 2021-12-11T10:01:45.000Z | #author SANKALP SAXENA
import numpy
size = input().split(" ")
n = int(size[0])
l = []
for i in range(0, n):
a = input().split(" ")
l.append(a)
arr = numpy.array(l, int)
trans = arr.transpose()
print(trans)
print(arr.flatten())
| 15.933333 | 26 | 0.606695 | 38 | 239 | 3.815789 | 0.631579 | 0.137931 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010363 | 0.192469 | 239 | 14 | 27 | 17.071429 | 0.740933 | 0.087866 | 0 | 0 | 0 | 0 | 0.009302 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.090909 | 0.181818 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
7763bc166d4d7ffcd7c15717edaf06b9a61c5b0b | 1,564 | py | Python | src/service.py | wisrovi/AWS_DIA_17NOV | bfe734197dd9658504ecea6a9f89162877d80fdf | [
"MIT"
] | null | null | null | src/service.py | wisrovi/AWS_DIA_17NOV | bfe734197dd9658504ecea6a9f89162877d80fdf | [
"MIT"
] | null | null | null | src/service.py | wisrovi/AWS_DIA_17NOV | bfe734197dd9658504ecea6a9f89162877d80fdf | [
"MIT"
] | null | null | null | from flask import Flask, app, jsonify, request, redirect, make_response
from leer_modelo import predecir
ALLOWED = ['png','jpg', 'jpeg', 'gif']
def evaluar_extension_archivo(filename):
tiene_punto = "." in filename
if tiene_punto:
extension_archivo = filename.split(".", 1)[1].lower()
if extension_archivo in ALLOWED:
return True
return False
nombres_parametros = {
"imagen":"file1"
}
html = """
<!doctype html>
<form method="POST" enctype="multipart/form-data">
<label for="fname">Elija su imagen a evaluar:</label>
<input type="file" id="fname" name="file1"><br><br>
<input type="submit" value="Evaluar con RNA">
</form>
"""
nombre_guardar_archivo = "recibido.jpg"
app = Flask(__name__)
@app.route("/RNA", methods=["POST", "GET"])
def recibir_archivo():
if request.method == "POST":
if nombres_parametros["imagen"] not in request.files:
redirect(request.url)
nombre_imagen_recibida = request.files["file1"]
if nombre_imagen_recibida.filename == "":
redirect(request.url)
if evaluar_extension_archivo(nombre_imagen_recibida.filename):
nombre_imagen_recibida.save(nombre_guardar_archivo)
# evaluacion por el modelo de RNA
rta = predecir(nombre_guardar_archivo)
return "La imagen recibida es un " + rta
return html
if __name__=="__main__":
#print(evaluar_extension_archivo("queso.jpg"))
app.run(host="0.0.0.0", port=2022, debug=True)
| 27.928571 | 71 | 0.642583 | 189 | 1,564 | 5.100529 | 0.470899 | 0.082988 | 0.082988 | 0.058091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010788 | 0.22954 | 1,564 | 55 | 72 | 28.436364 | 0.789212 | 0.048593 | 0 | 0.052632 | 0 | 0 | 0.232323 | 0.034343 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.052632 | 0 | 0.210526 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77640065b3e76d426adcd9f96234a785912b54fb | 1,220 | py | Python | system/detection.py | syazanihussin/FLUX | 22a1885c9ff7ef82cd306e4c3544998a71027b5d | [
"MIT"
] | null | null | null | system/detection.py | syazanihussin/FLUX | 22a1885c9ff7ef82cd306e4c3544998a71027b5d | [
"MIT"
] | 1 | 2019-05-26T04:58:17.000Z | 2019-05-30T15:30:51.000Z | system/detection.py | syazanihussin/FLUX | 22a1885c9ff7ef82cd306e4c3544998a71027b5d | [
"MIT"
] | null | null | null | from interface import implements, Interface
from keras.models import load_model
class IDetection(Interface):
def detect_fake_news(self, type, news):
pass
class Detection(implements(IDetection)):
def detect_fake_news(self, type, news):
# load detection model
detection_model = self.load_detection_model(type)
# predict probability
probabilities = detection_model.predict(news)
# get probability according to its assosiated class
class_label, fake_prob, real_prob = self.get_class_label(probabilities)
return class_label, fake_prob, real_prob
def load_detection_model(self, type):
if(type == 'content'):
return load_model('./model/content_model.h5')
elif(type == 'stance'):
return load_model('./model/stance_model.h5')
def get_class_label(self, probabilities):
for probability in probabilities:
fake_prob = probability[0]
real_prob = probability[1]
if(fake_prob > real_prob):
class_label = 'Fake'
elif(real_prob > fake_prob):
class_label = 'Real'
return class_label, fake_prob, real_prob
| 25.957447 | 79 | 0.64918 | 142 | 1,220 | 5.323944 | 0.274648 | 0.092593 | 0.074074 | 0.084656 | 0.195767 | 0.195767 | 0.161376 | 0 | 0 | 0 | 0 | 0.004484 | 0.268852 | 1,220 | 46 | 80 | 26.521739 | 0.843049 | 0.07377 | 0 | 0.16 | 0 | 0 | 0.060391 | 0.041741 | 0 | 0 | 0 | 0 | 0 | 1 | 0.16 | false | 0.04 | 0.08 | 0 | 0.48 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77650de3eab4c78cafaa8bab4bb47104bd26bb30 | 2,926 | py | Python | Spark-Example-FlightsData/flights_example.py | kiat/met-cs777 | 405aca20c8db4350a7a0d04607ff05f3c4e97c2f | [
"BSD-3-Clause"
] | 32 | 2020-07-02T00:51:13.000Z | 2022-03-31T22:31:30.000Z | Flight-Example/flights_example.py | pvkothapalli/MET-CS777 | 6825bec99581b04f9bcc328cdaa698faa2af7313 | [
"BSD-3-Clause"
] | 2 | 2021-02-17T00:42:23.000Z | 2021-02-18T17:28:38.000Z | Flight-Example/flights_example.py | pvkothapalli/MET-CS777 | 6825bec99581b04f9bcc328cdaa698faa2af7313 | [
"BSD-3-Clause"
] | 47 | 2020-07-04T15:29:15.000Z | 2022-03-25T05:08:17.000Z |
# https://s3.amazonaws.com/metcs777/flights.csv.bz2
# s3n://metcs777/flights.csv.bz2
# lines = sc.textFile("file:///home/kia/Data/Collected-Datasets/flight-delays/flight-delays/flights.csv")
lines = sc.textFile("s3://metcs777/flights.csv.bz2")
# Removing the Header Line from CSV file
linesHeader = lines.first()
header = sc.parallelize([linesHeader])
linesWithOutHeader = lines.subtract(header)
# The data is about the flights from different airports which includes following attributes
#[u'YEAR,MONTH,DAY,DAY_OF_WEEK,AIRLINE,FLIGHT_NUMBER,TAIL_NUMBER,ORIGIN_AIRPORT,DESTINATION_AIRPORT,SCHEDULED_DEPARTURE,DEPARTURE_TIME,DEPARTURE_DELAY,TAXI_OUT,WHEELS_OFF,SCHEDULED_TIME,ELAPSED_TIME,AIR_TIME,DISTANCE,WHEELS_ON,TAXI_IN,SCHEDULED_ARRIVAL,ARRIVAL_TIME,ARRIVAL_DELAY,DIVERTED,CANCELLED,CANCELLATION_REASON,AIR_SYSTEM_DELAY,SECURITY_DELAY,AIRLINE_DELAY,LATE_AIRCRAFT_DELAY,WEATHER_DELAY']
flights = linesWithOutHeader.map(lambda x: x.split(','))
# YEAR,MONTH,DAY,DAY_OF_WEEK,AIRLINE,FLIGHT_NUMBER,TAIL_NUMBER,ORIGIN_AIRPORT,DESTINATION_AIRPORT,SCHEDULED_DEPARTURE,DEPARTURE_TIME,DEPARTURE_DELAY, CANCELLED
mainFlightsData = flights.map(lambda p: (p[0], p[1] , p[2] , p[3], p[4] , p[5] , p[6], p[7] , p[8] , p[9], p[10], p[11], p[24] ))
# number 6 is ORIGIN_AIRPORT
flightsFromBoston = mainFlightsData.filter(lambda p: True if p[7] == "BOS" else False )
# Get the total number of Flights from BOS
flightsFromBoston.count()
# 107847 flights from Logan Airport in Boston
# Find the subset of flights departing on the weekend.
weekEndFlights = flightsFromBoston.filter(lambda p: True if (int(p[3]) == 6 or int(p[3]) ==7) else False )
weekEndFlights.count()
# 26092
#Q1 Find a list of Origin Airports
#Q2 Find a list of (Origin, Destination) pairs
#Q3 Find the Origin airport which had the largest departure delay in the month of January
#Q4 Find out which carrier has the largest delay on Weekends.
#Q5 Which airport has the most cancellation of flights?
#Q6 Find the percent of flights cancelled for each carrier.
#Q7 Find the largest departure delay for each carrier
#Q8 Find the largest departure delay for each carrier for each month
#Q9 For each carrier find the average Departure delay
#Q10 For each carrier find the average Departure delay for each month
#Q11 Which date of year has the highest rate of flight cancellations?
# Rate of flight cancellation is calculated by deviding number of canceled flights by total number of flights.
#Q12 Calculate the number of flights to each destination state
# For each carrier, for which state do they have the largest average delay?
# You will need the airline and airport data sets for this question.
# AirLine dataset https://s3.amazonaws.com/metcs777/airlines.csv or s3://metcs777/airlines.csv
# Airport dataset https://s3.amazonaws.com/metcs777/airports.csv or s3://metcs777/airports.csv
# add your own questions.
| 31.12766 | 400 | 0.775803 | 449 | 2,926 | 4.973274 | 0.36971 | 0.025078 | 0.037618 | 0.025526 | 0.262875 | 0.21854 | 0.188088 | 0.188088 | 0.112853 | 0.112853 | 0 | 0.031127 | 0.132604 | 2,926 | 93 | 401 | 31.462366 | 0.8487 | 0.757348 | 0 | 0 | 0 | 0 | 0.049327 | 0.043348 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77678f260d8155b4fbe2ca23186e87d0ca0dde81 | 1,510 | py | Python | setup.py | farirat/rima | eb2165d972256c32d4859c3bf54c5e4b589d780f | [
"Apache-2.0"
] | 2 | 2016-05-07T10:09:29.000Z | 2016-06-10T05:36:20.000Z | setup.py | farirat/rima | eb2165d972256c32d4859c3bf54c5e4b589d780f | [
"Apache-2.0"
] | null | null | null | setup.py | farirat/rima | eb2165d972256c32d4859c3bf54c5e4b589d780f | [
"Apache-2.0"
] | null | null | null | from setuptools import setup, find_packages
import sys, os
version = '0.0'
setup(name='rima',
version=version,
description="Minimalist Python REST API Framework",
long_description="""\
Minimalist Python REST API Framework
""",
classifiers=[
"Development Status :: 1 - Planning",
"Environment :: Web Environment",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Server",
], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='REST API',
author='Ghassen Telmoudi',
author_email='ghassen.telmoudi@gmail.com',
url='',
license='',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"tornado",
"mongoengine",
],
entry_points="""
# -*- Entry points: -*-
""",
)
| 34.318182 | 82 | 0.575497 | 140 | 1,510 | 6.128571 | 0.628571 | 0.060606 | 0.055944 | 0.06993 | 0.156177 | 0.100233 | 0 | 0 | 0 | 0 | 0 | 0.006458 | 0.282119 | 1,510 | 43 | 83 | 35.116279 | 0.785055 | 0.065563 | 0 | 0.1 | 0 | 0 | 0.514915 | 0.018466 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.05 | 0 | 0.05 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
7767ac0e64dfc02b7c55003a7ff7dca82f05c66d | 10,438 | py | Python | src/backend/aspen/workflows/nextstrain_run/export.py | chanzuckerberg/covidr | afe05d703d30ec18ac83944bfb551c313cb216c4 | [
"MIT"
] | 2 | 2021-01-13T18:37:20.000Z | 2021-01-16T02:17:38.000Z | src/backend/aspen/workflows/nextstrain_run/export.py | chanzuckerberg/covidr | afe05d703d30ec18ac83944bfb551c313cb216c4 | [
"MIT"
] | 16 | 2021-01-14T20:21:13.000Z | 2021-01-29T22:23:13.000Z | src/backend/aspen/workflows/nextstrain_run/export.py | chanzuckerberg/covidr | afe05d703d30ec18ac83944bfb551c313cb216c4 | [
"MIT"
] | null | null | null | import csv
import io
import json
from pathlib import Path
from typing import Any, Iterable, List, Mapping, MutableMapping, Set, Tuple
import click
from sqlalchemy import and_
from sqlalchemy.orm import aliased, joinedload, with_polymorphic
from aspen.config.config import Config
from aspen.database.connection import (
get_db_uri,
init_db,
session_scope,
SqlAlchemyInterface,
)
from aspen.database.models import (
AlignedGisaidDump,
Entity,
EntityType,
PathogenGenome,
PhyloRun,
PublicRepositoryType,
Sample,
UploadedPathogenGenome,
)
from aspen.database.models.workflow import Workflow
METADATA_CSV_FIELDS = [
"strain",
"virus",
"gisaid_epi_isl",
"genbank_accession",
"date",
"region",
"country",
"division",
"location",
"region_exposure",
"country_exposure",
"division_exposure",
"segment",
"length",
"host",
"age",
"sex",
"pango_lineage",
"GISAID_clade",
"originating_lab",
"submitting_lab",
"authors",
"url",
"title",
"paper_url",
"date_submitted",
]
@click.command("save")
@click.option("--phylo-run-id", type=int, required=True)
@click.option("sequences_fh", "--sequences", type=click.File("w"), required=False)
@click.option("selected_fh", "--selected", type=click.File("w"), required=False)
@click.option("metadata_fh", "--metadata", type=click.File("w"), required=False)
@click.option("builds_file_fh", "--builds-file", type=click.File("w"), required=True)
@click.option(
"county_sequences_fh", "--county-sequences", type=click.File("w"), required=False
)
@click.option(
"county_metadata_fh", "--county-metadata", type=click.File("w"), required=False
)
@click.option("--test", type=bool, is_flag=True)
def cli(
phylo_run_id: int,
sequences_fh: io.TextIOBase,
selected_fh: io.TextIOBase,
metadata_fh: io.TextIOBase,
builds_file_fh: io.TextIOBase,
county_sequences_fh: io.TextIOBase,
county_metadata_fh: io.TextIOBase,
test: bool,
):
if test:
print("Success!")
return
interface: SqlAlchemyInterface = init_db(get_db_uri(Config()))
with session_scope(interface) as session:
# this allows us to load the secondary tables of a polymorphic type. In this
# case, we want to load the inputs of a phylo run, provided the input is of type
# `PathogenGenome` and `AlignedGisaidDump`.
phylo_run_inputs = with_polymorphic(
Entity,
[PathogenGenome, AlignedGisaidDump],
flat=True,
)
phylo_run: PhyloRun = (
session.query(PhyloRun)
.filter(PhyloRun.workflow_id == phylo_run_id)
.options(
joinedload(PhyloRun.inputs.of_type(phylo_run_inputs)).undefer(
phylo_run_inputs.PathogenGenome.sequence
)
)
.one()
)
# If we're writing a file for all county-wide samples, generate it here.
if county_sequences_fh:
# Get all samples for the group
group = phylo_run.group
all_samples: Iterable[Sample] = (
session.query(Sample)
.filter(Sample.submitting_group_id == group.id)
.options(
joinedload(Sample.uploaded_pathogen_genome, innerjoin=True).undefer(
PathogenGenome.sequence
)
)
)
pathogen_genomes = [
sample.uploaded_pathogen_genome for sample in all_samples
]
# Write all those samples to the sequences/metadata files
write_sequences_files(
session, pathogen_genomes, county_sequences_fh, county_metadata_fh
)
# Populate builds.yaml file with values from the phylo_run template_args
# and write them to the filesystem
aspen_root = Path(__file__).parent.parent.parent.parent.parent
with (aspen_root / phylo_run.template_file_path).open("r") as build_template_fh:
build_template = build_template_fh.read()
template_args = (
phylo_run.template_args
if isinstance(phylo_run.template_args, Mapping)
else {}
)
builds_file_fh.write(build_template.format(**template_args))
# get all the children that are pathogen genomes
pathogen_genomes = [
inp for inp in phylo_run.inputs if isinstance(inp, PathogenGenome)
]
# get the aligned gisaid run info.
aligned_gisaid = [
inp for inp in phylo_run.inputs if isinstance(inp, AlignedGisaidDump)
][0]
if sequences_fh:
write_sequences_files(session, pathogen_genomes, sequences_fh, metadata_fh)
if selected_fh:
write_includes_file(session, phylo_run, pathogen_genomes, selected_fh)
print(
json.dumps(
{
"bucket": aligned_gisaid.s3_bucket,
"metadata_key": aligned_gisaid.metadata_s3_key,
"sequences_key": aligned_gisaid.sequences_s3_key,
}
)
)
def write_includes_file(session, phylo_run, pathogen_genomes, selected_fh):
# Create a list of the inputted pathogen genomes that are uploaded pathogen genomes
sample_ids: List[int] = [
pathogen_genome.sample_id
for pathogen_genome in pathogen_genomes
if isinstance(pathogen_genome, UploadedPathogenGenome)
]
# Write an includes.txt with the sample ID's.
sample_query = session.query(Sample).filter(Sample.id.in_(sample_ids))
for sample in sample_query:
public_identifier = sample.public_identifier
if public_identifier.lower().startswith("hcov-19"):
public_identifier = public_identifier[8:]
selected_fh.write(f"{public_identifier}\n")
for gisaid_id in phylo_run.gisaid_ids:
selected_fh.write(f"{gisaid_id}\n")
def write_sequences_files(session, pathogen_genomes, sequences_fh, metadata_fh):
# Create a list of the inputted pathogen genomes that are uploaded pathogen genomes
uploaded_pathogen_genomes = {
pathogen_genome
for pathogen_genome in pathogen_genomes
if isinstance(pathogen_genome, UploadedPathogenGenome)
}
sample_ids = {
uploaded_pathogen_genome.sample_id
for uploaded_pathogen_genome in uploaded_pathogen_genomes
}
sample_id_to_sample: Mapping[int, Sample] = {
sample.id: sample
for sample in session.query(Sample).filter(Sample.id.in_(sample_ids))
}
accession_input_alias = aliased(Entity)
pathogen_genome_id_repository_type_to_accession_names: Mapping[
Tuple[int, PublicRepositoryType], str
] = {
(
accession.get_parents(PathogenGenome)[0].entity_id,
PublicRepositoryType.from_entity_type(accession.entity_type),
): accession.public_identifier
# We have overlap between aligned gisaid file & aspen data.
for accession in session.query(Entity)
.join(Entity.producing_workflow)
.join(accession_input_alias, Workflow.inputs)
.filter(
and_(
Entity.entity_type.in_(
(
EntityType.GISAID_REPOSITORY_SUBMISSION,
EntityType.GENBANK_REPOSITORY_SUBMISSION,
)
),
accession_input_alias.id.in_(
{pathogen_genome.entity_id for pathogen_genome in pathogen_genomes}
),
)
)
}
aspen_samples: Set[str] = set()
metadata_csv_fh = csv.DictWriter(metadata_fh, METADATA_CSV_FIELDS, delimiter="\t")
metadata_csv_fh.writeheader()
for pathogen_genome in pathogen_genomes:
# find the corresponding sample
if isinstance(pathogen_genome, UploadedPathogenGenome):
sample_id = pathogen_genome.sample_id
else:
raise ValueError("pathogen genome of unknown type")
sample = sample_id_to_sample[sample_id]
aspen_samples.add(sample.public_identifier)
sequence = "".join(
[
line
for line in pathogen_genome.sequence.splitlines()
if not (line.startswith(">") or line.startswith(";"))
]
)
sequence = sequence.strip("Nn")
upload_date = None
if sample.sequencing_reads_collection is not None:
upload_date = sample.sequencing_reads_collection.upload_date.strftime(
"%Y-%m-%d"
)
elif sample.uploaded_pathogen_genome is not None:
upload_date = sample.uploaded_pathogen_genome.upload_date.strftime(
"%Y-%m-%d"
)
aspen_metadata_row: MutableMapping[str, Any] = {
"strain": sample.public_identifier,
"virus": "ncov",
"gisaid_epi_isl": pathogen_genome_id_repository_type_to_accession_names.get(
(pathogen_genome.entity_id, PublicRepositoryType.GISAID), ""
),
"genbank_accession": pathogen_genome_id_repository_type_to_accession_names.get(
(pathogen_genome.entity_id, PublicRepositoryType.GENBANK), ""
),
"date": sample.collection_date.strftime("%Y-%m-%d"),
"date_submitted": upload_date,
"region": sample.region.value,
"country": sample.country,
"division": sample.division,
"location": sample.location,
"region_exposure": sample.region.value,
"country_exposure": sample.country,
"division_exposure": sample.division,
"segment": "genome",
"length": len(sequence),
"host": "Human",
"age": "?",
"sex": "?",
"originating_lab": sample.sample_collected_by,
"submitting_lab": sample.submitting_group.name,
"authors": ", ".join(sample.authors),
"pango_lineage": sample.uploaded_pathogen_genome.pangolin_lineage,
}
metadata_csv_fh.writerow(aspen_metadata_row)
sequences_fh.write(f">{sample.public_identifier}\n")
sequences_fh.write(sequence)
sequences_fh.write("\n")
if __name__ == "__main__":
cli()
| 35.144781 | 91 | 0.627132 | 1,122 | 10,438 | 5.567736 | 0.213904 | 0.056027 | 0.024652 | 0.013446 | 0.245718 | 0.227789 | 0.191452 | 0.185369 | 0.171923 | 0.128382 | 0 | 0.001064 | 0.279651 | 10,438 | 296 | 92 | 35.263514 | 0.829765 | 0.079805 | 0 | 0.070313 | 0 | 0 | 0.087705 | 0.005214 | 0 | 0 | 0 | 0 | 0 | 1 | 0.011719 | false | 0 | 0.046875 | 0 | 0.0625 | 0.007813 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77685ce53cb05d06d03bc4a12c6065a439cae421 | 485 | py | Python | dictlearn_gpu/utils.py | mukheshpugal/dictlearn_gpu | 79265ddc439e2a406adbe7e505b1b6d61fa50d25 | [
"MIT"
] | null | null | null | dictlearn_gpu/utils.py | mukheshpugal/dictlearn_gpu | 79265ddc439e2a406adbe7e505b1b6d61fa50d25 | [
"MIT"
] | null | null | null | dictlearn_gpu/utils.py | mukheshpugal/dictlearn_gpu | 79265ddc439e2a406adbe7e505b1b6d61fa50d25 | [
"MIT"
] | null | null | null | import cupy as cp
import numpy as np
def to_gpu(mat):
    """Return *mat* as a CuPy array.

    Arrays already living on the GPU are returned untouched; anything else
    is copied to the device as float32.
    """
    if isinstance(mat, cp.ndarray):
        return mat
    return cp.asarray(mat, dtype=cp.float32)
def dct_dict_1d(n_atoms, size):
    """Build a 1-D DCT dictionary of shape ``(size, n_atoms)``.

    Column ``k`` is ``cos(t * k * pi / n_atoms)`` over ``t = 0..size-1``;
    every column except the constant one (k=0) is mean-centered, and all
    columns are normalized to unit Euclidean norm.
    """
    atoms = np.zeros((size, n_atoms))
    grid = np.arange(size)
    for atom_idx in range(n_atoms):
        column = np.cos(grid * atom_idx * np.pi / n_atoms)
        if atom_idx > 0:
            # Center all non-constant atoms so they are zero-mean.
            column = column - np.mean(column)
        column /= np.linalg.norm(column)
        atoms[:, atom_idx] = column
    return atoms
| 22.045455 | 62 | 0.560825 | 75 | 485 | 3.533333 | 0.493333 | 0.090566 | 0.090566 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012158 | 0.321649 | 485 | 21 | 63 | 23.095238 | 0.793313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.133333 | 0 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
7768c92d141b2480a0d707c158a88bcde5139d2f | 4,263 | py | Python | yaglm/opt/prox.py | ngierty/yaglm | 57e212b576af7c5c0f0d62f9d6ad2484b9e25f10 | [
"MIT"
] | 15 | 2021-10-19T15:00:12.000Z | 2022-03-24T22:24:54.000Z | yaglm/opt/prox.py | ngierty/yaglm | 57e212b576af7c5c0f0d62f9d6ad2484b9e25f10 | [
"MIT"
] | 13 | 2021-07-22T21:52:55.000Z | 2021-10-07T11:50:10.000Z | yaglm/opt/prox.py | ngierty/yaglm | 57e212b576af7c5c0f0d62f9d6ad2484b9e25f10 | [
"MIT"
] | 2 | 2021-11-12T18:45:12.000Z | 2021-12-14T14:57:42.000Z | import numpy as np
from yaglm.linalg_utils import euclid_norm
def soft_thresh(vec, thresh_vals):
    """
    The soft thresholding operator.

    Shrinks each entry of *vec* toward zero by the corresponding threshold,
    clipping at zero, while preserving the entry's sign.

    Parameters
    ----------
    vec: array-like
        The values to threshold

    thresh_vals: float, array-like
        The thresholding values

    Output
    -------
    vec_thresh: array-like
    """
    shrunk_magnitudes = np.fmax(np.abs(vec) - thresh_vals, 0)
    return np.sign(vec) * shrunk_magnitudes
# TODO: is this useful? If not remove it
# def prox_ridge_lasso(x, lasso_pen_val=1, lasso_weights=None,
# ridge_pen_val=1, ridge_weights=None, step=1):
# """
# Evaluates the proximal operator of
# f(x; step) = lasso_mul * sum_j lasso_weights_j |x_j|
# + 0.5 * ridge_mul * sum_j lasso_weights_j x_j^2
# Parameters
# ----------
# x: array-like
# The value at which to evaluate the prox operator.
# lasso_pen_val: float
# The multiplicative penalty value for the lasso penalty.
# lasso_weights: None, array-like
# The (optional) variable weights for the lasso penalty.
# ridge_pen_val: float
# The multiplicative penalty value for the ridge penalty.
# ridge_weights: None, array-like
# The (optional) variable weights for the ridge penalty.
# step: float
# The step size.
# Output
# ------
# prox_val: array-like
# The proximal operator.
# """
# lasso_pen_val = lasso_pen_val * step
# ridge_pen_val = ridge_pen_val * step
# if lasso_weights is None:
# lasso_weights = np.ones_like(x)
# thresh = lasso_pen_val * np.array(lasso_weights)
# if ridge_weights is None:
# ridge_weights = np.ones_like(x)
# mult = ridge_pen_val * np.array(ridge_weights)
# mult = 1 / (1 + mult)
# return soft_thresh(x * mult, thresh * mult)
# TODO: is this useful? If not remove it.
# def prox_ridge_perturb(x, prox, ridge_pen_val=1, step=1):
# """
# Evaluates the proximal operator of
# f(x) + 0.5 * ridge_pen_val ||x||_2^2
# e.g. see Theorem 6.13 of (Beck, 2017).
# Parameters
# ----------
# x: array-like
# The value at which to evaluate the prox operator.
# prox: callable(x, step) -> array-like
# The proximal operator of f.
# ridge_pen_val: float
# The ridge penalty value.
# step: float
# The step size.
# Output
# ------
# prox_val: array-like
# The proximal operator.
# References
# ----------
# Beck, A., 2017. First-order methods in optimization. Society for Industrial and Applied Mathematics.
# """
# denom = ridge_pen_val * step + 1
# return prox(x / denom, step=step / denom)
def L2_prox(x, mult):
    """
    Computes the proximal operator of mult * ||x||_2
    (block soft-thresholding: the whole vector is shrunk toward zero and
    collapses to exactly zero when its norm is at most *mult*).
    """
    magnitude = euclid_norm(x)
    if magnitude > mult:
        return x * (1 - mult / magnitude)
    return np.zeros_like(x)
def squared_l1_prox_pos(x, step=1, weights=None, check=False):
    """
    prox_{step * f}(x) for positive vectors x where

    f(z) = (sum_i w_i |z_i|)^2

    Parameters
    ----------
    x: array-like
        The vector to evaluate the prox at. Note this must be positive.

    step: float
        The prox step size.

    weights: array-like
        The (optional) positive weights.

    check: bool
        Whether or not to check that x is non-negative.

    Output
    ------
    p: array-like
        The value of the proximal operator.

    References
    ----------
    Lin, M., Sun, D., Toh, K.C. and Yuan, Y., 2019. A dual Newton based preconditioned proximal point algorithm for exclusive lasso models. arXiv preprint arXiv:1902.00151.
    """
    if check:
        assert all(x >= 0)

    if weights is None:
        weights = np.ones_like(x)

    # Sort entries of x / weights in decreasing order.
    order = np.argsort(x / weights)[::-1]
    x_desc = x[order]
    w_desc = weights[order]

    # Threshold level comes from the running sums over the sorted entries.
    running_sum = np.cumsum(x_desc * w_desc)
    running_sq_norm = np.cumsum(w_desc ** 2)
    alpha_bar = max(running_sum / (1 + 2 * step * running_sq_norm))

    # Soft threshold with the computed per-coordinate levels.
    return np.maximum(x - 2 * step * alpha_bar * weights, 0)
| 25.076471 | 172 | 0.604269 | 597 | 4,263 | 4.157454 | 0.268007 | 0.033844 | 0.058018 | 0.033844 | 0.311039 | 0.267123 | 0.257051 | 0.257051 | 0.239323 | 0.172442 | 0 | 0.017286 | 0.280788 | 4,263 | 169 | 173 | 25.224852 | 0.792237 | 0.726484 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005917 | 0.041667 | 1 | 0.125 | false | 0 | 0.083333 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77692efc43ca99a211134cc384641108b1111a00 | 902 | py | Python | HW-1.py | SESCNCFUARTYOM/repforkeys | a20b14f16f3dd0df4f95b9927855b77bde7ef8e3 | [
"MIT"
] | null | null | null | HW-1.py | SESCNCFUARTYOM/repforkeys | a20b14f16f3dd0df4f95b9927855b77bde7ef8e3 | [
"MIT"
] | null | null | null | HW-1.py | SESCNCFUARTYOM/repforkeys | a20b14f16f3dd0df4f95b9927855b77bde7ef8e3 | [
"MIT"
] | null | null | null | n, i, m, g, gg = 0, 0, 0, 0, 0
n = int(input())
if n > 1983:
while n > 1983:
n -= 60
m = n - 1923
else:
while n < 1983:
n += 60
m = n - 1983
g = (m // 12) + 1
gg = m % 12
result = ''
if g == 1:
result = 'ะทะตะปะตะฝะพะน'
elif g == 2:
result = 'ะบัะฐัะฝะพะน'
elif g == 3:
result = 'ะถะตะปัะพะน'
elif g == 4:
result = 'ะฑะตะปะพะน'
elif g == 5:
result = 'ัะตัะฝะพะน'
elif g == 6:
result = 'ัะตัะฝะพะน'
if gg == 1:
result += 'ะบัััั'
elif gg == 2:
result += 'ะบะพัะพะฒั'
elif gg == 3:
result += ' ัะธะณัะฐ'
elif gg == 4:
result += ' ะทะฐะนัะฐ'
elif gg == 5:
result += ' ะดัะฐะบะพะฝะฐ'
elif gg == 6:
result += ' ะทะผะตะธ'
elif gg == 7:
result += ' ะปะพัะฐะดะธ'
elif gg == 8:
result += ' ะพะฒัั'
elif gg == 9:
result += ' ะพะฑะตะทััะฝั'
elif gg == 10:
result += ' ะบััะธัั'
elif gg == 11:
result += ' ัะพะฑะฐะบะธ'
elif gg == 0:
result += ' ัะฒะธะฝัะธ'
print(result) | 15.824561 | 30 | 0.473392 | 133 | 902 | 3.210526 | 0.345865 | 0.154567 | 0.021077 | 0.018735 | 0.070258 | 0.070258 | 0.070258 | 0 | 0 | 0 | 0 | 0.091681 | 0.347007 | 902 | 57 | 31 | 15.824561 | 0.633277 | 0 | 0 | 0.04 | 0 | 0 | 0.127353 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.02 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77694ac8b2dc5a16098243dd7f10b1803ad2cd33 | 1,961 | py | Python | App/workflow.py | dataminion/ScienceManager | 924e1de41aca56985d9e30750264eb995604f352 | [
"MIT"
] | null | null | null | App/workflow.py | dataminion/ScienceManager | 924e1de41aca56985d9e30750264eb995604f352 | [
"MIT"
] | null | null | null | App/workflow.py | dataminion/ScienceManager | 924e1de41aca56985d9e30750264eb995604f352 | [
"MIT"
] | null | null | null | #workflow
import sys
##python libraries
import argparse
import logging
import logging.config
##
## Internal libraries
from service.configuration import Configuration
from service.job import Job
from model.object import Object
from model.connection import ConnectionSettings
from model.workflow import *
class Program(object):
    """Holds job parameters and drives a single workflow run.

    NOTE(review): ``Log``, ``M`` (model state), ``S`` (services) and ``Args``
    are *class-level* attributes, so they are shared by every instance, and
    the configuration file is read (and logging configured) when this class
    is defined, i.e. at import time.
    """
    Log = logging
    M = Object()
    S = Object()
    Args = Object()
    M.Workflow = Workflow()
    M.Tasks = []
    # Config load + logging setup run as class-body statements (import time).
    S.Conf = Configuration(file='./var/cfg.yaml')
    logging.config.dictConfig(S.Conf.get_logging())
    def __init__(self):
        pass
    def main(self):
        """Parse CLI arguments; when a program was named, run the job pipeline."""
        self._handle_arguments()
        program = None
        program = self.Args.program
        if program is not None:
            # Wire up the database connection and the job service.
            self.M.conn = self.S.Conf.set_database_connection(ConnectionSettings())
            self.S.job = Job(self.Log, self.M.conn)
            # Populate workflow details for the requested program.
            self.M.Workflow = self.S.job.get_job_details(self.M.Workflow, self.Args.program)
            # Register this run; args (1, 1) are fixed here — meaning not visible in this chunk.
            self.M.Workflow.batch_id = self.S.job.register_job(1, 1)
            # Expand the workflow into its task list.
            self.M.Workflow.tasks = self.S.job.setup_job(self.M.Workflow.id)
            # Execute each task in turn.
            self.S.job.process_job_items(self.M.Workflow.tasks)
    def _handle_arguments(self):
        """Parse command-line options into ``self.Args``.

        Prints help and exits with status 1 when no arguments were given.
        """
        self.Log.info('handling arguments')
        parser = argparse.ArgumentParser(description='Teradata "Science Manager" v0.1 ')
        parser.add_argument('-p', '--program', help='set the program to be executed')
        parser.add_argument('-l', '--list', help='get a list of available programs')
        if len(sys.argv) == 1:
            parser.print_help()
            sys.exit(1)
        parser.parse_args(namespace=self.Args)
class Error(Exception):
    """Base workflow error.

    Keeps the legacy ``message`` attribute, and also forwards the message to
    ``Exception.__init__`` so ``args``, ``repr`` and pickling behave like a
    normal exception (the original skipped the base initializer, leaving
    ``args`` empty).
    """
    def __init__(self, m='Workflow error occurred.'):
        super(Error, self).__init__(m)
        self.message = m
    def __str__(self):
        return self.message
# Script entry point: build a Program and run the workflow.
if __name__ == "__main__":
    Program().main()
776b5a586a307c515edd0602fb6ce773dfcc4ddd | 5,969 | py | Python | cloudify_aws/kms/tests/test_key.py | jrzeszutek/cloudify-aws-plugin | 59832b4ac5ddad496110085ed2e21dd36db5e9df | [
"Apache-2.0"
] | 13 | 2015-05-28T23:21:05.000Z | 2022-03-20T05:38:20.000Z | cloudify_aws/kms/tests/test_key.py | jrzeszutek/cloudify-aws-plugin | 59832b4ac5ddad496110085ed2e21dd36db5e9df | [
"Apache-2.0"
] | 49 | 2015-01-04T16:05:34.000Z | 2022-03-27T11:35:13.000Z | cloudify_aws/kms/tests/test_key.py | jrzeszutek/cloudify-aws-plugin | 59832b4ac5ddad496110085ed2e21dd36db5e9df | [
"Apache-2.0"
] | 41 | 2015-01-21T17:16:05.000Z | 2022-03-31T06:47:48.000Z | # Copyright (c) 2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Standard imports
import unittest
# Third party imports
from mock import MagicMock
# Local imports
from cloudify_aws.common.tests.test_base import CLIENT_CONFIG
from cloudify_aws.kms.tests.test_kms import TestKMS
from cloudify_aws.kms.resources import key
# Constants

# Node type hierarchy of the CustomerMasterKey test node.
KEY_TH = ['cloudify.nodes.Root',
          'cloudify.nodes.aws.kms.CustomerMasterKey']

# Blueprint-style node properties handed to the mocked cloudify context.
NODE_PROPERTIES = {
    'use_external_resource': False,
    'resource_config': {
        "kwargs": {
            "Description": "An example CMK.",
            "Tags": [{
                "TagKey": "Cloudify",
                "TagValue": "Example"
            }]
        }
    },
    'client_config': CLIENT_CONFIG
}

# Runtime properties before the create operation runs.
RUNTIME_PROPERTIES = {
    'resource_config': {}
}

# Runtime properties expected after a successful create.
RUNTIME_PROPERTIES_AFTER_CREATE = {
    'aws_resource_arn': 'arn_id',
    'aws_resource_id': 'key_id',
    'resource_config': {}
}
class TestKMSKey(TestKMS):
    """Unit tests for the cloudify AWS KMS CustomerMasterKey node type.

    The enable/disable/delete lifecycle tests and the KMSKey interface tests
    shared near-identical setup; that duplication is factored into the two
    private helpers below.
    """

    def _make_iface(self):
        """Build the KMSKey interface object every interface test uses."""
        return key.KMSKey("ctx_node", resource_id='queue_id',
                          client=self.fake_client, logger=None)

    def _run_lifecycle_op(self, operation):
        """Run a post-create lifecycle operation and verify the common
        expectations: boto client built with CLIENT_CONFIG, and runtime
        properties left unchanged."""
        _ctx = self._prepare_context(
            KEY_TH, NODE_PROPERTIES, RUNTIME_PROPERTIES_AFTER_CREATE
        )
        self.fake_client.schedule_key_deletion = MagicMock(return_value={})
        operation(ctx=_ctx, resource_config=None, iface=None)
        self.fake_boto.assert_called_with('kms', **CLIENT_CONFIG)
        self.assertEqual(
            _ctx.instance.runtime_properties,
            RUNTIME_PROPERTIES_AFTER_CREATE
        )

    def test_prepare(self):
        self._prepare_check(
            type_hierarchy=KEY_TH,
            type_name='kms',
            type_class=key
        )

    def test_create_raises_UnknownServiceError(self):
        self._prepare_create_raises_UnknownServiceError(
            type_hierarchy=KEY_TH,
            type_name='kms',
            type_class=key
        )

    def test_create(self):
        _ctx = self._prepare_context(
            KEY_TH, NODE_PROPERTIES
        )
        self.fake_client.create_key = MagicMock(return_value={
            'KeyMetadata': {
                'Arn': "arn_id",
                'KeyId': 'key_id'
            }
        })
        key.create(ctx=_ctx, resource_config=None, iface=None)
        self.fake_boto.assert_called_with('kms', **CLIENT_CONFIG)
        # The kwargs from NODE_PROPERTIES must be forwarded to boto.
        self.fake_client.create_key.assert_called_with(
            Description='An example CMK.',
            Tags=[{'TagKey': 'Cloudify', 'TagValue': 'Example'}]
        )
        self.assertEqual(
            _ctx.instance.runtime_properties,
            RUNTIME_PROPERTIES_AFTER_CREATE
        )

    def test_enable(self):
        self._run_lifecycle_op(key.enable)

    def test_disable(self):
        self._run_lifecycle_op(key.disable)

    def test_delete(self):
        self._run_lifecycle_op(key.delete)
        # Delete additionally schedules the key for deletion by id.
        self.fake_client.schedule_key_deletion.assert_called_with(
            KeyId='key_id'
        )

    def test_KMSKey_status(self):
        self.assertEqual(self._make_iface().status, None)

    def test_KMSKey_properties(self):
        # Without a describe_key response there are no properties.
        self.assertEqual(self._make_iface().properties, None)

    def test_KMSKey_properties_with_key(self):
        test_instance = self._make_iface()
        self.fake_client.describe_key = MagicMock(
            return_value={'KeyMetadata': 'z'}
        )
        self.assertEqual(test_instance.properties, 'z')

    def test_KMSKey_enable(self):
        test_instance = self._make_iface()
        self.fake_client.enable_key = MagicMock(
            return_value={'KeyMetadata': 'y'}
        )
        self.assertEqual(
            test_instance.enable({'a': 'b'}),
            {'KeyMetadata': 'y'}
        )
        self.fake_client.enable_key.assert_called_with(a='b')

    def test_KMSKey_disable(self):
        test_instance = self._make_iface()
        self.fake_client.disable_key = MagicMock(
            return_value={'KeyMetadata': 'y'}
        )
        self.assertEqual(
            test_instance.disable({'a': 'b'}),
            {'KeyMetadata': 'y'}
        )
        self.fake_client.disable_key.assert_called_with(a='b')
# Allow running this test module directly with `python test_key.py`.
if __name__ == '__main__':
    unittest.main()
| 28.42381 | 75 | 0.626906 | 669 | 5,969 | 5.273543 | 0.22571 | 0.045351 | 0.063492 | 0.063492 | 0.637188 | 0.573696 | 0.556406 | 0.540533 | 0.540533 | 0.496315 | 0 | 0.001847 | 0.27425 | 5,969 | 209 | 76 | 28.559809 | 0.812558 | 0.108393 | 0 | 0.367647 | 0 | 0 | 0.094268 | 0.011501 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.080882 | false | 0 | 0.036765 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
776c1f07ea108fe03cb9a7205323ee4e16411b3f | 10,184 | py | Python | terraref/laser3d/laser3d.py | terraref/ply2las | 90c791c64e2d94effa7d155228bf306e2f3fa29c | [
"BSD-3-Clause"
] | 1 | 2018-09-27T05:45:56.000Z | 2018-09-27T05:45:56.000Z | terraref/laser3d/laser3d.py | terraref/laser3d | 90c791c64e2d94effa7d155228bf306e2f3fa29c | [
"BSD-3-Clause"
] | 3 | 2018-05-16T19:40:26.000Z | 2018-12-14T20:02:53.000Z | terraref/laser3d/laser3d.py | terraref/ply2las | 90c791c64e2d94effa7d155228bf306e2f3fa29c | [
"BSD-3-Clause"
] | 1 | 2018-07-31T13:34:13.000Z | 2018-07-31T13:34:13.000Z | import subprocess
import numpy
import os
import laspy
from osgeo import gdal
from plyfile import PlyData, PlyElement
import matplotlib.pyplot as plt
#from rpy2.robjects import r, pandas2ri, numpy2ri
from terrautils.formats import create_geotiff
from terrautils.spatial import scanalyzer_to_mac
def ply_to_array(inp, md, utm):
    """Read PLY files into a numpy matrix.

    Merges the vertices of all input PLY files into flat coordinate arrays,
    correcting gantry-fixed coordinates with per-side camera-box offsets and
    (optionally) converting to UTM via ``scanalyzer_to_mac``.

    :param inp: list of input PLY files or single file path
    :param md: metadata for the PLY files (reads scan_distance_mm,
        scan_direction and point_cloud_origin_m from sensor_variable_metadata)
    :param utm: True to return coordinates in UTM, False to return gantry fixed coordinates
    :return: tuple of (x_points, y_points, z_points, utm_bounds) where
        utm_bounds = (min_y_utm, max_y_utm, min_x_utm, max_x_utm); z is
        always UTM/metric regardless of *utm*
    """
    if not isinstance(inp, list):
        inp = [inp]

    # Scan length in metres, direction flag, and recorded point-cloud origin.
    scandist = float(md['sensor_variable_metadata']['scan_distance_mm'])/1000.0
    scan_dir = int(md['sensor_variable_metadata']['scan_direction'])
    pco = md['sensor_variable_metadata']['point_cloud_origin_m']['east']

    # Create concatenated list of vertices to generate one merged LAS file
    first = True
    for plyf in inp:
        # Side is inferred from the file name; each side has its own
        # camera-box offset (x, y, z).
        if plyf.find("west") > -1:
            curr_side = "west"
            cambox = [2.070, 2.726, 1.135]
        else:
            curr_side = "east"
            cambox = [2.070, 0.306, 1.135]

        plydata = PlyData.read(plyf)
        merged_x = plydata['vertex']['x']
        merged_y = plydata['vertex']['y']
        merged_z = plydata['vertex']['z']

        # Attempt fix using math from terrautils.spatial.calculate_gps_bounds
        # NOTE(review): the literal offsets below (0.082, -0.354, -4.363,
        # 4.2, -3.43, -0.1, +0.4) look empirically tuned per side and scan
        # direction — confirm provenance before changing.
        fix_x = merged_x + cambox[0] + 0.082
        if scan_dir == 0:
            fix_y = merged_y + float(2.0*float(cambox[1])) - scandist/2.0 + (
                -0.354 if curr_side == 'east' else -4.363)
            utm_x, utm_y = scanalyzer_to_mac(
                (fix_x * 0.001) + pco['x'],
                (fix_y * 0.001) + pco['y']/2.0 - 0.1
            )
        else:
            fix_y = merged_y + float(2.0*float(cambox[1])) - scandist/2.0 + (
                4.2 if curr_side == 'east' else -3.43)
            utm_x, utm_y = scanalyzer_to_mac(
                (fix_x * 0.001) + pco['x'],
                (fix_y * 0.001) + pco['y']/2.0 + 0.4
            )
        fix_z = merged_z + cambox[2]
        utm_z = (fix_z * 0.001)+ pco['z']

        # Create matrix of fixed gantry coords for TIF, but min/max of UTM coords for georeferencing
        if first:
            if utm:
                x_pts = utm_x
                y_pts = utm_y
            else:
                x_pts = fix_x
                y_pts = fix_y
            z_pts = utm_z

            min_x_utm = numpy.min(utm_x)
            min_y_utm = numpy.min(utm_y)
            max_x_utm = numpy.max(utm_x)
            max_y_utm = numpy.max(utm_y)
            first = False
        else:
            if utm:
                x_pts = numpy.concatenate([x_pts, utm_x])
                y_pts = numpy.concatenate([y_pts, utm_y])
            else:
                x_pts = numpy.concatenate([x_pts, fix_x])
                y_pts = numpy.concatenate([y_pts, fix_y])
            z_pts = numpy.concatenate([z_pts, utm_z])

            # Grow the UTM bounding box to cover every file seen so far.
            min_x_utm2 = numpy.min(utm_x)
            min_y_utm2 = numpy.min(utm_y)
            max_x_utm2 = numpy.max(utm_x)
            max_y_utm2 = numpy.max(utm_y)

            min_x_utm = min_x_utm if min_x_utm < min_x_utm2 else min_x_utm2
            min_y_utm = min_y_utm if min_y_utm < min_y_utm2 else min_y_utm2
            max_x_utm = max_x_utm if max_x_utm > max_x_utm2 else max_x_utm2
            max_y_utm = max_y_utm if max_y_utm > max_y_utm2 else max_y_utm2

    bounds = (min_y_utm, max_y_utm, min_x_utm, max_x_utm)
    return (x_pts, y_pts, z_pts, bounds)
def generate_las_from_ply(inp, out, md, utm=True):
    """Read PLY file to array and write that array to an LAS file.

    :param inp: list of input PLY files or single file path
    :param out: output LAS file
    :param md: metadata for the PLY files
    :param utm: True to return coordinates in UTM, False to return gantry fixed coordinates
    :return: the UTM bounds tuple produced by ply_to_array
    """
    (x_pts, y_pts, z_pts, bounds) = ply_to_array(inp, md, utm)

    # Create header and populate with scale and offset
    w = laspy.base.Writer(out, 'w', laspy.header.Header())
    w.header.offset = [numpy.floor(numpy.min(y_pts)),
                       numpy.floor(numpy.min(x_pts)),
                       numpy.floor(numpy.min(z_pts))]
    # Micro-scale for UTM coordinates; unit scale for gantry-fixed x/y.
    if utm:
        w.header.scale = [.000001, .000001, .000001]
    else:
        w.header.scale = [1, 1, .000001]
    # NOTE(review): x and y are swapped when written (LAS x gets y_pts and
    # vice versa), consistently with the header min/max below — presumably
    # an axis-convention conversion; confirm before changing.
    w.set_x(y_pts, True)
    w.set_y(x_pts, True)
    w.set_z(z_pts, True)
    w.set_header_property("x_max", numpy.max(y_pts))
    w.set_header_property("x_min", numpy.min(y_pts))
    w.set_header_property("y_max", numpy.max(x_pts))
    w.set_header_property("y_min", numpy.min(x_pts))
    w.set_header_property("z_max", numpy.max(z_pts))
    w.set_header_property("z_min", numpy.min(z_pts))
    w.close()

    return bounds
def generate_tif_from_ply(inp, out, md, mode='max'):
    """
    Create a raster (e.g. Digital Surface Map) from LAS pointcloud.

    Writes an intermediate LAS file, runs a PDAL pipeline (external `pdal`
    binary) to rasterize it, then georeferences the result using the UTM
    bounds derived from the PLY data.

    :param inp: input LAS file
    :param out: output TIF file
    :param md: metadata for the PLY files
    :param mode: max | min | mean | idx | count | stdev (https://pdal.io/stages/writers.gdal.html)
    """
    # Intermediate file paths derived from the output name.
    pdal_dtm = out.replace(".tif", "_dtm.json")
    las_raw = out.replace(".tif", "_temp.las")
    tif_raw = out.replace(".tif", "unreferenced.tif")
    bounds = generate_las_from_ply(inp, las_raw, md, False)

    if not os.path.exists(tif_raw):
        # Generate a temporary JSON file with PDAL pipeline for conversion to TIF and execute it
        with open(pdal_dtm, 'w') as dtm:
            dtm.write("""{
                "pipeline": [
                    "%s",
                    {
                        "filename":"%s",
                        "output_type":"%s",
                        "resolution": 1,
                        "type": "writers.gdal"
                    }
                ]
            }""" % (las_raw, tif_raw, mode))
            # "gdalopts": "t_srs=epsg:32612"

        # NOTE(review): shell=True with an interpolated path — fine while
        # paths are internally generated, unsafe if `out` ever comes from
        # untrusted input.
        cmd = 'pdal pipeline %s' % pdal_dtm
        subprocess.call([cmd], shell=True)
        os.remove(las_raw)

    # Georeference the unreferenced TIF file according to PLY UTM bounds
    ds = gdal.Open(tif_raw)
    px = ds.GetRasterBand(1).ReadAsArray()
    #if scan_dir == 0:
    #    px = numpy.rot90(px, 2)
    #    x = numpy.fliplr(px)
    create_geotiff(px, bounds, out, asfloat=True)
    os.remove(tif_raw)
def las_to_height(in_file, out_file=None):
    """Return a tuple of (height histogram, max height) from an LAS file.

    :param in_file: path to the input LAS file
    :param out_file: optional CSV path; when given, one ``bin,height_cm,count``
        row is written per histogram bin
    :return: (height_hist, max_height) — histogram counts over 500 unit-wide
        bins of the raw Z values, and the maximum raw Z (0 for empty files)
    """
    number_of_bins = 500
    height_hist = numpy.zeros(number_of_bins)

    las_handle = laspy.file.File(in_file)
    zData = las_handle.Z  # raw integer Z records; column name suggests cm — confirm
    if zData.size == 0:
        return height_hist, 0

    max_height = numpy.max(zData)
    # BUG FIX: the `normed=` keyword was removed from numpy.histogram
    # (numpy >= 1.24); it was False (no normalization, the default) anyway,
    # so it is simply dropped.
    height_hist = numpy.histogram(zData, bins=range(-1, number_of_bins))[0]

    if out_file:
        # Context manager guarantees the CSV is closed even if a write fails
        # (the original never closed the handle).
        with open(out_file, 'w') as out:
            out.write("bin,height_cm,count\n")
            for i in range(len(height_hist)):
                out.write("%s,%s,%s\n" % (i+1, "%s-%s" % (i, i+1), height_hist[i]))

    return height_hist, max_height
def load_tif_vector(heightmap_tif):
    """Load band 1 of a heightmap GeoTIFF as a 2-D array, with the -9999
    nodata sentinel replaced by NaN, for use by the other tif_* helpers."""
    dataset = gdal.Open(heightmap_tif)
    band_values = numpy.array(dataset.GetRasterBand(1).ReadAsArray())
    band_values[band_values == -9999.] = numpy.nan
    return band_values
def tif_sample(geotiff, sample_num=1000, vector=None):
    """Return a random sample of non-NaN heightmap values.

    :param geotiff: path to the heightmap GeoTIFF (ignored when *vector* given)
    :param sample_num: number of values to draw (with replacement)
    :param vector: use an already-loaded vector instead of reloading
    """
    # BUG FIX: was `if not vector:`, which raises "truth value of an array
    # is ambiguous" whenever a multi-element numpy array is passed in.
    if vector is None:
        vector = load_tif_vector(geotiff)
    return numpy.random.choice(vector[~numpy.isnan(vector)], sample_num)
def tif_mean(geotiff, vector=None):
    """Get the average of geotiff values, ignoring NaNs.

    :param geotiff: path to the heightmap GeoTIFF (ignored when *vector* given)
    :param vector: use an already-loaded vector instead of reloading
    """
    # BUG FIX: was `if not vector:`, which raises "truth value of an array
    # is ambiguous" whenever a multi-element numpy array is passed in.
    if vector is None:
        vector = load_tif_vector(geotiff)
    return numpy.nanmean(vector)
def tif_var(geotiff, vector=None):
    """Get the variance of geotiff values, ignoring NaNs.

    :param geotiff: path to the heightmap GeoTIFF (ignored when *vector* given)
    :param vector: use an already-loaded vector instead of reloading
    """
    # BUG FIX: was `if not vector:`, which raises "truth value of an array
    # is ambiguous" whenever a multi-element numpy array is passed in.
    if vector is None:
        vector = load_tif_vector(geotiff)
    return numpy.nanvar(vector)
def tif_hist(geotiff, save=False, vector=None):
    """Show (or save) a normalized histogram of geotiff values.

    :param geotiff: path to the heightmap GeoTIFF (ignored when *vector* given)
    :param save: False, or a path to a .png file
    :param vector: use an already-loaded 2-D vector instead of reloading
    """
    # BUG FIX: was `if not vector:`, which raises "truth value of an array
    # is ambiguous" whenever a multi-element numpy array is passed in.
    if vector is None:
        vector = load_tif_vector(geotiff)
    # Flatten the 2-D raster into one 1-D array of values.
    newv = numpy.concatenate(vector, axis=0)
    # BUG FIX: the `normed=` keyword was removed from matplotlib's hist();
    # density=True is the modern equivalent of the old normed=1.
    plt.hist(newv[~numpy.isnan(newv)], 50, density=True, facecolor='green', alpha=0.75)
    plt.xlabel('Geotiff value')
    plt.ylabel('Probability')
    plt.title('Histogram of Geotiff')
    if save:
        plt.savefig(save)
        plt.close()
    else:
        plt.show()
def tif_fit_rleafangle(geotiff):
    """Use R to fit leaf angle.

    Fits a two-parameter Beta via the R package RLeafAngle and returns the
    resulting data frame converted to pandas.

    NOTE(review): this function uses ``r``, ``numpy2ri`` and ``pandas2ri``
    from rpy2, whose import is commented out at the top of this module — as
    written it raises NameError at runtime.
    """
    f = gdal.Open(geotiff)
    # Flatten band 1 and drop the -9999 nodata values.
    vector = numpy.concatenate(numpy.array(f.GetRasterBand(1).ReadAsArray()), axis=0)
    vector[vector == -9999.] = numpy.nan
    newvector = vector[~numpy.isnan(vector)]
    # R closure: computes Beta parameters plus derived mean/variance and n.
    rstring = """
    function(angles){
        n <- length(angles)
        betapara <- RLeafAngle::computeBeta(angles)
        result <- data.frame(rbind(
        c(trait = 'leaf_angle_twoparbeta',
        beta1 = betapara[1],
        beta2 = betapara[2],
        mean = betapara[1]/(betapara[1]+betapara[2]),
        variance = betapara[1]*betapara[2]/(betapara[1]+betapara[2])/(betapara[1]+betapara[2])/(betapara[1]+betapara[2]+1),
        n = n)))
        return(result)
    }
    """
    numpy2ri.activate()
    rfunc = r(rstring)
    r_df = rfunc(newvector)
    newdf = pandas2ri.ri2py(r_df)
    return newdf
def tif_fit_pyleafangle(geotiff):
    """Use Python to fit leaf angle.

    Fits a two-parameter Beta distribution to the non-NaN geotiff values via
    the method of moments.

    :return: ('leaf_angle_twoparbeta', alpha, beta, mean, variance)
    """
    f = gdal.Open(geotiff)
    # Flatten band 1 and drop the -9999 nodata values.
    vector = numpy.concatenate(numpy.array(f.GetRasterBand(1).ReadAsArray()), axis=0)
    vector[vector == -9999.] = numpy.nan
    newvector = vector[~numpy.isnan(vector)]
    xbar = numpy.mean(newvector)
    xvar = numpy.var(newvector)
    # BUG FIX: the original wrote `(... - 1) / xbar) * (xbar ^ 2)` — `^` is
    # XOR in Python and raises TypeError on floats (it is power in R, where
    # this formula likely originated), and the parenthesis misplaced the
    # 1/xbar term.  The method-of-moments Beta estimators are:
    #   alpha = ((1 - xbar)/xvar - 1/xbar) * xbar^2
    #   beta  = alpha * (1/xbar - 1)
    alpha = ((1 - xbar) / xvar - 1 / xbar) * (xbar ** 2)
    beta = alpha * (1 / xbar - 1)
    return ('leaf_angle_twoparbeta', alpha, beta, xbar, xvar)
| 34.174497 | 129 | 0.600255 | 1,463 | 10,184 | 3.993165 | 0.197539 | 0.008216 | 0.01027 | 0.018487 | 0.365457 | 0.304519 | 0.238959 | 0.219445 | 0.219445 | 0.212256 | 0 | 0.02728 | 0.276512 | 10,184 | 297 | 130 | 34.289562 | 0.765608 | 0.192655 | 0 | 0.164179 | 0 | 0.004975 | 0.147333 | 0.038667 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054726 | false | 0 | 0.044776 | 0 | 0.149254 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
776c5fd7e0466ea12b425b1c163bacd2680eebf7 | 1,721 | py | Python | tests/testutils.py | rcap107/holoclean | d4f5929a8e4d92d4f41eb058c04c96cdcb0af767 | [
"Apache-2.0"
] | 468 | 2018-11-11T15:40:12.000Z | 2022-03-30T13:21:48.000Z | tests/testutils.py | rcap107/holoclean | d4f5929a8e4d92d4f41eb058c04c96cdcb0af767 | [
"Apache-2.0"
] | 43 | 2018-11-10T20:03:49.000Z | 2020-10-20T16:39:03.000Z | tests/testutils.py | rcap107/holoclean | d4f5929a8e4d92d4f41eb058c04c96cdcb0af767 | [
"Apache-2.0"
] | 118 | 2018-11-12T19:11:42.000Z | 2022-03-23T18:25:29.000Z | import random
from psycopg2 import connect
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
def random_database():
    """
    Creates a random database in the testing Postgres instance and returns the
    name of the database.

    Loops drawing random names until one not already present in pg_catalog
    is found, then creates it.
    """
    # Setup connection with default credentials for testing.
    with connect(dbname='holo', user='holocleanuser', password='abcd1234', host='localhost') as conn:
        # CREATE DATABASE cannot run inside a transaction block.
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        with conn.cursor() as cur:
            while True:
                # Generate a random DB name that is not already in Postgres.
                # BUG FIX: randint requires integer bounds; `1e6` is a float
                # and is rejected on Python 3.
                db_name = 'test_holo_{}'.format(random.randint(0, 10**6))
                cur.execute("""
                    SELECT EXISTS(
                        SELECT datname FROM pg_catalog.pg_database
                        WHERE datname = '{db_name}'
                    );
                    """.format(db_name=db_name))
                if cur.fetchall()[0][0]:
                    continue
                cur.execute("CREATE DATABASE {db_name}".format(db_name=db_name))
                return db_name
def delete_database(db_name):
    """Drop *db_name* from the test Postgres instance, first terminating any
    sessions still attached to it so the DROP cannot be blocked."""
    with connect(dbname='holo', user='holocleanuser', password='abcd1234', host='localhost') as connection:
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        with connection.cursor() as cursor:
            # Terminate un-closed connections.
            cursor.execute("""
            SELECT pid, pg_terminate_backend(pid)
            FROM pg_stat_activity
            WHERE datname = '{db_name}' AND pid <> pg_backend_pid();""".format(db_name=db_name))
            # Drop the database.
            cursor.execute("DROP DATABASE IF EXISTS {db_name}".format(db_name=db_name))
| 40.97619 | 101 | 0.607786 | 200 | 1,721 | 5.045 | 0.38 | 0.095144 | 0.047572 | 0.055501 | 0.360753 | 0.342914 | 0.342914 | 0.271556 | 0.271556 | 0.271556 | 0 | 0.012397 | 0.29692 | 1,721 | 41 | 102 | 41.97561 | 0.821488 | 0.152818 | 0 | 0.285714 | 0 | 0 | 0.337979 | 0.032753 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0.071429 | 0.107143 | 0 | 0.214286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
776e3a1ebfc475a08f1ca5cdf7ac7816f0fc1b69 | 12,441 | py | Python | mainapp/models.py | mmiyaji/Dansible | 06d5ea1fce1345388e7101db33f23e6460e3c3cf | [
"MIT"
] | null | null | null | mainapp/models.py | mmiyaji/Dansible | 06d5ea1fce1345388e7101db33f23e6460e3c3cf | [
"MIT"
] | null | null | null | mainapp/models.py | mmiyaji/Dansible | 06d5ea1fce1345388e7101db33f23e6460e3c3cf | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
"""
models.py
Created by mmiyaji on 2016-07-11.
Copyright (c) 2016 ruhenheim.org. All rights reserved.
"""
from __future__ import unicode_literals
import datetime, time, uuid
from django.db import models
from django.contrib.auth import models as auth_models
def _server_attribute_uuid():
    """Return a fresh 32-char hex UUID (callable default for ServerAttribute.uuid)."""
    return uuid.uuid4().hex


class ServerAttribute(models.Model):
    """
    Server attribute model (soft-deletable via ``isvalid``).
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    # BUG FIX: the default must be a callable.  `default=uuid.uuid4().hex`
    # was evaluated once at import time, so every new row shared the same
    # default and the second insert violated unique=True.
    uuid = models.CharField(max_length = 32, default=_server_attribute_uuid, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return all rows that are not soft-deleted."""
        return ServerAttribute.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (rows for *page*, total valid count); page is 1-based and
        page=0 behaves like the first page."""
        result = ServerAttribute.objects.filter(isvalid__exact=True)
        if page != 0:
            page = page*span - span
        endpage = page + span
        return result[page:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return ServerAttribute.objects.all()

    @staticmethod
    def get_by_id(id):
        """Fetch by primary key; None when the id is invalid or missing."""
        try:
            return ServerAttribute.objects.get(id=int(id))
        except Exception:  # was a bare except; int()/get() raise several types
            return None

    @staticmethod
    def get_by_name(name=""):
        """Fetch the unique row named *name*; None when absent or ambiguous."""
        try:
            return ServerAttribute.objects.filter(name=name).get()
        except Exception:  # was a bare except
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/server_attribute/%s" % self.id
def _server_uuid():
    """Return a fresh 32-char hex UUID (callable default for Server.uuid)."""
    return uuid.uuid4().hex


class Server(models.Model):
    """
    Server model (soft-deletable via ``isvalid``).
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    # BUG FIX: the default must be a callable.  `default=uuid.uuid4().hex`
    # was evaluated once at import time, so every new row shared the same
    # default and the second insert violated unique=True.
    uuid = models.CharField(max_length = 32, default=_server_uuid, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return all rows that are not soft-deleted."""
        return Server.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (rows for *page*, total valid count); page is 1-based and
        page=0 behaves like the first page."""
        result = Server.objects.filter(isvalid__exact=True)
        if page != 0:
            page = page*span - span
        endpage = page + span
        return result[page:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return Server.objects.all()

    @staticmethod
    def get_by_id(id):
        """Fetch by primary key; None when the id is invalid or missing."""
        try:
            return Server.objects.get(id=int(id))
        except Exception:  # was a bare except; int()/get() raise several types
            return None

    @staticmethod
    def get_by_uuid(target_uuid):
        """Fetch the first row whose uuid matches; None when absent."""
        try:
            return Server.objects.filter(uuid__exact=target_uuid)[0]
        except Exception:  # was a bare except; IndexError when no match
            return None

    @staticmethod
    def get_by_name(name=""):
        """Fetch the unique row named *name*; None when absent or ambiguous."""
        try:
            return Server.objects.filter(name=name).get()
        except Exception:  # was a bare except
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/server/%s" % self.id
def _os_template_uuid():
    """Return a fresh 32-char hex UUID (callable default for OSTemplate.uuid)."""
    return uuid.uuid4().hex


class OSTemplate(models.Model):
    """
    OS template model, linked to a ServerAttribute (soft-deletable via ``isvalid``).
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    server_attribute = models.ForeignKey(ServerAttribute, db_index=True)
    # BUG FIX: the default must be a callable.  `default=uuid.uuid4().hex`
    # was evaluated once at import time, so every new row shared the same
    # default and the second insert violated unique=True.
    uuid = models.CharField(max_length = 32, default=_os_template_uuid, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return all rows that are not soft-deleted."""
        return OSTemplate.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (rows for *page*, total valid count); page is 1-based and
        page=0 behaves like the first page."""
        result = OSTemplate.objects.filter(isvalid__exact=True)
        if page != 0:
            page = page*span - span
        endpage = page + span
        return result[page:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return OSTemplate.objects.all()

    @staticmethod
    def get_by_id(id):
        """Fetch by primary key; None when the id is invalid or missing."""
        try:
            return OSTemplate.objects.get(id=int(id))
        except Exception:  # was a bare except; int()/get() raise several types
            return None

    @staticmethod
    def get_by_name(name=""):
        """Fetch the unique row named *name*; None when absent or ambiguous."""
        try:
            return OSTemplate.objects.filter(name=name).get()
        except Exception:  # was a bare except
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/os_template/%s" % self.id
class ConfigFile(models.Model):
    """Configuration model: a file to be placed on a server (path/mode/ownership)."""
    name = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    file_path = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    # NOTE(review): "permittion" is a typo for "permission", but renaming the
    # field would change the DB column and break callers, so it is kept.
    file_permittion = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    file_owner = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    file_group = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    # Fix: default=uuid.uuid4().hex was evaluated once at import time, so all
    # rows shared one default and unique=True failed on the second insert.
    uuid = models.CharField(max_length=32, default=lambda: uuid.uuid4().hex, editable=False, unique=True)
    # Soft-delete flag.
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    @staticmethod
    def get_all():
        """Return all valid (non-soft-deleted) config files."""
        return ConfigFile.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (one page of valid rows, total count). `page` is 1-based; 0/1 = first page.

        Fix: slice end was unbound when page == 0 in the original.
        """
        result = ConfigFile.objects.filter(isvalid__exact=True)
        start = max(page - 1, 0) * span
        return result[start:start + span], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return ConfigFile.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None on bad/unknown id."""
        try:
            return ConfigFile.objects.get(id=int(id))
        except (ConfigFile.DoesNotExist, ValueError, TypeError):
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the row with exactly this name, or None when missing or ambiguous."""
        try:
            return ConfigFile.objects.filter(name=name).get()
        except (ConfigFile.DoesNotExist, ConfigFile.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        # Python 2 display form.
        return self.name

    def get_absolute_url(self):
        """Site-relative detail URL."""
        return "/config_file/%s" % self.id
class ConfigData(models.Model):
    """Configuration model: inline file data with target path/mode/ownership.

    NOTE(review): fields mirror ConfigFile exactly; presumably this variant
    carries the file content inline — confirm against the writer/consumer code.
    """
    name = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    file_path = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    # "permittion" typo kept: renaming would change the DB column.
    file_permittion = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    file_owner = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    file_group = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    # Fix: default=uuid.uuid4().hex was evaluated once at import time; a
    # callable generates a fresh hex per row so unique=True can hold.
    uuid = models.CharField(max_length=32, default=lambda: uuid.uuid4().hex, editable=False, unique=True)
    # Soft-delete flag.
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    @staticmethod
    def get_all():
        """Return all valid (non-soft-deleted) rows."""
        return ConfigData.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (one page of valid rows, total count). `page` is 1-based; 0/1 = first page.

        Fix: slice end was unbound when page == 0 in the original.
        """
        result = ConfigData.objects.filter(isvalid__exact=True)
        start = max(page - 1, 0) * span
        return result[start:start + span], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return ConfigData.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None on bad/unknown id."""
        try:
            return ConfigData.objects.get(id=int(id))
        except (ConfigData.DoesNotExist, ValueError, TypeError):
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the row with exactly this name, or None when missing or ambiguous."""
        try:
            return ConfigData.objects.filter(name=name).get()
        except (ConfigData.DoesNotExist, ConfigData.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        # Python 2 display form.
        return self.name

    def get_absolute_url(self):
        """Site-relative detail URL."""
        return "/config_data/%s" % self.id
class ConfigCommand(models.Model):
    """Configuration model: a shell command to run (as `command_user`) on a server."""
    name = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    command = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    command_user = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    # Fix: default=uuid.uuid4().hex was evaluated once at import time; a
    # callable generates a fresh hex per row so unique=True can hold.
    uuid = models.CharField(max_length=32, default=lambda: uuid.uuid4().hex, editable=False, unique=True)
    # Soft-delete flag.
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    @staticmethod
    def get_all():
        """Return all valid (non-soft-deleted) rows."""
        return ConfigCommand.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (one page of valid rows, total count). `page` is 1-based; 0/1 = first page.

        Fix: slice end was unbound when page == 0 in the original.
        """
        result = ConfigCommand.objects.filter(isvalid__exact=True)
        start = max(page - 1, 0) * span
        return result[start:start + span], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return ConfigCommand.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None on bad/unknown id."""
        try:
            return ConfigCommand.objects.get(id=int(id))
        except (ConfigCommand.DoesNotExist, ValueError, TypeError):
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the row with exactly this name, or None when missing or ambiguous."""
        try:
            return ConfigCommand.objects.filter(name=name).get()
        except (ConfigCommand.DoesNotExist, ConfigCommand.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        # Python 2 display form.
        return self.name

    def get_absolute_url(self):
        """Site-relative detail URL."""
        return "/config_command/%s" % self.id
class Config(models.Model):
    """Configuration entry that dispatches to a file, data, or command config.

    NOTE(review): all three foreign keys are required regardless of
    config_type; presumably only the one matching config_type is meaningful —
    confirm against caller code.
    """
    # Discriminator values for config_type.
    CONFIG_CHOICES = (
        ('f', 'File'),
        ('d', 'Data'),
        ('c', 'Command'),
    )
    name = models.CharField(max_length=100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    config_type = models.CharField(max_length=10, choices=CONFIG_CHOICES, default="c", blank=False, null=False, db_index=True)
    config_file = models.ForeignKey(ConfigFile, db_index=True)
    config_data = models.ForeignKey(ConfigData, db_index=True)
    config_command = models.ForeignKey(ConfigCommand, db_index=True)
    # Fix: default=uuid.uuid4().hex was evaluated once at import time; a
    # callable generates a fresh hex per row so unique=True can hold.
    uuid = models.CharField(max_length=32, default=lambda: uuid.uuid4().hex, editable=False, unique=True)
    # Soft-delete flag.
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        # Newest entries first by default.
        ordering = ['-created_at']

    @staticmethod
    def get_all():
        """Return all valid (non-soft-deleted) rows."""
        return Config.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (one page of valid rows, total count). `page` is 1-based; 0/1 = first page.

        Fix: slice end was unbound when page == 0 in the original.
        """
        result = Config.objects.filter(isvalid__exact=True)
        start = max(page - 1, 0) * span
        return result[start:start + span], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return Config.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None on bad/unknown id."""
        try:
            return Config.objects.get(id=int(id))
        except (Config.DoesNotExist, ValueError, TypeError):
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the row with exactly this name, or None when missing or ambiguous."""
        try:
            return Config.objects.filter(name=name).get()
        except (Config.DoesNotExist, Config.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        # Python 2 display form.
        return self.name

    def get_absolute_url(self):
        """Site-relative detail URL."""
        return "/config/%s" % self.id
| 33.991803 | 128 | 0.63982 | 1,507 | 12,441 | 5.112143 | 0.079628 | 0.039071 | 0.061397 | 0.077882 | 0.88188 | 0.88188 | 0.846314 | 0.836708 | 0.836708 | 0.836708 | 0 | 0.01238 | 0.246845 | 12,441 | 365 | 129 | 34.084932 | 0.809819 | 0.014951 | 0 | 0.780255 | 0 | 0 | 0.010955 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.159236 | false | 0 | 0.012739 | 0.089172 | 0.544586 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
776f426a84e27e37f4447fb48972e95dce547d74 | 162 | py | Python | lang/python/matplotlib/dyn.py | liuyang1/test | a4560e0c9ffd0bc054d55bbcf12a894ab5b7d417 | [
"MIT"
] | 8 | 2015-06-07T13:25:48.000Z | 2022-03-22T23:14:50.000Z | lang/python/matplotlib/dyn.py | liuyang1/test | a4560e0c9ffd0bc054d55bbcf12a894ab5b7d417 | [
"MIT"
] | 30 | 2016-01-29T01:36:41.000Z | 2018-09-19T07:01:22.000Z | lang/python/matplotlib/dyn.py | liuyang1/test | a4560e0c9ffd0bc054d55bbcf12a894ab5b7d417 | [
"MIT"
import matplotlib.pyplot as p
from scipy import eye
import time

# Python 2 demo: show growing identity matrices without blocking, pausing
# three seconds between frames so the window visibly updates.
for x in xrange(3, 7):
    p.imshow(eye(x))
    p.show(block=False)
    time.sleep(3)
    print x
| 18 | 29 | 0.67284 | 30 | 162 | 3.633333 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02381 | 0.222222 | 162 | 8 | 30 | 20.25 | 0.84127 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.375 | null | null | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 3 |
77724149191f0e80307f99dd8a0656415ea312e7 | 559 | py | Python | conflictgateway/social/jobsupdate.py | sebastianlees/Conflict-Gateway | e875abb48ad4d51db90983a35a6c7bd47a54d5e9 | [
"MIT"
] | null | null | null | conflictgateway/social/jobsupdate.py | sebastianlees/Conflict-Gateway | e875abb48ad4d51db90983a35a6c7bd47a54d5e9 | [
"MIT"
] | null | null | null | conflictgateway/social/jobsupdate.py | sebastianlees/Conflict-Gateway | e875abb48ad4d51db90983a35a6c7bd47a54d5e9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from twython import Twython
import random
APP_KEY = 'APP KEY' # Customer Key here
APP_SECRET = 'APP SECRET' # Customer secret here
OAUTH_TOKEN = 'OAUTH TOKEN' # Access Token here
OAUTH_TOKEN_SECRET = 'OAUTH TOKEN SECRET' # Access Token Secret here
jobsnumber = random.randint(50, 100)
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
twitter.update_status(status="Weekly jobs update: " + str(jobsnumber) + " Mediation & Conflict Resolution jobs & scholarships added... http://www.conflictgateway.com/jobs") | 39.928571 | 172 | 0.763864 | 77 | 559 | 5.402597 | 0.454545 | 0.144231 | 0.115385 | 0.096154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010352 | 0.135957 | 559 | 14 | 172 | 39.928571 | 0.850932 | 0.182469 | 0 | 0 | 0 | 0 | 0.359823 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77725779c0f591f17fb4ed4c63a92e76bed4c7ed | 614 | py | Python | degreedClient/models/skill_plan.py | Rmaravanyika/degreedClient | 44beb91d2db56d81fa9f1f35b402076144aa124e | [
"Apache-2.0"
] | 1 | 2019-08-21T07:43:08.000Z | 2019-08-21T07:43:08.000Z | degreedClient/models/skill_plan.py | Rmaravanyika/degreedClient | 44beb91d2db56d81fa9f1f35b402076144aa124e | [
"Apache-2.0"
] | 530 | 2019-06-11T03:06:52.000Z | 2022-03-31T22:53:11.000Z | degreedClient/models/skill_plan.py | Rmaravanyika/degreedClient | 44beb91d2db56d81fa9f1f35b402076144aa124e | [
"Apache-2.0"
] | null | null | null | import arrow
import attr
from attr import attrs, attrib
@attrs
class SkillPlan(object):
    """Skill-plan resource: an id plus its attributes and links payloads."""
    id = attrib()
    attributes = attrib()  # presumably a SkillPlanAttribute-shaped payload — confirm against client code
    links = attrib()
@attrs
class SkillPlanAttribute(object):
    """Attributes of a skill plan: name, description, visibility, sections, created_at."""
    name = attrib()
    description = attrib()
    visibility = attrib()
    sections = attrib(default=None)
    # Converted to an Arrow datetime at construction; stays None when omitted.
    created_at = attr.ib(converter=attr.converters.optional(arrow.get), default=None)
@attrs
class SkillFollower(object):
    """Skill-follower resource: id plus attributes, links, and relationships payloads."""
    id = attrib()
    attributes = attrib()  # presumably SkillFollowerAttribute-shaped — confirm against client code
    links = attrib()
    relationships = attrib()
@attrs
class SkillFollowerAttribute(object):
    """Attributes of a skill follower: the employee, enrollment time, and plan flag."""
    employee_id = attrib()
    enrolled_at = attrib()
    is_primary_plan = attrib()
| 16.594595 | 82 | 0.732899 | 71 | 614 | 6.267606 | 0.478873 | 0.089888 | 0.107865 | 0.107865 | 0.18427 | 0.18427 | 0.18427 | 0 | 0 | 0 | 0 | 0 | 0.149837 | 614 | 36 | 83 | 17.055556 | 0.85249 | 0 | 0 | 0.384615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.115385 | 0 | 0.846154 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
77729e809d9ad4a191008903e41b3ead3c5eac97 | 273 | py | Python | src/utils.py | HuiiBuh/checkers-master | 112eb1df1d8b0d691edd82978945ea5527b75fab | [
"MIT"
] | 1 | 2021-09-04T05:34:51.000Z | 2021-09-04T05:34:51.000Z | src/utils.py | HuiiBuh/checkers-master | 112eb1df1d8b0d691edd82978945ea5527b75fab | [
"MIT"
] | null | null | null | src/utils.py | HuiiBuh/checkers-master | 112eb1df1d8b0d691edd82978945ea5527b75fab | [
"MIT"
] | null | null | null | import numpy as np
import base64
import pickle
def img2str(image):
imdata = pickle.dumps(image)
return base64.b64encode(imdata).decode('ascii')
def str2img(string):
image = base64.b64decode(string)
image = pickle.loads(image)
return np.array(image)
| 18.2 | 51 | 0.714286 | 36 | 273 | 5.416667 | 0.555556 | 0.112821 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.053571 | 0.179487 | 273 | 14 | 52 | 19.5 | 0.816964 | 0 | 0 | 0 | 0 | 0 | 0.018315 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.3 | 0 | 0.7 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
777390888167fc8811e643a135d220bdf95baecc | 3,457 | py | Python | experiment_utils/initialization_utils.py | HanGuo97/experiment-utils | ff1288d4ab89af90e5c8c486de4dd7673a99079b | [
"MIT"
] | null | null | null | experiment_utils/initialization_utils.py | HanGuo97/experiment-utils | ff1288d4ab89af90e5c8c486de4dd7673a99079b | [
"MIT"
] | null | null | null | experiment_utils/initialization_utils.py | HanGuo97/experiment-utils | ff1288d4ab89af90e5c8c486de4dd7673a99079b | [
"MIT"
] | null | null | null | import os
import click
from git import Repo
from absl import logging
from datetime import datetime
from collections import namedtuple
from typing import Tuple, Optional
from . import wandb_utils
# Emit absl INFO-level logs by default.
logging.set_verbosity(logging.INFO)

# Immutable bundle describing one experiment run; filled in by
# interactive_initialize() and stored in the module-level CONFIG.
ExperimentConfig = namedtuple(
    "ExperimentConfig", (
        "project_name",
        "experiment_tag",
        "experiment_name",
        "experiment_description",
        "experiment_dir",
        "repo"))

# Module-level singleton set by interactive_initialize(); None until then.
CONFIG: Optional[ExperimentConfig] = None
# Colored prefix used for this module's console output.
LOG_STRING = click.style("Experiment", fg="blue", bold=True)
def interactive_initialize(
        base_experiment_dir: str=".",
        default_project_name: Optional[str]=None,
        default_experiment_tag: Optional[str]=None,
        default_experiment_description: Optional[str]=None,
        initialize_wandb: bool=True
) -> ExperimentConfig:
    """Interactively collect experiment metadata and populate the module CONFIG.

    Prompts (via click) for project name, tag, and description, derives the
    full experiment name from the Git state and today's date, asks for
    confirmation, and optionally initializes Weights & Biases. Returns the
    assembled ExperimentConfig (also stored in the module-level CONFIG).
    Aborts (click.Abort) when the user declines either confirmation.
    """
    if not isinstance(base_experiment_dir, str):
        raise ValueError("`base_experiment_dir` must be String")

    # Set file-scope configuration
    global CONFIG

    # Query user inputs
    project_name = click.prompt(
        "Please Enter The Project Name",
        type=str, default=default_project_name)
    experiment_tag = click.prompt(
        "Please Enter The Experiment Name",
        type=str, default=default_experiment_tag)
    experiment_description = click.prompt(
        "Please Enter The Experiment Description",
        type=str, default=default_experiment_description)

    # Project Name should be in camel-case
    project_name = to_PascalCase(project_name)
    # Experiment Tag should be in camel-case
    experiment_tag = to_PascalCase(experiment_tag)

    # Experiment Name will include more info (date, branch, commit, tag)
    experiment_name, git_repo = _get_experiment_name(tag=experiment_tag)
    # Experiment log dir
    experiment_dir = os.path.join(base_experiment_dir, experiment_name)

    CONFIG = ExperimentConfig(
        repo=git_repo,
        project_name=project_name,
        experiment_tag=experiment_tag,
        experiment_dir=experiment_dir,
        experiment_name=experiment_name,
        experiment_description=experiment_description)
    _print_config()

    # Confirm if the directory already exists (allows aborting before overwrite).
    if os.path.exists(CONFIG.experiment_dir):
        click.confirm(click.style(
            "Experiment Dir already exists, continue?", fg="red"), abort=True)
    # Final go/no-go confirmation.
    click.confirm("Do you want to continue?", abort=True)

    if initialize_wandb:
        wandb_utils.wandb_initialize(
            project_name=project_name,
            experiment_name=experiment_name,
            experiment_tag=experiment_tag,
            experiment_notes=experiment_description)
    return CONFIG
def _get_experiment_name(tag: Optional[str]=None) -> Tuple[str, Repo]:
    """Get the experiment name based on Git status and time.

    The name is "<yyyymd>_BRANCH_<branch>_COMMIT_<sha5>_TAG_<tag>", built from
    the repository at the current working directory.
    """
    repo = Repo("./")
    now = datetime.now()
    parts = [
        f"{now.year}{now.month}{now.day}",
        f"BRANCH_{repo.active_branch}",
        f"COMMIT_{repo.head.commit.hexsha[:5]}",
        f"TAG_{tag}",
    ]
    return "_".join(parts), repo
def _print_config() -> None:
    """Pretty-print the module-level CONFIG via click, with aligned field names.

    No-op when CONFIG has not been initialized yet.
    """
    if CONFIG is None:
        return
    # Pad every field name to the longest one so the values line up.
    width = max(map(len, CONFIG._fields)) + 1
    # Iterate fields and values together instead of indexing by range(len(...)).
    for field, value in zip(CONFIG._fields, CONFIG):
        click.echo(f"{LOG_STRING} {field: <{width}}: {value}")
def to_PascalCase(s: str) -> str:
    """Title-case each word of *s* and strip all whitespace (e.g. "my project" -> "MyProject")."""
    kept = [ch for ch in s.title() if not ch.isspace()]
    return "".join(kept)
| 29.29661 | 78 | 0.676598 | 413 | 3,457 | 5.445521 | 0.2954 | 0.058693 | 0.081814 | 0.032014 | 0.249 | 0.066696 | 0 | 0 | 0 | 0 | 0 | 0.000751 | 0.22939 | 3,457 | 117 | 79 | 29.547009 | 0.843468 | 0.087938 | 0 | 0.075 | 0 | 0 | 0.154508 | 0.045237 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.1 | 0.0125 | 0.2 | 0.025 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77751b2395c16d4aa8161f9896e4bd62cf296098 | 1,045 | py | Python | MDD.py | Asteria-Sun/Some-Short-Programs | df0b1a0f5fc838efbdb88da91ad653026044e23a | [
"Apache-2.0"
] | null | null | null | MDD.py | Asteria-Sun/Some-Short-Programs | df0b1a0f5fc838efbdb88da91ad653026044e23a | [
"Apache-2.0"
] | null | null | null | MDD.py | Asteria-Sun/Some-Short-Programs | df0b1a0f5fc838efbdb88da91ad653026044e23a | [
"Apache-2.0"
] | null | null | null | '''ๆๅคงๅๆค่ฎก็ฎ'''
import numpy as np
import pandas as pd
import datetime
#้ๆบ่ถๅฟ+ๆ็ปญไธๆถจ็่ถๅฟ
price = 2 + np.random.randn(21) + np.linspace(0,2,21)
#่ฎพ็ฝฎๆฅๆ
date = [datetime.date(2020,12,x) for x in range(2,23)]
MDD_df = pd.DataFrame({'date':date,'price':price})
#็ฎๅๆถ็็ๆฒกๅ1ๆฏไธบไบๆนไพฟ่ฟไน
MDD_df['return+1'] = MDD_df['price']/MDD_df['price'].shift(1)
MDD_df['cum_return'] = MDD_df['return+1'].cumprod()
MDD_df = MDD_df.dropna(axis=0, how='any')
MDD_df = MDD_df.reset_index(drop = True)
MDD_df['return_max'],MDD_df['return_min'] = MDD_df['cum_return'],MDD_df['cum_return']
l=len(MDD_df)
#ๆฑๅฝๅๆๅคง/ๆๅฐ็ดฏ็งฏๆถ็็
for i in range(1,l):
if MDD_df['return_max'][i] < MDD_df['return_max'][i-1]:
MDD_df['return_max'][i] = MDD_df['return_max'][i-1]
if MDD_df['return_min'][i] > MDD_df['return_min'][i-1]:
MDD_df['return_min'][i] = MDD_df['return_min'][i-1]
#ๆฑๆๅคงๅทฎๅผ๏ผๅฟ
็ถๅบ็ฐๅจๅฝๅๆๅคง็ดฏ็งฏๆถ็ๅๆๅฐ็ดฏ็งฏๆถ็ไน้ด๏ผ
MDD_df['spread_max'] = MDD_df['return_max'] - MDD_df['return_min']
#ๆ นๆฎๅฎไนๆฑๅๆค็
MDD_df['d_rate'] = MDD_df['spread_max']/MDD_df['return_max']
#ๆๅคงๅๆค็
mdd = MDD_df['d_rate'].max()
| 33.709677 | 85 | 0.687081 | 192 | 1,045 | 3.473958 | 0.302083 | 0.224888 | 0.247376 | 0.146927 | 0.389805 | 0.389805 | 0.332834 | 0.332834 | 0.185907 | 0.185907 | 0 | 0.027778 | 0.104306 | 1,045 | 30 | 86 | 34.833333 | 0.684829 | 0.088995 | 0 | 0 | 0 | 0 | 0.244681 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.15 | 0 | 0.15 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
77752e8a5315061629304e6dc5ff4ce54a7a1a75 | 1,980 | py | Python | src/MAT/bin/MATManagePluginDirs_tpl.py | wake-forest-ctsi/mist-toolkit | 857e91976fa3b75ef2cad08612fa79cf2f743615 | [
"BSD-3-Clause"
] | 2 | 2015-10-28T17:58:31.000Z | 2021-10-12T10:34:39.000Z | scrubber/MIST_2_0_4/src/MAT/bin/MATManagePluginDirs_tpl.py | manaswini18/DmD | dd1e865ddb7b43c8478b2b5733385143b1980951 | [
"Apache-2.0"
] | null | null | null | scrubber/MIST_2_0_4/src/MAT/bin/MATManagePluginDirs_tpl.py | manaswini18/DmD | dd1e865ddb7b43c8478b2b5733385143b1980951 | [
"Apache-2.0"
] | 9 | 2016-12-17T22:50:37.000Z | 2020-09-26T01:08:06.000Z | #!MF_PYTHONBIN
# Copyright (C) 2007 - 2009 The MITRE Corporation. See the toplevel
# file LICENSE for license terms.
import os, sys, glob, shutil
MAT_PKG_PYLIB = "MF_MAT_PKG_PYLIB"
sys.path.insert(0, MAT_PKG_PYLIB)
MAT_PKG_HOME = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
import MAT
def Usage():
    # Print command usage and exit with a non-zero status (Python 2 syntax).
    print "Usage: MATManagePluginDirs [ install | remove ] app_dir ..."
    print " MATManagePluginDirs list"
    sys.exit(1)
# Command-line dispatch: install/remove plugin directories, or list them.
if len(sys.argv) < 2:
    Usage()

if sys.argv[1] in ["install", "remove"]:
    if len(sys.argv) < 3:
        Usage()
    mgr = MAT.PluginMgr.PluginDirMgr()
    if sys.argv[1] == "install":
        meth = mgr.installPluginDir
    else:
        meth = mgr.uninstallPluginDir
    for appDir in sys.argv[2:]:
        try:
            meth(appDir, verbose = True)
        except MAT.PluginMgr.PluginError, e:
            # Report and skip directories that fail rather than aborting the batch.
            print "Warning:", str(e)
            print "Skipping %s." % appDir
elif sys.argv[1] == "list":
    mgr = MAT.PluginMgr.PluginDirMgr()
    mgr.read()
    d = MAT.PluginMgr.LoadPlugins()
    # Invert the task -> directory mapping: collect task names per plugin dir.
    byDir = {}
    for k, v in d.items():
        try:
            byDir[v.taskRoot].append(k)
        except KeyError:
            byDir[v.taskRoot] = [k]
    for prefix, fullPath in mgr.dirPairs:
        # Only report unprefixed entries.
        if prefix != "":
            continue
        whatsThere = []
        if os.path.exists(os.path.join(fullPath, "task.xml")):
            localTasks = byDir.get(fullPath)
            if localTasks is None:
                whatsThere.append("task (none visible)")
            elif len(localTasks) > 1:
                whatsThere.append("tasks: " + ", ".join(["'" + t + "'" for t in localTasks]))
            else:
                whatsThere.append("task: '" + localTasks[0] + "'")
        if os.path.exists(os.path.join(fullPath, "demo.xml")):
            whatsThere.append("demo")
        print fullPath, "("+"; ".join(whatsThere)+")"
else:
    print "Operation '%s' unknown." % sys.argv[1]
    Usage()
| 29.117647 | 93 | 0.569697 | 237 | 1,980 | 4.696203 | 0.396624 | 0.037736 | 0.028751 | 0.026954 | 0.086253 | 0.057502 | 0.057502 | 0.057502 | 0 | 0 | 0 | 0.013447 | 0.286364 | 1,980 | 67 | 94 | 29.552239 | 0.774239 | 0.056061 | 0 | 0.188679 | 0 | 0 | 0.126073 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.037736 | null | null | 0.113208 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
777555e78b559e5e4b478451278bf7a1b55935a1 | 1,071 | py | Python | Strategy/python/strategy.py | JaviMiot/patternDesignJS_Python | 110cd02320e485c17f7d3b6bbafc4ffa53ae974c | [
"MIT"
] | null | null | null | Strategy/python/strategy.py | JaviMiot/patternDesignJS_Python | 110cd02320e485c17f7d3b6bbafc4ffa53ae974c | [
"MIT"
] | null | null | null | Strategy/python/strategy.py | JaviMiot/patternDesignJS_Python | 110cd02320e485c17f7d3b6bbafc4ffa53ae974c | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
class Operation(ABC):
    """Strategy interface: a binary arithmetic operation on two floats."""

    @abstractmethod
    def calculate(self, number1: float, number2: float) -> float:
        """Combine number1 and number2; concrete strategies define how."""
        pass
class Sum(Operation):
    """Addition strategy."""

    def calculate(self, number1: float, number2: float) -> float:
        """Return number1 + number2."""
        total = number1 + number2
        return total
class Rest(Operation):
    """Subtraction strategy."""

    def calculate(self, number1: float, number2: float) -> float:
        """Return number1 - number2."""
        difference = number1 - number2
        return difference
class MathOperation(ABC):
    """Context object that delegates calculate() to a swappable Operation strategy."""

    def __init__(self, instance: Operation):
        self.__instance = instance

    @property
    def instance(self):
        """The currently active Operation strategy."""
        return self.__instance

    @instance.setter
    def instance(self, instance: Operation):
        # Swapping the strategy at runtime is the point of the pattern.
        self.__instance = instance

    def calculate(self, number1: float, number2: float) -> float:
        """Delegate the computation to the active strategy."""
        return self.instance.calculate(number1, number2)
return self.__instance.calculate(number1, number2)
if __name__ == '__main__':
mathOperators = MathOperation(Sum())
print(mathOperators.instance)
print(mathOperators.calculate(1, 2))
mathOperators.instance = Rest()
print(mathOperators.instance)
print(mathOperators.calculate(1, 2))
| 23.282609 | 65 | 0.68254 | 113 | 1,071 | 6.292035 | 0.247788 | 0.101266 | 0.090014 | 0.129395 | 0.627286 | 0.627286 | 0.511955 | 0.511955 | 0.293952 | 0.222222 | 0 | 0.021403 | 0.214753 | 1,071 | 45 | 66 | 23.8 | 0.824019 | 0 | 0 | 0.344828 | 0 | 0 | 0.00747 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.241379 | false | 0.034483 | 0.034483 | 0.137931 | 0.551724 | 0.137931 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 3 |
7775682dcc6dc253e667940b371c684ba9d3f2b3 | 1,295 | py | Python | Kiraro/Kiraro_Voice/Say.py | NotJakeR/Kiraro-Discord-Bot | de4f765eedd92cedcb7c61b0444e8f69e905f9e6 | [
"Apache-2.0"
] | null | null | null | Kiraro/Kiraro_Voice/Say.py | NotJakeR/Kiraro-Discord-Bot | de4f765eedd92cedcb7c61b0444e8f69e905f9e6 | [
"Apache-2.0"
] | null | null | null | Kiraro/Kiraro_Voice/Say.py | NotJakeR/Kiraro-Discord-Bot | de4f765eedd92cedcb7c61b0444e8f69e905f9e6 | [
"Apache-2.0"
] | 1 | 2021-01-25T19:06:17.000Z | 2021-01-25T19:06:17.000Z | from Kiraro import bot
from discord.ext import commands
from discord.errors import *
from discord.utils import get
from Kiraro.Kiraro_Voice import idk, queue
import discord
import gtts
queue_list = {}
@bot.command()
async def say(ctx, *, word):
    """Speak *word* in the caller's voice channel using Google TTS.

    When audio is already playing (ClientException), the text is queued
    for the guild instead of being played immediately.
    """
    boolean, voice = await idk(ctx)
    if boolean:
        # Synthesize the phrase to a temp wav before handing it to FFmpeg.
        tts = gtts.gTTS(word)
        tts.save("Voice_Files/TTS.wav")
        try:
            voice.play(discord.FFmpegPCMAudio("Voice_Files/TTS.wav"))  # , after=lambda e: queue(ctx, queue_list, voice))
            voice.source = discord.PCMVolumeTransformer(voice.source)
        except ClientException:
            # Fix: queue_list starts empty and nothing here pre-creates guild
            # entries, so the original queue_list[ctx.guild.id].append(word)
            # raised KeyError on the first queued message.
            queue_list.setdefault(ctx.guild.id, []).append(word)
            print(queue_list)
@say.error
async def say_error(ctx, error):
    """Error handler for `say`: report transient failures, show usage help on bad input."""
    if isinstance(error, discord.HTTPException):
        await ctx.send("Something went wrong, try again later")
    elif isinstance(error, commands.MissingRequiredArgument):
        # No text was supplied: reply with a usage embed instead of failing silently.
        help_embed = discord.Embed(
            title="Say",
            description="To use the Say command just add the text",
            color=discord.Color.blue()
        )
        help_embed.set_author(name=ctx.author, icon_url=ctx.author.avatar_url)
        help_embed.add_field(name="Usage", value="Say `message` ")
        await ctx.send(embed=help_embed)
    else:
        # Unanticipated error: log to console for debugging.
        print(F"Say Error {error}")
7777e8520a12559d29d6241cbcb36e71e0940302 | 7,056 | py | Python | simple_estimator.py | carlos9310/models | d5038337a42544b95d6bf97e40099ef140399b89 | [
"Apache-2.0"
] | 2 | 2020-09-14T02:15:17.000Z | 2021-09-28T07:08:32.000Z | simple_estimator.py | carlos9310/models | d5038337a42544b95d6bf97e40099ef140399b89 | [
"Apache-2.0"
] | null | null | null | simple_estimator.py | carlos9310/models | d5038337a42544b95d6bf97e40099ef140399b89 | [
"Apache-2.0"
] | 1 | 2022-01-04T06:55:54.000Z | 2022-01-04T06:55:54.000Z | # -*- coding:utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import tensorflow as tf
# Emit TF INFO-level logs (training progress, input parsing, etc.).
tf.logging.set_verbosity(tf.logging.INFO)

# Census-income dataset locations and model output directories.
ROOT_PATH = '/tmp/census_data'
TRAIN_PATH = '/tmp/census_data/adult.data'
EVAL_PATH = '/tmp/census_data/adult.test'
PREDICT_PATH = '/content/models/official/r1/wide_deep/census_test.csv'
MODEL_PATH = '/tmp/adult_model'
EXPORT_PATH = '/tmp/adult_export_model'

# Column order of the raw CSV files.
_CSV_COLUMNS = [
    'age', 'workclass', 'fnlwgt', 'education', 'education_num',
    'marital_status', 'occupation', 'relationship', 'race', 'gender',
    'capital_gain', 'capital_loss', 'hours_per_week', 'native_country',
    'income_bracket'
]

# Per-column defaults: [0] marks numeric columns, [''] string columns.
_CSV_COLUMN_DEFAULTS = [[0], [''], [0], [''], [0], [''], [''], [''], [''], [''],
                        [0], [0], [0], [''], ['']]

# Bucket count for hashed categorical columns and crossed columns.
_HASH_BUCKET_SIZE = 1000

# Known dataset sizes (used to size the shuffle buffer).
_NUM_EXAMPLES = {
    'train': 32561,
    'validation': 16281,
}
def build_model_columns():
    """Builds a set of wide and deep feature columns.

    Returns:
        (wide_columns, deep_columns): the sparse/crossed columns for the
        linear (wide) part and the dense columns for the DNN (deep) part.
    """
    # Continuous variable columns
    age = tf.feature_column.numeric_column('age')
    education_num = tf.feature_column.numeric_column('education_num')
    capital_gain = tf.feature_column.numeric_column('capital_gain')
    capital_loss = tf.feature_column.numeric_column('capital_loss')
    hours_per_week = tf.feature_column.numeric_column('hours_per_week')

    # Categorical columns with fixed, known vocabularies.
    education = tf.feature_column.categorical_column_with_vocabulary_list(
        'education', [
            'Bachelors', 'HS-grad', '11th', 'Masters', '9th', 'Some-college',
            'Assoc-acdm', 'Assoc-voc', '7th-8th', 'Doctorate', 'Prof-school',
            '5th-6th', '10th', '1st-4th', 'Preschool', '12th'])

    marital_status = tf.feature_column.categorical_column_with_vocabulary_list(
        'marital_status', [
            'Married-civ-spouse', 'Divorced', 'Married-spouse-absent',
            'Never-married', 'Separated', 'Married-AF-spouse', 'Widowed'])

    relationship = tf.feature_column.categorical_column_with_vocabulary_list(
        'relationship', [
            'Husband', 'Not-in-family', 'Wife', 'Own-child', 'Unmarried',
            'Other-relative'])

    workclass = tf.feature_column.categorical_column_with_vocabulary_list(
        'workclass', [
            'Self-emp-not-inc', 'Private', 'State-gov', 'Federal-gov',
            'Local-gov', '?', 'Self-emp-inc', 'Without-pay', 'Never-worked'])

    # To show an example of hashing:
    occupation = tf.feature_column.categorical_column_with_hash_bucket(
        'occupation', hash_bucket_size=_HASH_BUCKET_SIZE)

    # Transformations.
    age_buckets = tf.feature_column.bucketized_column(
        age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])

    # Wide columns and deep columns.
    base_columns = [
        education, marital_status, relationship, workclass, occupation,
        age_buckets,
    ]

    crossed_columns = [
        tf.feature_column.crossed_column(
            ['education', 'occupation'], hash_bucket_size=_HASH_BUCKET_SIZE),
        tf.feature_column.crossed_column(
            [age_buckets, 'education', 'occupation'],
            hash_bucket_size=_HASH_BUCKET_SIZE),
    ]

    wide_columns = base_columns + crossed_columns

    deep_columns = [
        age,
        education_num,
        capital_gain,
        capital_loss,
        hours_per_week,
        tf.feature_column.indicator_column(workclass),
        tf.feature_column.indicator_column(education),
        tf.feature_column.indicator_column(marital_status),
        tf.feature_column.indicator_column(relationship),
        # To show an example of embedding
        tf.feature_column.embedding_column(occupation, dimension=8),
    ]

    return wide_columns, deep_columns
def input_fn(data_path, shuffle, num_epochs, batch_size):
    """Generate an input function for the Estimator.

    Args:
        data_path: CSV file to read, one example per line.
        shuffle: whether to shuffle examples (use True for training).
        num_epochs: number of passes over the data (None = repeat forever).
        batch_size: examples per batch.
    Returns:
        A tf.data.Dataset of (features dict, boolean label) batches.
    """
    def parse_csv(value):
        # Decode one CSV line into a features dict and a binary label.
        tf.logging.info('Parsing {}'.format(data_path))
        columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS)
        features = dict(zip(_CSV_COLUMNS, columns))
        labels = features.pop('income_bracket')
        tf.logging.info(f'labels:{labels}')
        classes = tf.equal(labels, '>50K')  # binary classification
        return features, classes

    # Extract lines from input files using the Dataset API.
    dataset = tf.data.TextLineDataset(data_path)

    if shuffle:
        dataset = dataset.shuffle(buffer_size=_NUM_EXAMPLES['train'])

    dataset = dataset.map(parse_csv, num_parallel_calls=5)

    # We call repeat after shuffling, rather than before, to prevent separate
    # epochs from blending together.
    dataset = dataset.repeat(num_epochs)
    dataset = dataset.batch(batch_size)
    return dataset
# estimator.train() can be called repeatedly; the model's state is persisted in model_dir.
def run():
    """Train, evaluate, predict with, and export the wide-and-deep model."""
    wide_columns, deep_columns = build_model_columns()
    # os.system('rm -rf {}'.format(MODEL_PATH))
    config = tf.estimator.RunConfig(save_checkpoints_steps=100)
    estimator = tf.estimator.DNNLinearCombinedClassifier(model_dir=MODEL_PATH,
                                                         linear_feature_columns=wide_columns,
                                                         linear_optimizer=tf.train.FtrlOptimizer(learning_rate=0.01),
                                                         dnn_feature_columns=deep_columns,
                                                         dnn_hidden_units=[256, 64, 32, 16],
                                                         dnn_optimizer=tf.train.AdamOptimizer(learning_rate=0.001),
                                                         config=config)

    # Linear model.
    # estimator = tf.estimator.LinearClassifier(feature_columns=wide_columns, n_classes=2,
    #                                           optimizer=tf.train.FtrlOptimizer(learning_rate=0.03))

    # Train the model.
    estimator.train(
        input_fn=lambda: input_fn(data_path=TRAIN_PATH, shuffle=True, num_epochs=40, batch_size=100), steps=2000)
    """
    steps: the maximum number of training steps. The actual number of updates
    is jointly determined by the training-set size, num_epochs and batch_size;
    steps allows training to be stopped early.
    """

    # Evaluate the model.
    eval_result = estimator.evaluate(
        input_fn=lambda: input_fn(data_path=EVAL_PATH, shuffle=False, num_epochs=1, batch_size=40))
    print('Test set accuracy:', eval_result)

    # Predict.
    pred_dict = estimator.predict(
        input_fn=lambda: input_fn(data_path=PREDICT_PATH, shuffle=False, num_epochs=1, batch_size=40))
    for pred_res in pred_dict:
        print(pred_res['probabilities'][1])

    # Export a SavedModel that accepts serialized tf.Examples for serving.
    columns = wide_columns + deep_columns
    feature_spec = tf.feature_column.make_parse_example_spec(feature_columns=columns)
    serving_input_fn = tf.estimator.export.build_parsing_serving_input_receiver_fn(feature_spec)
    estimator.export_savedmodel(EXPORT_PATH, serving_input_fn)
# Script entry point: train, evaluate, predict, then export the model.
if __name__ == '__main__':
    run()
7777fe549507b7986267faec6ed4cc83a2313604 | 288 | py | Python | Python/Mundo1/TratandoDados/ex008.py | ysabelah/Estudos | aa587f1981324640c93978b3b80fdf3abde10e80 | [
"MIT"
] | null | null | null | Python/Mundo1/TratandoDados/ex008.py | ysabelah/Estudos | aa587f1981324640c93978b3b80fdf3abde10e80 | [
"MIT"
] | null | null | null | Python/Mundo1/TratandoDados/ex008.py | ysabelah/Estudos | aa587f1981324640c93978b3b80fdf3abde10e80 | [
"MIT"
] | null | null | null | #Escreva um programa que leia um valor em metros e o exiba convertido em centimetros e milimetros
numero = float(input('Digite o nรบmero: '))
print('O valor de {} em centรญmetros รฉ: {}'.format(numero, numero * 100))
print('O valor de {} em milรญmetros รฉ: {}'.format(numero, numero * 1000)) | 48 | 97 | 0.711806 | 45 | 288 | 4.555556 | 0.6 | 0.058537 | 0.107317 | 0.126829 | 0.146341 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028926 | 0.159722 | 288 | 6 | 98 | 48 | 0.818182 | 0.333333 | 0 | 0 | 0 | 0 | 0.4375 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.666667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
7778e854b1769ba9428d3a4d3170d1002d65dedd | 2,385 | py | Python | examples/run_multi_functions_parallel.py | rishavpramanik/mealpy | d4a4d5810f15837764e4ee61517350fef3dc92b3 | [
"MIT"
] | null | null | null | examples/run_multi_functions_parallel.py | rishavpramanik/mealpy | d4a4d5810f15837764e4ee61517350fef3dc92b3 | [
"MIT"
] | null | null | null | examples/run_multi_functions_parallel.py | rishavpramanik/mealpy | d4a4d5810f15837764e4ee61517350fef3dc92b3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Created by "Thieu" at 10:26, 02/03/2022 ----------%
# Email: nguyenthieu2102@gmail.com %
# Github: https://github.com/thieu1995 %
# --------------------------------------------------%
import concurrent.futures as parallel
from functools import partial
from opfunu.cec_basic import cec2014_nobias
from pandas import DataFrame
from mealpy.evolutionary_based.DE import BaseDE
from os import getcwd, path, makedirs
# Output directory for the result CSVs; created on first run.
PATH_RESULTS = "history/results/"
check_dir = f"{getcwd()}/{PATH_RESULTS}"
if not path.exists(check_dir):
    makedirs(check_dir)

model_name = "DE"   # optimizer label used in the output file names
n_dims = 30         # benchmark problem dimensionality
# CEC-2014 benchmark functions F1 through F19.
func_names = [f"F{index}" for index in range(1, 20)]
def find_minimum(function_name, n_dims):
    """Optimise one CEC-2014 benchmark function with BaseDE.

    Returns a dict with the function name, its best fitness (wrapped in a
    list so it forms a one-row DataFrame column) and the per-epoch error
    history.
    """
    print(f"Start running: {function_name}")
    lower_bounds = [-100, ] * n_dims
    upper_bounds = [100, ] * n_dims
    problem = {
        "fit_func": getattr(cec2014_nobias, function_name),
        "lb": lower_bounds,
        "ub": upper_bounds,
        "minmax": "min",
        "verbose": True,
    }
    optimizer = BaseDE(problem, epoch=10, pop_size=50, wf=0.8, cr=0.9, name=model_name, fit_name=function_name)
    _, best_fitness = optimizer.solve()
    print(f"Finish function: {function_name}")
    return {
        "func_name": function_name,
        "best_fit": [best_fitness],
        "error": optimizer.history.list_global_best_fit,
    }
if __name__ == '__main__':
    ## Run every benchmark function in parallel and collect the results.
    best_fit_full, best_fit_columns = {}, []
    error_full, error_columns = {}, []
    with parallel.ProcessPoolExecutor() as executor:
        # map() yields results in submission order, so the CSV columns
        # come out in func_names order.
        for outcome in executor.map(partial(find_minimum, n_dims=n_dims), func_names):
            func = outcome["func_name"]
            error_full[func] = outcome["error"]
            error_columns.append(func)
            best_fit_full[func] = outcome["best_fit"]
            best_fit_columns.append(func)

    DataFrame(error_full, columns=error_columns).to_csv(
        f"{PATH_RESULTS}{n_dims}D_{model_name}_error.csv", header=True, index=False)
    DataFrame(best_fit_full, columns=best_fit_columns).to_csv(
        f"{PATH_RESULTS}{n_dims}D_{model_name}_best_fit.csv", header=True, index=False)
| 36.692308 | 137 | 0.6 | 301 | 2,385 | 4.468439 | 0.438538 | 0.052045 | 0.041636 | 0.02974 | 0.157621 | 0.047584 | 0.047584 | 0.047584 | 0.047584 | 0.047584 | 0 | 0.040243 | 0.239413 | 2,385 | 64 | 138 | 37.265625 | 0.701213 | 0.165199 | 0 | 0 | 0 | 0 | 0.178931 | 0.060484 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021739 | false | 0 | 0.130435 | 0 | 0.173913 | 0.043478 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77798b6704167fd268a08dbc9c3f502cb2811a76 | 9,876 | py | Python | table_sorter.py | manawesome326/table-sorter | 53c6b0cdcf6b70216e1bf0a69bdd6b193cf64604 | [
"Unlicense"
] | null | null | null | table_sorter.py | manawesome326/table-sorter | 53c6b0cdcf6b70216e1bf0a69bdd6b193cf64604 | [
"Unlicense"
] | null | null | null | table_sorter.py | manawesome326/table-sorter | 53c6b0cdcf6b70216e1bf0a69bdd6b193cf64604 | [
"Unlicense"
] | null | null | null | import random
import math
import time
import copy
allstudents = []  # module-wide registry: every Student ever constructed


class Student:
    """One survey respondent together with their seating preferences."""

    def __init__(self, name, friends, robot=False):
        self.name = name
        self.__name__ = name        # mirror of name (purpose unclear -- kept as-is)
        self.robot = robot          # True for randomly-generated test students
        self.friends = friends      # name (later rebound to Student) -> rating
        self.currenttable = 0       # index of the currently assigned table
        allstudents.append(self)    # auto-register on construction
################################################################
#ignore anything above this line
#example students
#add new ones by pasting more below here with this exact syntax
#make sure to get the names right!
#the first name is the person's name. The stuff in the curly brackets is the other people,
#and how much this person wants to be in a group with them.
#For example here greg has given jordan a rating of -1, and alexandria a rating of -0.99
#I might be able to help you convert other formats into this if you need it. I admit it's not the most convenient.
#Example students. First argument: the person's name; the dict maps other
#people's names to how much this person wants to sit with them
#(e.g. greg rates jordan -1 and alexandria -0.99).
Student("greg",{"jordan":-1,"alexandria":-0.99})
Student("jordan",{"greg":1,"alexandria":100})
Student("alexandria",{"greg":0.2,"jordan":1})
#and so on
#students are allowed to set a preference for themselves, but this has no effect.
#Students do not need to give a preference to every user - unknowns default to...
value_of_unknown_people = 0 #If you want people to meet new people, set this higher! Lower if being friends is useful.
#This is a completely untested feature. 0 is probably a good value. Anything more than 20% of the range people are setting preferences in is probably very silly.
#Preferences can be any number, including negatives and decimals.
#Use of precise decimals prevents some minor weirdness that (might) worsen your results
#So ask your respondents to be as precise as they like in their rankings!
#Failing that, set the below value to True to run some code to jitter the values a little...
jitter = False
#Negative numbers are treated differently by the code - ask people to only use them on people they really do not want to share a group with!
#Student generator: creates random "robots" for testing the solver.
#Leave it on 0 for real use.
robots_to_add = 0
total_tables =20 #set to the number of tables, of course
t_pop = 5 #and how many people go on each table
#note: the program will crash if there isn't enough room on the tables!
#However, it is fine to have too many tables for the person count!
goes = 10 #Adjust this value upwards if the program finishes too fast!
#Each "go" is one full hill-climbing attempt starting from the initial seating.
#A good 5 minute run will help find the best groups, especially if there's a lot of people.
#On the other hand, if it seems to be taking a very long time, lower the value and accept that things won't quite be perfect.
#No matter how many attempts you take and thus what result you get, it'll be very unlikely that any singular swap of people will improve the rating of it.
#no more config options are below this line.
#################################################################
# --- Build the initial seating -------------------------------------------
for i in range(robots_to_add):
    Student("robot " + str(i),{},True)
tables = []
for i in range(total_tables):
    tables.append([].copy())
count = 0
ghostly_hatred = {}
# Resolve name-keyed preferences into Student-keyed ones, and deal students
# onto tables in registration order, t_pop per table.
for thing in allstudents:
    if thing.robot:
        for i in range(10):
            this = random.choice(allstudents)
            thing.friends[this] = random.randint(-10,10)
            print(thing.name + " thinks " + this.name + " is worth " + str(thing.friends[this]))
            #only used for the robot people: random ratings of 10 random students
    else:
        tempdict = {}
        for key in thing.friends.keys():
            # Look up the Student object whose name matches this key.
            tempdict[[x for x in allstudents if x.name == key][0]] = thing.friends[key]
            print(thing.name + " thinks " + [x for x in allstudents if x.name == key][0].name + " is worth " + str(tempdict[[x for x in allstudents if x.name == key][0]]))
        thing.friends = tempdict
    tables[math.floor(count/t_pop)].append(thing)
    thing.currenttable = math.floor(count/t_pop)
    count += 1
    ghostly_hatred[thing] = -0.1 #everybody slightly dislikes ghosts. Actually, I don't know what this does, I wrote this code ages ago.
for table in tables:
    while len(table) < t_pop:
        table.append(Student("a ghost!",ghostly_hatred))
#ghosts are added to tables that aren't full.
#I didn't consider having one big group instead of one small one,
#so if you need that, probably just have your extra people pick new groups on their own
for table in tables:
    print("Table " + str(tables.index(table)) + ": ")
    for person in table:
        print(person.name)
#this section does something which probably leads to better results:
#if person A gives person B a positive rating, but person B gives person A a negative rating,
#Person A's rating of person B is set to half of person B's rating of person A.
#The effect of this is that a malicious person who wishes to be on the same table as somebody who wants to avoid them
#is less likely to succeed in this. But they won't be separated as strongly as two people who *both* hate eachother!
for student in allstudents:
    for friend in student.friends.keys():
        try:
            if (student.friends[friend] < 0) and (friend.friends[student] >= 0):
                friend.friends[student] = student.friends[friend]/2
                print(friend.name + " has decided " + student.name + " is actually only worth " + str(friend.friends[student]))
        except KeyError:
            # friend has no rating of student at all: copy half the negative.
            friend.friends[student] = student.friends[friend]/2
            print(friend.name + " has never heard of " + student.name + ", but now dislikes them with a value of " + str(friend.friends[student]))
if jitter:
    # Tiny random noise to break exact ties between preference values.
    for student in allstudents:
        for friend in student.friends.keys():
            student.friends[friend] = student.friends[friend] + random.uniform(0.00001,0.00002)
def score_eval(tables):
    """Return the total happiness of a seating arrangement.

    Every student at a table contributes their rating of each table-mate;
    pairs without an explicit rating contribute the module-level
    ``value_of_unknown_people``. Higher is better.

    Fix: removed the dead ``tablescore`` accumulator, which was written on
    every iteration but never read or returned.
    """
    score = 0
    for table in tables:
        for student in table:
            for partner in table:
                try:
                    score += student.friends[partner]
                except KeyError:
                    # No rating recorded for this partner.
                    score += value_of_unknown_people
    return score
# Score of the naive initial seating, before any optimisation.
print("Basic happiness: " + str(score_eval(tables)))
# Pristine snapshot: every optimisation attempt restarts from this seating.
initial_tables = copy.deepcopy(tables)
def test(no_leeching, randomer):
    """Run `goes` hill-climbing attempts and print the best seating found.

    no_leeching -- also reject swaps that lower either affected table's own
                   score, even when the global score improves.
    randomer    -- scramble the seating with many random swaps before each
                   attempt, so attempts start from different arrangements.

    NOTE(review): the per-group printout at the end shows the *last*
    attempt's tables, not necessarily the record-scoring arrangement --
    confirm whether the best tables should be saved when the record breaks.
    """
    world_record = score_eval(initial_tables)
    give_up = 0
    record_breaks = 0
    # t_pop^2 * C(total_tables, 2): the number of distinct cross-table seat
    # pairs. Once this many swaps have been tried and rejected, stop.
    maximum_swaps = int(t_pop*t_pop*(math.factorial(total_tables)/(math.factorial(total_tables-2)*2)))
    print("maximum should be " +str(maximum_swaps))
    maximum_boredom = maximum_swaps*2
    for i in range(goes):
        print("starting attempt " + str(i))
        tables = copy.deepcopy(initial_tables)
        give_up = 0
        #randoming: scramble the start point (note: this inner loop reuses
        #the outer loop's name ``i``; harmless here, but easy to trip over)
        if randomer:
            for i in range(maximum_swaps*4):
                swap_table_1 = random.choice(tables)
                swap_table_2 = random.choice(tables)
                while swap_table_2 == swap_table_1:
                    swap_table_2 = random.choice(tables)
                swap_student_1 = random.randint(0,len(swap_table_1)-1)
                swap_student_2 = random.randint(0,len(swap_table_2)-1)
                swap_table_1[swap_student_1], swap_table_2[swap_student_2] = swap_table_2[swap_student_2], swap_table_1[swap_student_1]
        print("starting happiness: " + str(score_eval(tables)))
        boredom = 0
        attempts = []  # name-pairs of swaps already tried and rejected
        while True:
            # Pick a random cross-table pair not already known to fail.
            while True:
                swap_table_1 = random.choice(tables)
                swap_table_2 = random.choice(tables)
                while swap_table_2 == swap_table_1:
                    swap_table_2 = random.choice(tables)
                swap_student_1 = random.randint(0,len(swap_table_1)-1)
                swap_student_2 = random.randint(0,len(swap_table_2)-1)
                if not (swap_table_1[swap_student_1].name + swap_table_2[swap_student_2].name in attempts):
                    break
            current_score = score_eval(tables)
            if no_leeching:
                current_s_1 = score_eval([swap_table_1])
                current_s_2 = score_eval([swap_table_2])
            # Apply the candidate swap...
            swap_table_1[swap_student_1], swap_table_2[swap_student_2] = swap_table_2[swap_student_2], swap_table_1[swap_student_1]
            new_score = score_eval(tables)
            if no_leeching:
                new_s_1 = score_eval([swap_table_1])
                new_s_2 = score_eval([swap_table_2])
            # ...and undo it if it made the total worse (or, under
            # no_leeching, hurt either table individually).
            if (new_score < current_score) or (no_leeching and ((new_s_1 < current_s_1) or (new_s_2 < current_s_2))):
                attempts.append(swap_table_1[swap_student_1].name + swap_table_2[swap_student_2].name)
                swap_table_1[swap_student_1], swap_table_2[swap_student_2] = swap_table_2[swap_student_2], swap_table_1[swap_student_1]
                #SEND EM BACK
                #print(current_score)
                give_up += 1
            else:
                give_up = 0
                #attempts = []
                #print("swap!" + str(random.random()))
            #print(maximum_boredom-boredom)
            if new_score == current_score:
                #boredom += 1
                #if boredom > maximum_boredom:
                #    print("What!")
                #    break
                pass
            else:
                # Score changed: previously rejected swaps may work now,
                # so forget them and keep exploring.
                boredom = 0
                attempts = []
            if len(attempts) > 999999:
                print(len(attempts))
            if len(attempts) >= maximum_swaps:
                break
        if current_score > world_record:
            world_record = current_score
            record_breaks += 1
            print("Record broken!")
            #time.sleep(0.5)
        elif current_score == world_record:
            print("Record found again!")
        else:
            print("no record broken!")
            #time.sleep(0.5)
        print(current_score)
    print("Total happiness: " + str(world_record))
    print("Average happiness: " + str(world_record/len(allstudents)))
    for table in tables:
        print("Group " + str(tables.index(table)) + ": ")
        print("Happiness: " + str(score_eval([table])))
        if score_eval([table]) < 0:
            print("This group has a negative score! They probably won't have a lot of fun. You probably shouldn't be seeing this; maybe try running the program again?")
        for person in table:
            print(person.name)
# Run the optimiser both with and without the "leeching" restriction so the
# user can compare the two result sets.
print("Results for no leeching:")
test(True, True)
print("Results for yes leeching")
test(False, True)
#In "no leeching" trials, a swap that improves the rating of one table at the expense of another is not allowed.
#I'm unsure as to whether this actually improves the results you get. Thus why the program gives you results both without and with it.
print("Type \"yes\" and hit enter to leave this program. This will likely vanish your results, so copy them somewhere first! ")
while True:
    # Fix: startswith() instead of indexing [0], so pressing enter on an
    # empty line re-prompts instead of crashing with an IndexError.
    if input("> ").startswith('y'):
        break
| 37.984615 | 163 | 0.714662 | 1,594 | 9,876 | 4.297365 | 0.230238 | 0.044672 | 0.026277 | 0.022482 | 0.228759 | 0.197226 | 0.190511 | 0.159854 | 0.159854 | 0.159854 | 0 | 0.018907 | 0.175273 | 9,876 | 259 | 164 | 38.131274 | 0.822099 | 0.352471 | 0 | 0.319018 | 0 | 0.01227 | 0.11308 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018405 | false | 0.006135 | 0.02454 | 0 | 0.055215 | 0.147239 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
777b5e5839004ea6dfd6b25eec2bff8b3f5e00ac | 1,045 | py | Python | unit_test/test_obj_avoidance.py | riven314/capstone_dash_interface | 5eab25f4c15ad09aa889554820231175b0a3ed28 | [
"CC0-1.0"
] | 1 | 2019-12-10T14:59:12.000Z | 2019-12-10T14:59:12.000Z | unit_test/test_obj_avoidance.py | riven314/capstone_dash_interface | 5eab25f4c15ad09aa889554820231175b0a3ed28 | [
"CC0-1.0"
] | null | null | null | unit_test/test_obj_avoidance.py | riven314/capstone_dash_interface | 5eab25f4c15ad09aa889554820231175b0a3ed28 | [
"CC0-1.0"
] | 1 | 2020-01-01T12:24:51.000Z | 2020-01-01T12:24:51.000Z | import os
import sys
import time
PATH = os.path.join(os.getcwd(), '..')
sys.path.append(PATH)
import cv2
import numpy as np
import matplotlib.pyplot as plt
from obj_avoidance import run_avoidance
# for the reference
label_dict = {1: 'wall', 2: 'floor', 3: 'plant', 4: 'ceiling', 5: 'furniture', 6: 'person', 7: 'door', 8: 'objects'}
# read in image
D1_IMG_PATH = os.path.join(os.getcwd(), '..', 'test_cases', 'test_obj_avoid_resize_d1.png')
SEG_IDX_PATH = os.path.join(os.getcwd(), '..', 'test_cases', 'test_obj_avoid_pred_idx.png')
d1_img = cv2.imread(D1_IMG_PATH, cv2.IMREAD_GRAYSCALE)
seg_idx = cv2.imread(SEG_IDX_PATH, cv2.IMREAD_GRAYSCALE)
for i in range(5):
start = time.time()
obj_tup, obj_img = run_avoidance(d1_img, seg_idx, depth_threshold = 8, visible_width = 90)
end = time.time()
print('obj_tup = {}'.format(obj_tup))
print('runtime: {}'.format(end - start))
rgb_img = cv2.cvtColor(obj_img, cv2.COLOR_GRAY2RGB)
plt.imshow(obj_img)
plt.show()
plt.imshow(rgb_img)
plt.show()
| 30.735294 | 117 | 0.683254 | 168 | 1,045 | 4.017857 | 0.440476 | 0.02963 | 0.044444 | 0.062222 | 0.16 | 0.16 | 0.127407 | 0.127407 | 0.127407 | 0.127407 | 0 | 0.028441 | 0.158852 | 1,045 | 33 | 118 | 31.666667 | 0.739477 | 0.029665 | 0 | 0.08 | 0 | 0 | 0.154555 | 0.056295 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.28 | 0 | 0.28 | 0.08 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
777c099bcd6dbabf7bd899cf4da7520acddaee95 | 1,060 | py | Python | tests/core/test_security.py | congdh/fastapi-async-realworld | 608dc6f090f8a02e0a880cef33dca90df78cbfb5 | [
"MIT"
] | null | null | null | tests/core/test_security.py | congdh/fastapi-async-realworld | 608dc6f090f8a02e0a880cef33dca90df78cbfb5 | [
"MIT"
] | null | null | null | tests/core/test_security.py | congdh/fastapi-async-realworld | 608dc6f090f8a02e0a880cef33dca90df78cbfb5 | [
"MIT"
] | 3 | 2020-10-04T09:37:21.000Z | 2022-02-13T08:57:35.000Z | from datetime import timedelta
import pytest
from fastapi import HTTPException
from pydantic import SecretStr
from app.core.security import (
create_access_token,
get_password_hash,
get_user_id_from_token,
verify_password,
)
pytestmark = pytest.mark.asyncio
async def test_access_token():
    """A created token must round-trip back to the original user id,
    with and without an explicit expiry."""
    user_id = 1
    for expiry in (None, timedelta(minutes=1234)):
        if expiry is None:
            token = create_access_token(user_id)
        else:
            token = create_access_token(user_id, expiry)
        assert int(get_user_id_from_token(token)) == user_id
def test_get_user_id_from_wrong_token():
    """Decoding a malformed token must raise HTTPException."""
    with pytest.raises(HTTPException):
        get_user_id_from_token("wrong-token")
def test_verify_password():
    """Hashing a password and verifying it against itself must succeed."""
    secret = SecretStr("abcxyz")
    hashed = get_password_hash(secret)
    assert verify_password(plain_password=secret, hashed_password=hashed)
def test_verify_password_str():
    """Passing a plain str (instead of SecretStr) must fail fast."""
    with pytest.raises(AttributeError, match=r"get_secret_value"):
        verify_password("abc", "abc")
| 23.555556 | 66 | 0.737736 | 142 | 1,060 | 5.147887 | 0.323944 | 0.082079 | 0.06156 | 0.088919 | 0.269494 | 0.24487 | 0.136799 | 0.136799 | 0.136799 | 0.136799 | 0 | 0.005747 | 0.179245 | 1,060 | 44 | 67 | 24.090909 | 0.834483 | 0 | 0 | 0.125 | 0 | 0 | 0.036792 | 0 | 0 | 0 | 0 | 0 | 0.09375 | 1 | 0.09375 | false | 0.25 | 0.15625 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
777c87854da8e0921a6611366e0397f0b9cee628 | 5,199 | py | Python | rmexp/schedule.py | junjuew/scalable-edge-native-applications | 1ace9ce71bef267880049633160e7cb74c57bddb | [
"Apache-2.0"
] | 1 | 2021-05-12T12:49:15.000Z | 2021-05-12T12:49:15.000Z | rmexp/schedule.py | junjuew/scalable-edge-native-applications | 1ace9ce71bef267880049633160e7cb74c57bddb | [
"Apache-2.0"
] | null | null | null | rmexp/schedule.py | junjuew/scalable-edge-native-applications | 1ace9ce71bef267880049633160e7cb74c57bddb | [
"Apache-2.0"
] | 1 | 2021-11-21T08:12:19.000Z | 2021-11-21T08:12:19.000Z | from __future__ import absolute_import, division, print_function
import logging
import select
import subprocess
import fire
import logzero
import numpy as np
from logzero import logger
import itertools
import scipy
import scipy.optimize
import cPickle as pickle
logzero.loglevel(logging.DEBUG)
def group(lst, n):
    """group([0,3,4,10,2,3], 2) => iterator

    Group an iterable into an n-tuples iterable. Incomplete tuples
    are discarded e.g.

    >>> list(group(range(10), 3))
    [(0, 1, 2), (3, 4, 5), (6, 7, 8)]
    """
    # Fix: use the builtin zip instead of itertools.izip so this works on
    # both Python 2 and Python 3 (izip does not exist on py3). The n
    # staggered islice views walk the sequence in lock-step, and zip drops
    # any incomplete trailing tuple.
    return zip(*[itertools.islice(lst, i, None, n) for i in range(n)])
class ScipySolver(object):
    """Solve the CPU/memory/worker allocation problem with scipy.optimize.

    NOTE(review): ``_get_app_util`` uses Python-2-only tuple parameter
    unpacking, and ``map`` results are fed straight into numpy -- this
    module appears to target Python 2 (see the ``cPickle`` import above).
    """
    def __init__(self, fair=False, brute_force=False):
        super(ScipySolver, self).__init__()
        self.fair = fair                  # maximise the minimum app utility instead of the sum
        self.brute_force = brute_force    # grid search instead of constrained minimize
    def solve(self, cpu, mem, apps, max_clients):
        """Allocate ``cpu`` and ``mem`` across ``apps``.

        The decision vector x is [cpus..., mems..., worker-counts...], one
        entry per app. Returns scipy's raw result tuple under brute force,
        otherwise (success, best_total_utility, x rounded to 1 decimal).
        """
        x0 = zip(*[app.x0 for app in apps])
        util_funcs = [app.util_func for app in apps]
        def _get_app_util((util_func, cpu, mem, k)):
            # Utility of one app: k workers times per-worker utility.
            # print(util_func, cpu, mem, k)
            return k * util_func(cpu, mem)
        def total_util_func(x):
            # Objective, negated because scipy minimises.
            assert(len(x) % 3 == 0)
            cpus = x[:len(apps)]
            mems = x[len(apps): 2*len(apps)]
            ks_raw = x[2*len(apps):]
            ks = np.floor(ks_raw)  # worker counts must be integral
            utils = map(_get_app_util, zip(util_funcs, cpus, mems, ks))
            if self.fair:  # max min
                util_total = np.min(utils)
            else:  # total util
                util_total = sum(utils)
            logger.debug("total: {}, utils: {}, x: {}".format(
                np.around(util_total, 1),
                np.around(utils, 1),
                np.around(x, 1)))
            return -util_total
        def cpu_con(x):
            # Equality constraint: all CPU must be allocated.
            cpus = x[:len(apps)]
            return cpu - np.sum(cpus)
        def mem_con(x):
            # Equality constraint: all memory must be allocated.
            mems = x[len(apps): 2*len(apps)]
            return mem - np.sum(mems)
        def kworker_con(x):
            # Inequality (>= 0): per-app worker service rate ks * 1000/latency
            # (latencies are in ms) must not exceed max_clients * 30
            # (presumably 30 requests/s per client -- confirm).
            cpus = x[:len(apps)]
            mems = x[len(apps): 2*len(apps)]
            ks = np.floor(x[2*len(apps):])
            latency_funcs = [app.latency_func for app in apps]
            latencies = np.array(map(lambda arg: arg[0](
                arg[1], arg[2]), zip(latency_funcs, cpus, mems)))
            return np.array(max_clients) * 30. - ks * 1000. / latencies
        # feasible region: cpus/mems in (0.01, budget], workers in [0, max_clients]
        bounds = [(0.01, cpu) for _ in apps] + [(0.01, mem)
                                                for _ in apps] + list(zip([0]*len(apps), max_clients))
        if self.brute_force:
            rranges = []
            for item in bounds:
                rranges.append(slice(item[0], item[1], 0.5))  # 0.5-step grid per dimension
            logger.debug(rranges)
            res = scipy.optimize.brute(
                total_util_func, rranges, full_output=True, finish=None)
            return res
        else:
            # constraints: exhaust total resources; respect worker capacity
            cons = [
                {'type': 'eq', 'fun': cpu_con},
                {'type': 'eq', 'fun': mem_con},
                {'type': 'ineq', 'fun': kworker_con},
                # ks should be larger or equal than 0
                {'type': 'ineq', 'fun': lambda x: x[2*len(apps):]},
            ]
            res = scipy.optimize.minimize(
                total_util_func, (np.array(x0[0]), np.array(x0[1]), np.array([0.] * len(max_clients))), constraints=cons, bounds=bounds, tol=1e-6)
            return res.success, -res.fun, np.around(res.x, decimals=1)
class Allocator(object):
    """Allocate CPU, Memory to applications by delegating to a solver."""

    def __init__(self, solver):
        super(Allocator, self).__init__()
        self.solver = solver  # object exposing solve(cpu, mem, apps, ...)

    def solve(self, cpu, mem, apps, *args, **kwargs):
        """Forward the allocation problem to the configured solver."""
        return self.solver.solve(cpu, mem, apps, *args, **kwargs)
class AppUtil(object):
    """Utility/latency profile of one application, loaded from pickled files.

    NOTE(review): the profile paths are hard-coded absolute paths under
    /home/junjuew/... -- this only runs on the machine that produced them.
    """
    def __init__(self, app, exp='c001-cg-wall-w1'):
        super(AppUtil, self).__init__()
        self.app = app    # application name, e.g. 'lego'
        self.exp = exp    # experiment id selecting which profile files to load
        self.util_func = self._load_util_func()        # (cpu, mem) -> utility
        self.latency_func = self._load_latency_func()  # (cpu, mem) -> latency
        self.x0 = (1, 2)  # initial guess (cpu, mem) for the optimiser
    def _load_util_func(self):
        # Unpickle the utility profile for this (experiment, app) pair.
        path = '/home/junjuew/work/resource-management/data/profile/{}-{}.pkl'.format(
            self.exp, self.app)
        logger.debug("Using profile {}".format(path))
        with open(path, 'rb') as f:
            util_func = pickle.load(f)
        return util_func
    def _load_latency_func(self):
        """Latencies are in ms"""
        path = '/home/junjuew/work/resource-management/data/profile/latency-{}-{}.pkl'.format(
            self.exp, self.app)
        logger.debug("Using profile {}".format(path))
        with open(path, 'rb') as f:
            util_func = pickle.load(f)
        return util_func
if __name__ == '__main__':
    # Example run. Observed at cpu=1, mem=2: pingpong dominates.
    # dominance: pingpong >> lego >>
    allocator = Allocator(ScipySolver(fair=False, brute_force=False))
    # for cpu in range(1, 6):
    # cpu = 1
    cpu = 2                              # total CPU budget to allocate
    mem = 4                              # total memory budget to allocate
    max_clients = [2.5, 3.5, 1.5, 1.5]   # per-app client ceilings
    app_names = ['lego', 'pingpong', 'pool', 'face']
    apps = map(AppUtil, app_names)       # build one profile wrapper per app
    logger.info(allocator.solve(cpu, mem, apps, max_clients=max_clients))
| 32.698113 | 146 | 0.553568 | 694 | 5,199 | 3.994236 | 0.259366 | 0.040404 | 0.017316 | 0.012987 | 0.235931 | 0.174603 | 0.142857 | 0.135642 | 0.10101 | 0.10101 | 0 | 0.023295 | 0.306405 | 5,199 | 158 | 147 | 32.905063 | 0.745424 | 0.045009 | 0 | 0.162162 | 0 | 0 | 0.059037 | 0.027807 | 0 | 0 | 0 | 0 | 0.009009 | 0 | null | null | 0 | 0.108108 | null | null | 0.009009 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
777d83516dc3cad54a0ce07bf96a195097e68974 | 1,519 | py | Python | demoapp/cabinet_structure/settings.py | pythoninner/myfistpython | c1f52d8a3a284a89c0f1e33615067e8845aa1617 | [
"MIT"
] | 17 | 2015-12-10T02:09:07.000Z | 2018-06-25T06:46:59.000Z | demoapp/cabinet_structure/settings.py | pythoninner/myfistpython | c1f52d8a3a284a89c0f1e33615067e8845aa1617 | [
"MIT"
] | 6 | 2015-12-09T08:09:52.000Z | 2016-01-11T06:53:10.000Z | demoapp/cabinet_structure/settings.py | pythoninner/myfistpython | c1f52d8a3a284a89c0f1e33615067e8845aa1617 | [
"MIT"
] | 9 | 2015-12-10T09:04:00.000Z | 2019-07-12T13:33:25.000Z | import logging
from django.conf import settings
logger = logging.getLogger(__name__)
class VerifySettings(object):
    """Validate MAX_CABINET_ROWS_NUM from Django settings against a cabinet."""
    __settings_name__ = 'MAX_CABINET_ROWS_NUM'
    __error_msg_dict__ = {'no_attribute': ("MAX_CABINET_ROWS_NUM must be specified in "
                                           "your Django settings file"),
                          'no_int': ("MAX_CABINET_ROWS_NUM "
                                     "must be specified integer"),
                          'min_cells': ("cabinet_cells "
                                        "is bigger than MAX_CABINET_ROWS_NUM")}
    def __init__(self, cabinet_cells):
        # Number of cells the cabinet actually provides.
        self.cabinet_cells = cabinet_cells
    @property
    def settings_max_cabinet_num(self):
        # Falls back to 6 when the setting is absent; as a result the
        # 'no_attribute' message above is currently never raised.
        return getattr(
            settings, self.__settings_name__, 6
        )
    def verify_cabinet_num_type(self):
        """Return the setting value, raising TypeError unless it is an int."""
        if not isinstance(self.settings_max_cabinet_num, int):
            log_msg = self.__error_msg_dict__['no_int']
            logger.info(log_msg)
            raise TypeError(log_msg)
        else:
            return self.settings_max_cabinet_num
    def verify_cabinet_cells_gt_max_cabinet_num(self):
        """Check cabinet_cells against the configured maximum.

        NOTE(review): the condition raises when cabinet_cells is *smaller*
        than the setting, but the 'min_cells' message says "bigger" -- one
        of the two looks inverted; confirm the intended contract.
        """
        if self.cabinet_cells < self.settings_max_cabinet_num:
            raise TypeError(self.__error_msg_dict__['min_cells'])
        return self.verify_cabinet_num_type()
class SettingsConfig(VerifySettings):
    """Public entry point for validating the cabinet row configuration."""

    def verify(self):
        """Return the validated maximum number of cabinet rows."""
        parent = super(SettingsConfig, self)
        return parent.verify_cabinet_cells_gt_max_cabinet_num()
| 33.755556 | 92 | 0.631995 | 175 | 1,519 | 4.942857 | 0.308571 | 0.115607 | 0.090173 | 0.078613 | 0.236994 | 0.150289 | 0.150289 | 0 | 0 | 0 | 0 | 0.000935 | 0.296248 | 1,519 | 44 | 93 | 34.522727 | 0.808232 | 0 | 0 | 0 | 0 | 0 | 0.147465 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.151515 | false | 0 | 0.060606 | 0.030303 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
777dd8bf196474b662d087b28f11dea01ef85e30 | 756 | py | Python | l_04_list_and_dictionaries/dictionaries/ex_08_filter_base.py | VasAtanasov/SoftUni-Python-Fundamentals | 471d0537dd6e5c8b61ede92b7673c0d67e2964fd | [
"MIT"
] | 1 | 2019-06-05T11:16:08.000Z | 2019-06-05T11:16:08.000Z | l_04_list_and_dictionaries/dictionaries/ex_08_filter_base.py | VasAtanasov/SoftUni-Python-Fundamentals | 471d0537dd6e5c8b61ede92b7673c0d67e2964fd | [
"MIT"
] | null | null | null | l_04_list_and_dictionaries/dictionaries/ex_08_filter_base.py | VasAtanasov/SoftUni-Python-Fundamentals | 471d0537dd6e5c8b61ede92b7673c0d67e2964fd | [
"MIT"
] | null | null | null | def check_type(text):
try:
if float(text) == int(float(text)):
return {'Age': int(float(text))}
elif float(text) != int(float(text)):
return {'Salary': float(text)}
except ValueError:
return {'Position': text}
# Read "name -> value" lines until the 'filter base' sentinel, then print
# every employee whose record contains the requested criteria field.
employees_data = []
while True:
    line = input()
    if line == 'filter base':
        break
    name, value = [part for part in line.split(" -> ") if part]
    employees_data.append({name: check_type(value)})

criteria = input()
separator = '=' * 20
output_parts = []
for record in employees_data:
    for name, data in record.items():
        if criteria in data:
            output_parts.append(
                f'Name: {name}\n{criteria}: {data[criteria]}\n{separator}\n')
print(''.join(output_parts))
| 23.625 | 82 | 0.589947 | 95 | 756 | 4.6 | 0.442105 | 0.12357 | 0.08238 | 0.077803 | 0.12357 | 0.12357 | 0 | 0 | 0 | 0 | 0 | 0.003515 | 0.247355 | 756 | 31 | 83 | 24.387097 | 0.764499 | 0 | 0 | 0 | 0 | 0 | 0.119048 | 0.041005 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043478 | false | 0 | 0 | 0 | 0.173913 | 0.043478 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
777ef7869d9e99660684cbd42f3a06660212884f | 905 | py | Python | install_release.py | NunoEdgarGFlowHub/poptorch | 2e69b81c7c94b522d9f57cc53d31be562f5e3749 | [
"MIT"
] | null | null | null | install_release.py | NunoEdgarGFlowHub/poptorch | 2e69b81c7c94b522d9f57cc53d31be562f5e3749 | [
"MIT"
] | null | null | null | install_release.py | NunoEdgarGFlowHub/poptorch | 2e69b81c7c94b522d9f57cc53d31be562f5e3749 | [
"MIT"
] | null | null | null | # Copyright (c) 2018 Graphcore Ltd. All rights reserved.
# This script is run by the release agent to create a release of PopTorch
def install_release(release_utils, release_id, snapshot_id, version_str):
    """Create a PopTorch release: tag the docs, tag the view repository and
    bump the point version."""
    # Tag must contain the string 'poptorch' to keep it unique.
    tag = "{}-poptorch".format(version_str)
    release_utils.log.info('Tagging poptorch release ' + tag)

    # Create the release on the document server.
    release_utils.create_document_release(snapshot_id)

    # Tag the view repository with the release.
    view_repo_url = ('ssh://git@phabricator.sourcevertex.net/diffusion/'
                     'POPONNXVIEW/poponnxview.git')
    release_utils.tag_view_repo(view_repo_url, snapshot_id, release_id, tag)

    # Increment the point version number.
    poptorch_repo_url = ('ssh://git@phabricator.sourcevertex.net/diffusion/'
                         'POPTORCH/poptorch.git')
    release_utils.increment_version_point(poptorch_repo_url)
| 34.807692 | 73 | 0.707182 | 115 | 905 | 5.4 | 0.486957 | 0.096618 | 0.061192 | 0.093398 | 0.132045 | 0.132045 | 0 | 0 | 0 | 0 | 0 | 0.00554 | 0.20221 | 905 | 25 | 74 | 36.2 | 0.854571 | 0.337017 | 0 | 0.153846 | 0 | 0 | 0.306914 | 0.246206 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
778181d3874c190067f24ee03aa82a2d0d031e77 | 1,667 | py | Python | ciscosparkapi/tests/conftest.py | Futuramistic/Bot | e22672e9d627faf3d9393feb04d214cb62dec98d | [
"MIT"
] | null | null | null | ciscosparkapi/tests/conftest.py | Futuramistic/Bot | e22672e9d627faf3d9393feb04d214cb62dec98d | [
"MIT"
] | 1 | 2021-06-01T21:52:12.000Z | 2021-06-01T21:52:12.000Z | ciscosparkapi/tests/conftest.py | Futuramistic/Bot | e22672e9d627faf3d9393feb04d214cb62dec98d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""pytest configuration and top-level fixtures."""
__author__ = "Chris Lunsford"
__author_email__ = "chrlunsf@cisco.com"
__copyright__ = "Copyright (c) 2016-2018 Cisco and/or its affiliates."
__license__ = "MIT"
import os
import shutil
import string
import tempfile

import pytest

from tests.utils import download_file
# Test modules loaded as pytest plugins so their fixtures are shared.
pytest_plugins = [
    'tests.test_ciscosparkapi',
    'tests.api.test_memberships',
    'tests.api.test_messages',
    'tests.api.test_people',
    'tests.api.test_rooms',
    'tests.api.test_teammemberships',
    'tests.api.test_teams',
    'tests.api.test_webhooks',
    'tests.api.test_organizations',
    'tests.api.test_licenses',
    'tests.api.test_roles',
]
TEST_DOMAIN = "cmlccie.com"
TEST_FILE_URL = "https://developer.ciscospark.com/images/logo_spark_lg@256.png"

email_template = string.Template("test${number}@" + TEST_DOMAIN)


# Helper Functions
def new_email_generator():
    """Yield unique test e-mail addresses: test50@..., test51@..., ..."""
    counter = 50
    while True:
        yield email_template.substitute(number=counter)
        counter += 1
# pytest Fixtures
@pytest.fixture("session")
def temp_directory():
    """Session-scoped temporary directory, deleted recursively on teardown."""
    directory_abs_path = tempfile.mkdtemp()
    yield directory_abs_path
    # Fix: shutil.rmtree instead of os.rmdir, so teardown also succeeds
    # when a test leaves files behind in the directory.
    shutil.rmtree(directory_abs_path)
@pytest.fixture("session")
def local_file(temp_directory):
    # Download the test image into the session temp directory; the file is
    # removed again on teardown.
    file = download_file(TEST_FILE_URL, temp_directory)
    yield file
    os.remove(file)
@pytest.fixture(scope="session")
def get_new_email_address():
    """Return a callable that yields a fresh unique test e-mail per call."""
    generator = new_email_generator()
    return lambda: next(generator)
| 21.101266 | 80 | 0.675465 | 200 | 1,667 | 5.32 | 0.455 | 0.075188 | 0.112782 | 0.043233 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011442 | 0.213557 | 1,667 | 78 | 81 | 21.371795 | 0.800153 | 0.059988 | 0 | 0.042553 | 0 | 0 | 0.305199 | 0.133693 | 0 | 0 | 0 | 0 | 0 | 1 | 0.106383 | false | 0 | 0.106383 | 0.021277 | 0.255319 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77823bcef33f848f86c9133fcce12df4adef4d86 | 27 | py | Python | __init__.py | GRV96/hm_duration | 89de54a114cab42862dbe6b6dd5b2180adf2ee0d | [
"MIT"
] | null | null | null | __init__.py | GRV96/hm_duration | 89de54a114cab42862dbe6b6dd5b2180adf2ee0d | [
"MIT"
] | null | null | null | __init__.py | GRV96/hm_duration | 89de54a114cab42862dbe6b6dd5b2180adf2ee0d | [
"MIT"
] | null | null | null | from .hm_duration import *
| 13.5 | 26 | 0.777778 | 4 | 27 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148148 | 27 | 1 | 27 | 27 | 0.869565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
778378ad2cae07b1eae7fe45253c50b387c8c03b | 256 | py | Python | animations/color/list.py | LeLuxNet/GridPy | 5f4d02d2b254be1f0682b724a96a99009a415308 | [
"MIT"
] | null | null | null | animations/color/list.py | LeLuxNet/GridPy | 5f4d02d2b254be1f0682b724a96a99009a415308 | [
"MIT"
] | 1 | 2020-05-09T15:48:41.000Z | 2020-05-21T20:14:21.000Z | animations/color/list.py | LeLuxNet/GridPy | 5f4d02d2b254be1f0682b724a96a99009a415308 | [
"MIT"
] | null | null | null | from animations.color import base
class ListGenerator(base.IndexColorGeneration):
    """Color generation backed by a fixed list of colors, looked up by index."""

    def __init__(self, colors):
        # The base class is sized to the number of available colors.
        super().__init__(len(colors))
        self.colors = colors

    def generate_index(self, index):
        """Return the color stored at *index*."""
        return self.colors[index]
77842d9f499b49e55088705f1121160416001ae6 | 1,550 | py | Python | nalu.py | chauhanjatin10/NeuralArithmeticLogicUnits | c8a71e4bb99b7f5bdb5c1b85a75376b0322d6853 | [
"MIT"
] | null | null | null | nalu.py | chauhanjatin10/NeuralArithmeticLogicUnits | c8a71e4bb99b7f5bdb5c1b85a75376b0322d6853 | [
"MIT"
] | null | null | null | nalu.py | chauhanjatin10/NeuralArithmeticLogicUnits | c8a71e4bb99b7f5bdb5c1b85a75376b0322d6853 | [
"MIT"
] | null | null | null | import math
import torch
import torch.nn as nn
import torch.nn.functional as Func
import torch.nn.init as init
from nac import NeuralAccumulatorCell
from torch.nn.parameter import Parameter
class NeuralArithmeticLogicCell(nn.Module):
    """A single Neural Arithmetic Logic Unit (NALU) cell.

    Combines an additive NAC path with a multiplicative (log-space) NAC
    path, blended per element by a learned sigmoid gate:
        y = g * nac(x) + (1 - g) * exp(nac(log(|x| + eps)))
    """
    def __init__(self, in_dim, out_dim):
        """
        :param in_dim: size of each input sample
        :param out_dim: size of each output sample
        """
        super().__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.eps = 1e-8  # guards log() against zero-valued inputs
        # NOTE(review): W_hat and M_hat are registered but never initialized
        # nor used in forward(); kept for state_dict compatibility — confirm
        # whether they can be removed.
        self.W_hat = Parameter(torch.Tensor(out_dim, in_dim))
        self.M_hat = Parameter(torch.Tensor(out_dim, in_dim))
        self.nac = NeuralAccumulatorCell(in_dim, out_dim)
        self.G = Parameter(torch.Tensor(out_dim, in_dim))  # gate weights
        self.register_parameter('bias', None)
        init.kaiming_uniform_(self.G, a=math.sqrt(5))

    def forward(self, inputs):
        """Apply the NALU transformation to ``inputs``."""
        self.out_nac = self.nac(inputs)
        # Fix: torch.sigmoid replaces the deprecated Func.sigmoid.
        self.g = torch.sigmoid(Func.linear(inputs, self.G, self.bias))
        self.add_part = self.out_nac * self.g
        self.log_part = torch.log(torch.abs(inputs) + self.eps)
        self.m_part = torch.exp(self.nac(self.log_part))
        self.mul_part = (1-self.g)*self.m_part
        self.output = self.add_part + self.mul_part
        return self.output
class NALU_mutiple_cells(nn.Module):
    """Stack of NALU cells chained through a hidden dimension."""

    def __init__(self, num_layers, in_dim, hidden_dim,out_dim):
        super().__init__()
        self.num_layers = num_layers
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.hidden_dim = hidden_dim
        # First cell maps in_dim -> hidden_dim, last maps hidden_dim -> out_dim.
        cells = [
            NeuralArithmeticLogicCell(
                in_dim if layer_idx == 0 else hidden_dim,
                out_dim if layer_idx == num_layers - 1 else hidden_dim)
            for layer_idx in range(num_layers)
        ]
        self.model = nn.Sequential(*cells)

    def forward(self, inputs):
        """Run the input through every stacked NALU cell in order."""
        return self.model(inputs)
| 28.703704 | 63 | 0.740645 | 257 | 1,550 | 4.214008 | 0.252918 | 0.050785 | 0.041551 | 0.055402 | 0.237304 | 0.202216 | 0.165282 | 0.165282 | 0.132964 | 0.062789 | 0 | 0.004508 | 0.14129 | 1,550 | 53 | 64 | 29.245283 | 0.809166 | 0 | 0 | 0.186047 | 0 | 0 | 0.002581 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.093023 | false | 0 | 0.162791 | 0.023256 | 0.348837 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
7784ba485cef29826c9b6771e291504192862a3d | 746 | py | Python | servers/python/coweb/bot/__init__.py | opencoweb/coweb | 7b3a87ee9eda735a859447d404ee16edde1c5671 | [
"AFL-2.1"
] | 83 | 2015-01-05T19:02:57.000Z | 2021-11-19T02:48:09.000Z | servers/python/coweb/bot/__init__.py | xuelingxiao/coweb | 7b3a87ee9eda735a859447d404ee16edde1c5671 | [
"AFL-2.1"
] | 3 | 2015-12-16T13:49:33.000Z | 2019-06-17T13:38:50.000Z | servers/python/coweb/bot/__init__.py | xuelingxiao/coweb | 7b3a87ee9eda735a859447d404ee16edde1c5671 | [
"AFL-2.1"
] | 14 | 2015-04-29T22:36:53.000Z | 2021-11-18T03:24:29.000Z | '''
Defines classes for coweb service bots.
Copyright (c) The Dojo Foundation 2011. All Rights Reserved.
Copyright (c) IBM Corporation 2008, 2011. All Rights Reserved.
'''
# std lib
import json
import sys
# coweb
from .reqack import ReqAckDelegate
def run(botClass):
    '''
    Inspects the command line for bot wrapper configuration and, when
    present, hands off to the matching wrapper module.
    '''
    try:
        kwargs = json.loads(sys.argv[1])
    except Exception:
        # Silently bail out: without valid JSON args the wrapper is
        # expected to be created through some other mechanism.
        return
    # Resolve the wrapper module from the managerId option.
    module_name = 'coweb.bot.wrapper.%s' % kwargs['managerId']
    wrapper = __import__(module_name, fromlist=[''])
    wrapper.run(botClass, kwargs)
| 26.642857 | 79 | 0.676944 | 102 | 746 | 4.911765 | 0.676471 | 0.03992 | 0.051896 | 0.083832 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022688 | 0.231903 | 746 | 27 | 80 | 27.62963 | 0.851658 | 0.549598 | 0 | 0 | 0 | 0 | 0.094463 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.4 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
778a30afec7647b6e7e9030506f3d19a33890f34 | 4,097 | py | Python | bin/exps/batch_clone.py | akrishna1995/emuedge | d33845107be3c9bbfcaf030df0a989e9d4972743 | [
"MIT"
] | 8 | 2018-06-21T03:20:26.000Z | 2021-10-15T03:53:49.000Z | bin/exps/batch_clone.py | akrishna1995/emuedge | d33845107be3c9bbfcaf030df0a989e9d4972743 | [
"MIT"
] | 12 | 2018-05-21T17:26:59.000Z | 2018-06-14T02:48:21.000Z | bin/exps/batch_clone.py | akrishna1995/emuedge | d33845107be3c9bbfcaf030df0a989e9d4972743 | [
"MIT"
] | 3 | 2018-08-30T22:37:20.000Z | 2019-03-31T18:29:52.000Z | import logging, sys, XenAPI, os, time
sys.path.insert(0, '../../')
def init_session(uname, pwd, local=True):
	"""Create an authenticated XenAPI session.

	:param uname: login user name
	:param pwd: login password
	:param local: when True, connect through the local xapi socket;
		remote connections are not implemented yet.
	:return: an authenticated session, or None when local is False.
	"""
	# TODO: enable the init of a possibly remote session
	if local:
		session=XenAPI.xapi_local()
		session.xenapi.login_with_password(uname, pwd)
		return session
	# Fix: the original called an undefined log() here (NameError) and then
	# fell through to `return session` with `session` unbound
	# (UnboundLocalError). Log the limitation and return None instead.
	logging.error('currently not support remote connection')
	return None
# ssid: snapshot id
# number: how many instances to create
# Sequentially clones `number` VMs from snapshot `ss_ref`, timing each
# clone+provision pair, writes the per-clone timings (plus the total) to a
# CSV file, then uninstalls every VM it created.
def bat_clone(session, ss_ref, number):
	record=[]  # per-clone elapsed seconds; last entry is the total
	ref_lst=[]  # refs of the created VMs, used for cleanup below
	tot_start=time.time()
	for i in range(0, number):
		start=time.time()
		# Clone from the snapshot, then provision so the VM becomes usable.
		vref=session.xenapi.VM.clone(ss_ref, "test"+str(i))
		session.xenapi.VM.provision(vref)
		stop=time.time()
		elapse=stop-start
		record.append(elapse)
		ref_lst.append(vref)
	tot_stop=time.time()
	tot_elapse=tot_stop-tot_start
	record.append("Total\t "+str(tot_elapse))
	# NOTE: Python 2 print statement; this file is Python 2 syntax.
	print "scale to "+str(number)+": "+"Total\t "+str(tot_elapse)+"s"
	write_1darr_file("bat_clone_res"+str(number)+".csv", record)
	bat_uninstall(session, ref_lst, number)
# Clones `number` VMs from snapshot `ss_ref` using asynchronous XenAPI
# tasks, busy-polls the tasks until all complete, provisions each resulting
# VM, prints the total elapsed time, then uninstalls everything it created.
def async_clone(session, ss_ref, number):
	ref_lst=[]  # refs of VMs created, for cleanup at the end
	task_lst=[]  # outstanding async clone tasks
	tot_start=time.time()
	# Fire off all clone tasks without waiting for completion.
	for i in range(0, number):
		task=session.xenapi.Async.VM.clone(ss_ref, "test"+str(i))
		task_lst.append(task)
	finished=[False]*number
	count=1
	# Poll every not-yet-finished task; a non-empty result means done.
	while count<=number:
		for i in range(0, len(task_lst)):
			if not finished[i]:
				task=task_lst[i]
				res=session.xenapi.task.get_result(task)
				#print progress
				if len(res)>0:
					finished[i]=True
					# The [7:-8] slice presumably strips an XML
					# <value>...</value> wrapper from the task result to
					# obtain the bare VM ref — TODO confirm.
					vref=session.xenapi.task.get_result(task)[7:-8]
					#print vref
					session.xenapi.VM.provision(vref)
					ref_lst.append(vref)
					count+=1
	tot_stop=time.time()
	tot_elapse=tot_stop-tot_start
	#record.append("Total\t "+str(tot_elapse))
	print "scale to "+str(number)+": "+"Total\t "+str(tot_elapse)+"s"
	#write_1darr_file("bat_clone_res"+str(number)+".csv", record)
	bat_uninstall(session, ref_lst, number)
def async_clone_new(session, ss_ref, number):
	"""Clone `number` VMs asynchronously from snapshot `ss_ref`.

	Fix: the original body was a line-for-line duplicate of async_clone;
	delegate to it so the polling logic lives in exactly one place.
	"""
	return async_clone(session, ss_ref, number)
def bat_uninstall(session, ss_ref_lst, number):
	"""Destroy every VM in ss_ref_lst (disks first) and record timings to CSV."""
	timings=[]
	overall_begin=time.time()
	for vm_ref in ss_ref_lst:
		begin=time.time()
		# Storage must be removed before the VM object itself.
		destroy_disk(session, vm_ref)
		session.xenapi.VM.destroy(vm_ref)
		timings.append(time.time()-begin)
	overall_elapsed=time.time()-overall_begin
	timings.append("Total\t "+str(overall_elapsed))
	write_1darr_file("bat_uninstall_result"+str(number)+".csv", timings)
# Removes the storage attached to a VM: for each virtual block device
# (VBD), destroy its disk image (VDI) when one is attached, otherwise
# destroy the empty VBD itself.
def destroy_disk(session, vref):
	vbd_list=session.xenapi.VM.get_VBDs(vref)
	for vbd in vbd_list:
		vdi=session.xenapi.VBD.get_VDI(vbd)
		if vdi!='OpaqueRef:NULL':
			# NOTE(review): only the VDI is destroyed on this branch; the
			# VBD that referenced it is left behind. Confirm whether
			# VM.destroy cleans up remaining VBDs, or whether both should
			# be destroyed here.
			session.xenapi.VDI.destroy(vdi)
		else:
			session.xenapi.VBD.destroy(vbd)
def write_2darr_file(fname, arr):
	"""Write a 2-D array to `fname`: tab-terminated columns, one row per line.

	Refuses to overwrite an existing file. Fix: uses a context manager so
	the file handle is closed even when a write raises (the original leaked
	the handle on error).
	"""
	if os.path.exists(fname):
		# print(...) with a single argument behaves identically in
		# Python 2 and Python 3.
		print(fname+" already exists!")
		return
	with open(fname, "w") as f:
		for row in arr:
			for col in row:
				f.write(str(col)+"\t")
			f.write("\n")
	return
def write_1darr_file(fname, arr):
	"""Write a 1-D array to `fname`, one element per line.

	Refuses to overwrite an existing file. Fix: closes the file via a
	context manager even when a write raises (the original leaked the
	handle on error).
	"""
	if os.path.exists(fname):
		print(fname+" already exists!")
		return
	with open(fname, "w") as f:
		for row in arr:
			f.write(str(row)+"\n")
	return
# Entry point: logs in to the local xapi, then runs the batch-clone
# benchmark at increasing scales (10, 20, 30, 40, 50 devices).
def main():
	# NOTE(review): credentials are hard-coded; move them to a config file
	# or environment variables.
	session=init_session("root", "789456123")
	# test the batch install
	ss_ref=session.xenapi.VM.get_by_uuid("32f4b1a2-cd4b-e8d5-1753-51e77205c27e")
	i=0
	while i < 50:
		i+=10
		print("scaling to "+str(i)+" devices")
		#async_clone(session, ss_ref, i)
		bat_clone(session, ss_ref, i)
# Kick off the benchmark when the script is executed.
main()
778afd4b3ea9feac924d9ce4230a66a795531f22 | 682 | py | Python | backend_drf_views_example.py | sunilparajuli/DRF-Vue-numbering-Pagination-Snippet | 5a6b3796656d74b65ad4b8e1d3144afe54dcb0f6 | [
"Apache-2.0"
] | 2 | 2021-06-02T16:29:16.000Z | 2021-06-02T16:29:18.000Z | backend_drf_views_example.py | sunilparajuli/DRF-Vue-numbering-Pagination-Snippet | 5a6b3796656d74b65ad4b8e1d3144afe54dcb0f6 | [
"Apache-2.0"
] | null | null | null | backend_drf_views_example.py | sunilparajuli/DRF-Vue-numbering-Pagination-Snippet | 5a6b3796656d74b65ad4b8e1d3144afe54dcb0f6 | [
"Apache-2.0"
] | null | null | null |
# Pagination class that augments the standard paginated response with a
# total_pages field so the frontend can render page-number controls.
class PageNumberPaginationWithCount(pagination.PageNumberPagination):
    def get_paginated_response(self, data):
        paginated = super(PageNumberPaginationWithCount, self).get_paginated_response(data)
        # Expose the page count computed by the underlying paginator.
        total = self.page.paginator.num_pages
        paginated.data['total_pages'] = total
        return paginated
#example list api view
class VisitAPIView(generics.ListAPIView):
    # Fix: the original referenced "PageNumbePaginationWithCount" (missing
    # an "r"), which raises NameError at import time; the class defined
    # above is PageNumberPaginationWithCount.
    pagination_class = PageNumberPaginationWithCount
    serializer_class = VisitSerializer

    def get_queryset(self):
        # Unfiltered queryset; pagination limits the response size.
        return Visit.objects.all()
#example url
# NOTE(review): Django expects the module-level name "urlpatterns";
# "urlspattern" here looks like a typo — confirm before renaming, since
# other example code may reference this name.
urlspattern = [
    re_path(r'^api/v2/visits/', VisitAPIView.as_view(), name="visits_api"),
]
| 26.230769 | 90 | 0.774194 | 74 | 682 | 6.959459 | 0.594595 | 0.038835 | 0.07767 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001701 | 0.13783 | 682 | 25 | 91 | 27.28 | 0.87415 | 0.13783 | 0 | 0 | 0 | 0 | 0.06175 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153846 | false | 0 | 0 | 0.076923 | 0.615385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
778cfa1ba927b0196782d749a97b1b4c8af54073 | 5,170 | py | Python | src/vivarium/examples/disease_model/population.py | ihmeuw/vivarium | 77393d2e84ff2351c926f65b33272b7225cf9628 | [
"BSD-3-Clause"
] | 41 | 2017-07-14T03:39:06.000Z | 2022-03-20T05:36:33.000Z | src/vivarium/examples/disease_model/population.py | ihmeuw/vivarium | 77393d2e84ff2351c926f65b33272b7225cf9628 | [
"BSD-3-Clause"
] | 26 | 2017-08-08T22:13:44.000Z | 2021-08-18T00:14:54.000Z | src/vivarium/examples/disease_model/population.py | ihmeuw/vivarium | 77393d2e84ff2351c926f65b33272b7225cf9628 | [
"BSD-3-Clause"
] | 8 | 2017-08-03T17:15:39.000Z | 2021-09-30T21:57:50.000Z | import pandas as pd
from vivarium.framework.engine import Builder
from vivarium.framework.population import SimulantData
from vivarium.framework.event import Event
class BasePopulation:
"""Generates a base population with a uniform distribution of age and sex.
Attributes
----------
configuration_defaults :
A set of default configuration values for this component. These can be
overwritten in the simulation model specification or by providing
override values when constructing an interactive simulation.
"""
configuration_defaults = {
'population': {
# The range of ages to be generated in the initial population
'age_start': 0,
'age_end': 100,
# Note: There is also a 'population_size' key.
},
}
def __init__(self):
self.name = 'base_population'
# noinspection PyAttributeOutsideInit
def setup(self, builder: Builder):
"""Performs this component's simulation setup.
The ``setup`` method is automatically called by the simulation
framework. The framework passes in a ``builder`` object which
provides access to a variety of framework subsystems and metadata.
Parameters
----------
builder :
Access to simulation tools and subsystems.
"""
self.config = builder.configuration
self.with_common_random_numbers = bool(self.config.randomness.key_columns)
self.register = builder.randomness.register_simulants
if (self.with_common_random_numbers
and not ['entrance_time', 'age'] == self.config.randomness.key_columns):
raise ValueError("If running with CRN, you must specify ['entrance_time', 'age'] as"
"the randomness key columns.")
self.age_randomness = builder.randomness.get_stream('age_initialization',
for_initialization=self.with_common_random_numbers)
self.sex_randomness = builder.randomness.get_stream('sex_initialization')
columns_created = ['age', 'sex', 'alive', 'entrance_time']
builder.population.initializes_simulants(self.on_initialize_simulants,
creates_columns=columns_created)
self.population_view = builder.population.get_view(columns_created)
builder.event.register_listener('time_step', self.age_simulants)
def on_initialize_simulants(self, pop_data: SimulantData):
"""Called by the simulation whenever new simulants are added.
This component is responsible for creating and filling four columns
in the population state table:
'age' :
The age of the simulant in fractional years.
'sex' :
The sex of the simulant. One of {'Male', 'Female'}
'alive' :
Whether or not the simulant is alive. One of {'alive', 'dead'}
'entrance_time' :
The time that the simulant entered the simulation. The 'birthday'
for simulants that enter as newborns. A `pandas.Timestamp`.
Parameters
----------
pop_data :
A record containing the index of the new simulants, the
start of the time step the simulants are added on, the width
of the time step, and the age boundaries for the simulants to
generate.
"""
age_start = self.config.population.age_start
age_end = self.config.population.age_end
if age_start == age_end:
age_window = pop_data.creation_window / pd.Timedelta(days=365)
else:
age_window = age_end - age_start
age_draw = self.age_randomness.get_draw(pop_data.index)
age = age_start + age_draw * age_window
if self.with_common_random_numbers:
population = pd.DataFrame({'entrance_time': pop_data.creation_time,
'age': age.values}, index=pop_data.index)
self.register(population)
population['sex'] = self.sex_randomness.choice(pop_data.index, ['Male', 'Female'])
population['alive'] = 'alive'
else:
population = pd.DataFrame(
{'age': age.values,
'sex': self.sex_randomness.choice(pop_data.index, ['Male', 'Female']),
'alive': pd.Series('alive', index=pop_data.index),
'entrance_time': pop_data.creation_time},
index=pop_data.index)
self.population_view.update(population)
def age_simulants(self, event: Event):
"""Updates simulant age on every time step.
Parameters
----------
event :
An event object emitted by the simulation containing an index
representing the simulants affected by the event and timing
information.
"""
population = self.population_view.get(event.index, query="alive == 'alive'")
population['age'] += event.step_size / pd.Timedelta(days=365)
self.population_view.update(population)
| 39.769231 | 111 | 0.623985 | 580 | 5,170 | 5.408621 | 0.293103 | 0.024546 | 0.022952 | 0.025502 | 0.161938 | 0.068856 | 0.030602 | 0.030602 | 0.030602 | 0.030602 | 0 | 0.002727 | 0.290716 | 5,170 | 129 | 112 | 40.077519 | 0.852741 | 0.339458 | 0 | 0.071429 | 0 | 0 | 0.102041 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.071429 | 0 | 0.178571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
778df52cd98d358aeeaa3cd2f864bdb6647f52ff | 5,390 | py | Python | python3/ssm/requester.py | renardbebe/ssm-rotation-sdk-python | cffe573f5462d0b93be9b5fb110970ade98f1642 | [
"Apache-2.0"
] | null | null | null | python3/ssm/requester.py | renardbebe/ssm-rotation-sdk-python | cffe573f5462d0b93be9b5fb110970ade98f1642 | [
"Apache-2.0"
] | null | null | null | python3/ssm/requester.py | renardbebe/ssm-rotation-sdk-python | cffe573f5462d0b93be9b5fb110970ade98f1642 | [
"Apache-2.0"
] | 1 | 2021-07-21T02:46:58.000Z | 2021-07-21T02:46:58.000Z | import logging
from threading import Timer
from tencentcloud.ssm.v20190923 import models, ssm_client
from tencentcloud.common.profile import client_profile
from tencentcloud.common import credential
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
class Error:
    """Lightweight error container returned alongside results."""

    def __init__(self, message=None):
        """
        :param message: human-readable error description (or None)
        :type message: str
        """
        # None and explicit messages are stored uniformly.
        self.message = message
class LoopTimer(Timer):
    """A Timer that fires repeatedly instead of once.

    Unlike threading.Timer, run() keeps invoking the target function every
    `interval` seconds until cancel() sets the `finished` event.
    """

    def __init__(self, interval, function, args, kwargs):
        # Delegate storage of interval/function/args/kwargs to Timer.
        Timer.__init__(self, interval, function, args, kwargs)

    def run(self):
        """Invoke the target function every `interval` seconds until cancelled."""
        while True:
            # Sleep one interval; wait() returns early once cancel() fires.
            self.finished.wait(self.interval)
            if self.finished.is_set():
                self.finished.set()
                break
            self.function(*self.args, **self.kwargs)
class DbAccount:
    """Database account credentials (user name + password)."""

    def __init__(self, params=None):
        """
        :param params: optional mapping with 'user_name' and 'password'
            keys; missing keys (or a None params) leave the attribute None
        :type params: dict
        """
        if params is None:
            params = {}
        self.user_name = params.get('user_name')
        self.password = params.get('password')
class SsmAccount:
    """SSM access credentials and endpoint configuration."""

    def __init__(self, params=None):
        """
        :param params: optional mapping with 'secret_id', 'secret_key',
            'url' and 'region' keys; missing keys stay None
        :type params: dict
        """
        if params is None:
            params = {}
        # secret_id/secret_key identify and authenticate the API caller.
        self.secret_id = params.get('secret_id')
        self.secret_key = params.get('secret_key')
        # url is the SSM service endpoint; region is the cloud region.
        self.url = params.get('url')
        self.region = params.get('region')
def __get_client(secret_id, secret_key, url, region):
    """Build an SSM API client.

    :param secret_id: credential id (identifies the caller)
    :type secret_id: str
    :param secret_key: credential key (authenticates the caller)
    :type secret_key: str
    :param url: optional SSM service endpoint override
    :type url: str
    :param region: cloud region
    :type region: str
    :return: (client, error) pair; exactly one of the two is None
    """
    creds = credential.Credential(secret_id, secret_key)
    profile = client_profile.HttpProfile()
    profile.reqMethod = "POST"
    # Only override the default endpoint when an explicit URL is given.
    if url:
        profile.endpoint = url

    # Client configuration.
    conf = client_profile.ClientProfile()
    conf.httpProfile = profile

    # Create the SSM client object.
    try:
        return ssm_client.SsmClient(creds, region, conf), None
    except TencentCloudSDKException as e:
        return None, Error(str(e.args[0]))
def __get_current_product_secret_value(secret_name, ssm_acc):
    """Fetch the current version of a secret's value from SSM.

    :param secret_name: name of the secret to read
    :type secret_name: str
    :param ssm_acc: SSM account / endpoint configuration
    :type ssm_acc: SsmAccount
    :return: (secret_string, error) pair; exactly one of the two is None
    """
    print("get value for secret_name=%s" % secret_name)
    client, err = __get_client(ssm_acc.secret_id, ssm_acc.secret_key, ssm_acc.url,
                               ssm_acc.region)
    if err:
        # Fix: the original passed err.message as an extra positional arg
        # with no %s placeholder, so it was never rendered in the log
        # (and triggered an internal logging format error).
        logging.error("create ssm client error: %s", err.message)
        return None, Error("create ssm HTTP client error: %s" % err.message)

    # Read the secret's current version.
    request = models.GetSecretValueRequest()
    request.SecretName = secret_name
    request.VersionId = "SSM_Current"  # hard-code
    rsp = None
    try:
        rsp = client.GetSecretValue(request)
    except TencentCloudSDKException as e:
        err = Error(str(e.args[0]))
        print("ssm GetSecretValue error: " + err.message)
    if err:
        logging.error("ssm GetSecretValue error: " + err.message)
        return None, Error("ssm GetSecretValue error: " + err.message)
    return rsp.SecretString, None
def get_current_account(secret_name, ssm_acc):
    """Fetch the current DB account stored in the given secret.

    :param secret_name: name of the secret to read
    :type secret_name: str
    :param ssm_acc: SSM account / endpoint configuration
    :type ssm_acc: SsmAccount
    :return: (DbAccount, error) pair; exactly one of the two is None
    """
    import json  # local import: this module has no top-level json import

    # Read the secret content for secret_name.
    secret_value, err = __get_current_product_secret_value(secret_name, ssm_acc)
    if err:
        logging.error("failed to GetSecretValue, err=" + err.message)
        return None, err
    # secret_value is a JSON string such as:
    # {"UserName":"test_user","Password":"test_pwd"}
    print("secret value: ", secret_value)
    if len(secret_value) == 0:
        return None, Error("no valid account info found because secret value is empty")
    # Fix: json.loads replaces eval() — eval executed arbitrary code from
    # the externally-stored secret payload.
    creds = json.loads(secret_value)
    # Fix: DbAccount.__init__ takes a params dict; the original passed two
    # positional arguments, which raised TypeError.
    account = DbAccount({"user_name": creds["UserName"],
                         "password": creds["Password"]})
    return account, None
| 29.779006 | 101 | 0.625974 | 663 | 5,390 | 4.897436 | 0.209653 | 0.027718 | 0.022174 | 0.029566 | 0.310748 | 0.276255 | 0.23283 | 0.18725 | 0.18725 | 0.160148 | 0 | 0.003064 | 0.273469 | 5,390 | 180 | 102 | 29.944444 | 0.826098 | 0.227087 | 0 | 0.181818 | 0 | 0 | 0.099741 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0.045455 | 0.068182 | 0 | 0.284091 | 0.034091 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
778e86e22df74cae729a35ebbd9cf3422b98571c | 5,159 | py | Python | bdd100k/eval/lane_test.py | siyliepfl/bdd100k | f38e9b5cd4e21f7a47822734ffa4d397f64bf04a | [
"BSD-3-Clause"
] | null | null | null | bdd100k/eval/lane_test.py | siyliepfl/bdd100k | f38e9b5cd4e21f7a47822734ffa4d397f64bf04a | [
"BSD-3-Clause"
] | null | null | null | bdd100k/eval/lane_test.py | siyliepfl/bdd100k | f38e9b5cd4e21f7a47822734ffa4d397f64bf04a | [
"BSD-3-Clause"
] | null | null | null | """Test cases for lane.py."""
import os
import unittest
from typing import Dict
import numpy as np
from .lane import (
eval_lane_per_threshold,
evaluate_lane_marking,
get_foreground,
get_lane_class,
sub_task_funcs,
)
class TestGetLaneClass(unittest.TestCase):
    """Test cases for the lane specific channel extraction."""

    def test_partialled_classes(self) -> None:
        """Check the function that partial get_lane_class."""
        for value in range(255):
            byte = np.array(value, dtype=np.uint8)
            # Bit 3 selects the lane-class channel.
            lane_bit = 1 if value & 8 else 0
            self.assertTrue(get_lane_class(byte, lane_bit, 3, 1))
            if not value & 8:
                self.assertTrue(get_foreground(byte))
            # Bit 5 encodes the direction sub-task.
            direction_bit = 1 if value & (1 << 5) else 0
            self.assertTrue(sub_task_funcs["direction"](byte, direction_bit))
            # Bit 4 encodes the style sub-task.
            style_bit = 1 if value & (1 << 4) else 0
            self.assertTrue(sub_task_funcs["style"](byte, style_bit))
class TestEvalLanePerThreshold(unittest.TestCase):
    """Test cases for the per image per threshold lane marking evaluation."""

    def test_two_parallel_lines(self) -> None:
        """Check the correctness of the function in general cases."""
        # Fix: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin bool is the equivalent dtype.
        a = np.zeros((10, 10), dtype=bool)
        b = np.zeros((10, 10), dtype=bool)
        a[3, 3:7] = True
        b[7, 3:7] = True

        # The two rows are 4 pixels apart: no match below that radius,
        # a perfect match at or above it.
        for radius in [1, 2, 3]:
            self.assertAlmostEqual(eval_lane_per_threshold(a, b, radius), 0.0)
        for radius in [4, 5, 6]:
            self.assertAlmostEqual(eval_lane_per_threshold(a, b, radius), 1.0)

    def test_two_vertical_lines(self) -> None:
        """Check the correctness of the function in general cases."""
        a = np.zeros((10, 10), dtype=bool)
        b = np.zeros((10, 10), dtype=bool)
        a[3, 3:6] = True
        b[5:8, 7] = True

        self.assertAlmostEqual(eval_lane_per_threshold(a, b, 2), 0.0)
        self.assertAlmostEqual(eval_lane_per_threshold(a, b, 3), 1 / 3)
        self.assertAlmostEqual(eval_lane_per_threshold(a, b, 4), 2 / 3)
        self.assertAlmostEqual(eval_lane_per_threshold(a, b, 5), 1.0)

    def test_two_vertical_border_lines(self) -> None:
        """Check the correctness of the function in general cases."""
        a = np.zeros((10, 10), dtype=bool)
        b = np.zeros((10, 10), dtype=bool)
        a[1:6, 1:4] = True
        b[4:7, 3:8] = True

        self.assertAlmostEqual(eval_lane_per_threshold(a, b, 2), 0.0)
        self.assertAlmostEqual(eval_lane_per_threshold(a, b, 3), 0.4)
        self.assertAlmostEqual(eval_lane_per_threshold(a, b, 4), 0.70588235)
        self.assertAlmostEqual(eval_lane_per_threshold(a, b, 5), 1.0)
class TestEvaluateLaneMarking(unittest.TestCase):
    """Test cases for the evaluate_lane_marking function."""

    def test_mock_cases(self) -> None:
        """Check the performance of the mock case."""
        # Test fixtures live next to this file under testcases/lane/.
        cur_dir = os.path.dirname(os.path.abspath(__file__))
        gt_dir = "{}/testcases/lane/gts".format(cur_dir)
        res_dir = "{}/testcases/lane/res".format(cur_dir)
        f_scores = evaluate_lane_marking(gt_dir, res_dir, bound_ths=[1, 2])
        # Expected F-scores; keys presumably follow the pattern
        # "{bound_th}_{task}_{category}" — confirm against evaluate_lane_marking.
        gt_f_scores: Dict[str, float] = {
            "1.0_direction_parallel": 79.46877879291574,
            "2.0_direction_parallel": 87.61816039690531,
            "1.0_direction_vertical": 58.9375575858315,
            "2.0_direction_vertical": 75.23632079381062,
            "1.0_direction_avg": 100.0,
            "2.0_direction_avg": 100.0,
            "1.0_style_solid": 79.46877879291574,
            "2.0_style_solid": 87.61816039690531,
            "1.0_style_dashed": 58.9375575858315,
            "2.0_style_dashed": 75.23632079381062,
            "1.0_style_avg": 100.0,
            "2.0_style_avg": 100.0,
            "1.0_category_crosswalk": 88.24432582570225,
            "2.0_category_crosswalk": 93.82889258902341,
            "1.0_category_double_other": 99.01265721381078,
            "2.0_category_double_other": 100.0,
            "1.0_category_double_white": 100.0,
            "2.0_category_double_white": 100.0,
            "1.0_category_double_yellow": 100.0,
            "2.0_category_double_yellow": 100.0,
            "1.0_category_road_curb": 75.0,
            "2.0_category_road_curb": 75.16008049762166,
            "1.0_category_single_other": 59.173962031069706,
            "2.0_category_single_other": 75.48380881221992,
            "1.0_category_single_white": 100.0,
            "2.0_category_single_white": 100.0,
            "1.0_category_single_yellow": 89.27983318704442,
            "2.0_category_single_yellow": 99.98725140234575,
            "1.0_category_avg": 83.48815417369305,
            "2.0_category_avg": 100.0,
            "1.0_total_avg": 94.496051391231,
            "2.0_total_avg": 100.0,
            "average": 97.2480256956155,
        }
        # Compare every expected score against the computed one.
        for key, val in gt_f_scores.items():
            self.assertAlmostEqual(val, f_scores[key])
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
| 39.381679 | 78 | 0.611165 | 705 | 5,159 | 4.221277 | 0.212766 | 0.012769 | 0.040659 | 0.073925 | 0.480175 | 0.43918 | 0.319556 | 0.289987 | 0.266465 | 0.218078 | 0 | 0.137976 | 0.262454 | 5,159 | 130 | 79 | 39.684615 | 0.644152 | 0.087226 | 0 | 0.128713 | 0 | 0 | 0.159768 | 0.111945 | 0 | 0 | 0 | 0 | 0.178218 | 1 | 0.049505 | false | 0 | 0.049505 | 0 | 0.128713 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
778eae6f766166711785d06fe1c129980226b180 | 6,178 | py | Python | draw.py | squeezeday/raspberry-pi-e-ink-display | 97a16f7bd58ba6d6c800864c945a961870e823ed | [
"MIT"
] | 5 | 2019-09-10T09:57:59.000Z | 2021-08-17T18:10:21.000Z | draw.py | squeezeday/raspberry-pi-e-ink-display | 97a16f7bd58ba6d6c800864c945a961870e823ed | [
"MIT"
] | 5 | 2021-03-19T03:32:34.000Z | 2022-03-11T23:58:49.000Z | draw.py | squeezeday/raspberry-pi-e-ink-display | 97a16f7bd58ba6d6c800864c945a961870e823ed | [
"MIT"
] | null | null | null | from PIL import Image, ImageDraw, ImageFont
from urllib.request import Request, urlopen
from urllib.parse import quote, unquote
import json
import pytz
import os
from calendarhelper import getCaldavEvents, calendarEvent
from datetime import datetime, date, timedelta, tzinfo, timezone
from tzlocal import get_localzone
from dotenv import load_dotenv
load_dotenv()
import locale
locale.setlocale(locale.LC_ALL, os.getenv('LOCALE'))
# Configuration pulled from environment variables (loaded via dotenv above).
home_assistant_base_url = os.getenv('HOME_ASSISTANT_BASE_URL')
home_assistant_access_token = os.getenv('HOME_ASSISTANT_ACCESS_TOKEN')
caldav_url = os.getenv('CALDAV_URL')
# NOTE(review): height reads DISPLAY_WIDTH and width reads DISPLAY_HEIGHT —
# presumably intentional for a rotated e-ink panel; confirm.
display_height = int(os.getenv('DISPLAY_WIDTH'))
display_width = int(os.getenv('DISPLAY_HEIGHT'))

localtimezone = get_localzone()

# Material Design Icons glyphs, keyed by weather condition / symbol name.
# cheat sheet https://cdn.materialdesignicons.com/4.3.95/
weather_icons = {
    'cloudy': '%EF%96%90',
    'fog': '%EF%96%91',
    'hail': '%EF%96%92',
    'hurricane': '%EF%A2%97',
    'lightning': '%EF%96%93',
    'lightning-rainy': '%EF%99%BD',
    'night': '%EF%96%94',
    'partlycloudy': '%EF%96%95',
    'pouring': '%EF%96%96',
    'rainy': '%EF%96%97',
    'snowy': '%EF%96%98',
    'snowy-rainy': '%EF%99%BE',
    'sunny': "%EF%96%99",
    # NOTE(review): 'sunset' appears twice in this literal; the later
    # '\uF59C'-style entry below silently overrides this one — confirm
    # which glyph is intended.
    'sunset': '%EF%96%9A',
    'sunset-down': '%EF%96%9B',
    'sunset-up': '%EF%96%9C',
    'windy': '%EF%96%9D',
    'thermometer': '\uF50F',
    'humidity': '\uF58E',
    'sunset': '\uF59B',
    'sunrise': '\uF59C'
}
def get_ha_sensor_state(state):
    """Fetch the state of a Home Assistant entity via its REST API.

    :param state: API path fragment, e.g. 'states/sun.sun'
    :return: decoded JSON object on success, or None on any error
    """
    try:
        req = Request(home_assistant_base_url + state)
        req.add_header('Authorization', 'Bearer ' + home_assistant_access_token)
        # Fix: close the HTTP response deterministically; the original
        # never closed the urlopen() handle.
        with urlopen(req) as resp:
            content = resp.read()
        return json.loads(content.decode("utf-8"))
    except Exception as e:
        # Best-effort: callers treat None as "sensor unavailable".
        print("Error reading " + state + ": " + str(e))
        return None
def create_image():
    """Render the e-ink dashboard as a 1-bit (black/white) PIL image.

    Pulls the weather forecast, sun times and an outdoor temperature sensor
    from Home Assistant, plus upcoming events from CalDAV, then draws a
    seven-day agenda.  Every data source may come back as None (failed
    request); the corresponding section is skipped so a partial dashboard
    is still produced.

    Returns:
        PIL.Image.Image: finished image, display_width x display_height.
    """
    # init black/white image (mode '1' = 1 bit/pixel, 255 = white background)
    black_image = Image.new('1', (display_width, display_height), 255)
    draw_black = ImageDraw.Draw(black_image)
    # init fonts
    fontForecastToday = ImageFont.truetype('materialdesignicons-webfont.ttf', 48)
    fontForecast = ImageFont.truetype('materialdesignicons-webfont.ttf', 32)
    fontThermometer = ImageFont.truetype('SourceSansPro-Bold.ttf', 36)
    fontSun = ImageFont.truetype('SourceSansPro-Bold.ttf', 24)
    fontEventToday = ImageFont.truetype('SourceSansPro-Regular.ttf', 26)
    fontEvent = ImageFont.truetype('SourceSansPro-Regular.ttf', 24)
    fontDateToday = ImageFont.truetype('SourceSansPro-Bold.ttf', 40)
    fontDate = ImageFont.truetype('SourceSansPro-Bold.ttf', 26)
    now = datetime.now().astimezone(localtimezone)
    # get weather forecast
    weather_data = get_ha_sensor_state('states/weather.smhi_home') # or states/weather.dark_sky
    # get sunrise/sunset -- FIX: guard against a failed request before
    # indexing ['attributes'] (previously crashed with TypeError when HA
    # was unreachable)
    sun_state = get_ha_sensor_state('states/sun.sun')
    sun_data = sun_state['attributes'] if sun_state is not None else None
    # get sensor data
    outdoor_sensor = get_ha_sensor_state('states/sensor.outdoor_2')
    # get calendar events
    events = getCaldavEvents(caldav_url)
    # draw today's date header ('%-d/%-m' is glibc-specific: no zero padding)
    msg = now.strftime('%A %-d/%-m')
    draw_black.text((10, 10), msg, font = fontDateToday, fill = 0)
    # draw today's forecast
    if weather_data is not None:
        draw_black.text((245, 10), unquote(weather_icons[weather_data['attributes']['forecast'][0]['condition']]), font = fontForecastToday, fill = 0)
        # FIX: repaired mojibake degree sign (' ยฐC' -> ' °C')
        draw_black.text((295, 10), str(weather_data['attributes']['forecast'][0]['temperature']) + ' °C' , font = fontThermometer, fill = 0)
    # bottom-row baseline -- FIX: computed unconditionally so the sunrise/
    # sunset row still renders when the outdoor sensor is offline
    # (previously a NameError).  NOTE: ImageDraw.textsize() was removed in
    # Pillow 10; this code requires Pillow < 10 (migrate to textbbox()).
    _, bottom_row_h = draw_black.textsize('0', font = fontThermometer)
    current_outdoor_temp_y = display_height - 10 - bottom_row_h
    # draw current outdoor temp
    if outdoor_sensor is not None:
        str_current_outdoor_temp = str(outdoor_sensor["state"] + ' ' + outdoor_sensor['attributes']['unit_of_measurement'])
        draw_black.text((35, current_outdoor_temp_y), str_current_outdoor_temp, font = fontThermometer, fill = 0)
        # icon ids look like 'mdi:rainy' -- [4:] strips the 'mdi:' prefix
        draw_black.text((0, current_outdoor_temp_y+10), unquote(weather_icons[outdoor_sensor['attributes']['icon'][4:]]), font = fontForecast, fill = 0)
    # draw sunrise/sunset hours ([:-6] drops the ISO-8601 UTC offset suffix)
    if sun_data is not None:
        sunrise = pytz.utc.localize(datetime.strptime(sun_data['next_rising'][:-6], '%Y-%m-%dT%H:%M:%S'))
        sunset = pytz.utc.localize(datetime.strptime(sun_data['next_setting'][:-6], '%Y-%m-%dT%H:%M:%S'))
        draw_black.text((200, current_outdoor_temp_y+10), sunrise.astimezone(localtimezone).strftime("%H:%M",), font = fontSun, fill = 0)
        draw_black.text((170, current_outdoor_temp_y+10), unquote(weather_icons['sunrise']), font = fontForecast, fill = 0)
        draw_black.text((300, current_outdoor_temp_y+10), sunset.astimezone(localtimezone).strftime("%H:%M",), font = fontSun, fill = 0)
        draw_black.text((270, current_outdoor_temp_y+10), unquote(weather_icons['sunset']), font = fontForecast, fill = 0)
    # 7-day agenda: one dated section per day, events listed underneath
    max_y = display_height - 80
    y = 30
    day = now  # local-time cursor ('day' avoids shadowing datetime.date)
    dateutc = datetime(now.year, now.month, now.day, 0,0,0,0, timezone.utc) # FIXME: make timezone naive
    for i in range(7):
        if y > max_y:
            break
        # draw day header and forecast (today's were already drawn above)
        if i != 0:
            draw_black.text((10, y), day.strftime("%A %-d/%-m"), font = fontDate, fill = 0)
            if weather_data is not None:
                draw_black.text((280, y), unquote(weather_icons[weather_data['attributes']['forecast'][i]['condition']]), font = fontForecast, fill = 0)
                draw_black.text((320, y), str(weather_data['attributes']['forecast'][i]['temperature']) + ' °C' , font = fontSun, fill = 0)
        y += 30
        # draw events for the day
        if events is not None:
            for ev in events:
                eventStart = ev.datetimestart
                eventEnd = ev.datetimeend
                if y > max_y:
                    break
                # keep events starting this day, or multi-day events spanning it
                if not ev.date == day.date() and not eventStart <= dateutc < eventEnd:
                    continue
                if not ev.allday:
                    row = "{} {}".format(eventStart.astimezone(localtimezone).strftime("%H:%M",), ev.summary)
                else:
                    row = ev.summary
                font = fontEventToday if i == 0 else fontEvent
                draw_black.text((10, y), row, font = font, fill = 0)
                y += 24
        # advance to the next day
        y += 12
        day = day + timedelta(days=1)
        dateutc = dateutc + timedelta(days=1)
    return black_image
| 37.442424 | 148 | 0.683555 | 846 | 6,178 | 4.833333 | 0.288416 | 0.037417 | 0.04133 | 0.023967 | 0.340915 | 0.186843 | 0.154561 | 0.097334 | 0.047444 | 0.030325 | 0 | 0.03505 | 0.164131 | 6,178 | 164 | 149 | 37.670732 | 0.75639 | 0.058271 | 0 | 0.065574 | 0 | 0 | 0.175659 | 0.051198 | 0 | 0 | 0 | 0.006098 | 0 | 1 | 0.016393 | false | 0 | 0.090164 | 0 | 0.131148 | 0.008197 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
778f222399a540e39a58b6e8b7d53a1fe0cdcdb2 | 139,019 | py | Python | pysnmp-with-texts/ZXR10-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/ZXR10-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/ZXR10-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module ZXR10-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ZXR10-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:48:21 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
mgmt, Bits, NotificationType, enterprises, iso, Counter32, Counter64, IpAddress, MibIdentifier, TimeTicks, Unsigned32, ObjectIdentity, NotificationType, Integer32, Gauge32, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "mgmt", "Bits", "NotificationType", "enterprises", "iso", "Counter32", "Counter64", "IpAddress", "MibIdentifier", "TimeTicks", "Unsigned32", "ObjectIdentity", "NotificationType", "Integer32", "Gauge32", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
MacAddress, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "TextualConvention", "DisplayString")
zte = MibIdentifier((1, 3, 6, 1, 4, 1, 3902))
zxr10 = MibIdentifier((1, 3, 6, 1, 4, 1, 3902, 3))
# NOTE(review): intentionally(?) shadows the DisplayString textual
# convention imported from SNMPv2-TC above, replacing it with a plain
# unconstrained OctetString -- confirm against the generator before
# "fixing" the shadowing.
class DisplayString(OctetString):
    pass
class AvailStatus(Integer32):
    """Enumerated availability status: out(0), in(1)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
    namedValues = NamedValues(("out", 0), ("in", 1))
class OperStatus(Integer32):
    """Enumerated operational status: down(0), up(1)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
    namedValues = NamedValues(("down", 0), ("up", 1))
class PortProperty(Integer32):
    """Enumerated physical port medium: photo(0), electricity(1), phoelecmix(3), console(4).

    Note value 2 is deliberately absent from the constraint, mirroring the
    vendor MIB.
    """
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 3, 4))
    namedValues = NamedValues(("photo", 0), ("electricity", 1), ("phoelecmix", 3), ("console", 4))
class MasterStatus(Integer32):
    """Enumerated redundancy role: master(1), slave(2), member(3)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
    namedValues = NamedValues(("master", 1), ("slave", 2), ("member", 3))
class UnitRunStatus(Integer32):
    """Enumerated unit running status: down(0), up(1)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
    namedValues = NamedValues(("down", 0), ("up", 1))
class PidUsedStatus(Integer32):
    """Enumerated PID allocation status: reserved(0), used(1)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
    namedValues = NamedValues(("reserved", 0), ("used", 1))
class BoolStatus(Integer32):
    """SNMP boolean: true(1), false(0)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 0))
    namedValues = NamedValues(("true", 1), ("false", 0))
class ProductID(Integer32):
    """Generated enumeration mapping ZXR10 product-ID values to model names."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 417, 418, 419, 420, 5000))
    # NOTE(review): 'zxr10Switch3928E' (40 vs 419) and 'zxr10Switch3928E-FI'
    # (41 vs 420) each appear twice with different values -- carried over
    # verbatim from the vendor MIB; verify against the ASN.1 source before
    # relying on reverse (name -> value) lookups for those labels.
    namedValues = NamedValues(("zxr10RouterT128", 1), ("zxr10RouterT64", 2), ("zxr10RouterT32", 3), ("zxr10Routergar-2608", 4), ("zxr10Routerger8", 5), ("zxr10Routergar-2604", 6), ("zxr10SwitchT160G", 7), ("zxr10Routergar-3608", 8), ("zxr10Routergar-7208", 9), ("zxr10SwitchT64G", 10), ("zxr10Switch3206", 11), ("zxr10Switch3906", 12), ("zxr10Switch3228", 13), ("zxr10Switch3928", 14), ("zxr10Switch3252", 15), ("zxr10Switch3952", 16), ("zxr10Switch5224", 17), ("zxr10Switch5228", 18), ("zxr10Switch5228F", 19), ("zxr10Switch5928", 20), ("zxr10Switch5928F", 21), ("zxr10Switch5252", 22), ("zxr10Switch5952", 23), ("zxr10Switch3226", 24), ("zxr10SwitchT40G", 25), ("zxr10RouterT1200", 26), ("zxr10RouterT600", 27), ("zxr10Routerger2", 28), ("zxr10Routerger4", 29), ("zxr10Switch3226FI", 30), ("zxr10Switch3928A", 31), ("zxr10Switch3928AFI", 32), ("zxr10Switch3952A", 33), ("zxr10Switch3228A-EI", 34), ("zxr10Switch3228A", 35), ("zxr10Switch3228A-FI", 36), ("zxr10Switch3252A", 37), ("zxr10Switch5228A", 38), ("zxr10Switch5252A", 39), ("zxr10Switch3928E", 40), ("zxr10Switch3928E-FI", 41), ("zxr10Switch3952E", 42), ("zxr10Switch5952E", 43), ("zxr10RouterR10-1822-AC", 100), ("zxr10RouterR10-1822-DC", 101), ("zxr10RouterR10-1821-AC", 102), ("zxr10RouterR10-1821-DC", 103), ("zxr10RouterR10-1812-AC", 104), ("zxr10RouterR10-1812-DC", 105), ("zxr10RouterR10-1811-AC", 106), ("zxr10RouterR10-1811-DC", 107), ("zxr10RouterR10-1822E-AC", 108), ("zxr10RouterR10-1822E-DC", 109), ("zxr10RouterR10-1821E-AC", 110), ("zxr10RouterR10-1821E-DC", 111), ("zxr10RouterR10-1812E-AC", 112), ("zxr10RouterR10-1812E-DC", 113), ("zxr10RouterR10-1811E-AC", 114), ("zxr10RouterR10-1811E-DC", 115), ("zxr10RouterR10-3881-AC", 132), ("zxr10RouterR10-3882-AC", 133), ("zxr10RouterR10-3883-AC", 134), ("zxr10RouterR10-3884-AC", 135), ("zxr10RouterR10-3881-DC", 136), ("zxr10RouterR10-3882-DC", 137), ("zxr10RouterR10-3883-DC", 138), ("zxr10RouterR10-3884-DC", 139), ("zxr10RouterR10-3841-AC", 140), ("zxr10RouterR10-3842-AC", 
    141), ("zxr10RouterR10-3843-AC", 142), ("zxr10RouterR10-3844-AC", 143), ("zxr10RouterR10-3841-DC", 144), ("zxr10RouterR10-3842-DC", 145), ("zxr10RouterR10-3843-DC", 146), ("zxr10RouterR10-3844-DC", 147), ("zxr10RouterR10-3821-AC", 148), ("zxr10RouterR10-3822-AC", 149), ("zxr10RouterR10-3823-AC", 150), ("zxr10RouterR10-3824-AC", 151), ("zxr10RouterR10-3821-DC", 152), ("zxr10RouterR10-3822-DC", 153), ("zxr10RouterR10-3823-DC", 154), ("zxr10RouterR10-3824-DC", 155), ("zxr10RouterR10-2841-AC", 172), ("zxr10RouterR10-2842-AC", 173), ("zxr10RouterR10-2843-AC", 174), ("zxr10RouterR10-2844-AC", 175), ("zxr10RouterR10-2841-DC", 176), ("zxr10RouterR10-2842-DC", 177), ("zxr10RouterR10-2843-DC", 178), ("zxr10RouterR10-2844-DC", 179), ("zxr10RouterR10-2881-AC", 180), ("zxr10RouterR10-2882-AC", 181), ("zxr10RouterR10-2883-AC", 182), ("zxr10RouterR10-2884-AC", 183), ("zxr10RouterR10-2881-DC", 184), ("zxr10RouterR10-2882-DC", 185), ("zxr10RouterR10-2883-DC", 186), ("zxr10RouterR10-2884-DC", 187), ("zxr10RouterR10-2821-AC", 188), ("zxr10RouterR10-2822-AC", 189), ("zxr10RouterR10-2823-AC", 190), ("zxr10RouterR10-2824-AC", 191), ("zxr10RouterR10-2821-DC", 192), ("zxr10RouterR10-2822-DC", 193), ("zxr10RouterR10-2823-DC", 194), ("zxr10RouterR10-2824-DC", 195), ("zxr10RouterR10-1841-AC", 196), ("zxr10RouterR10-1842-AC", 197), ("zxr10RouterR10-1843-AC", 198), ("zxr10RouterR10-1844-AC", 199), ("zxr10RouterR10-1841-DC", 200), ("zxr10RouterR10-1842-DC", 201), ("zxr10RouterR10-1843-DC", 202), ("zxr10RouterR10-1844-DC", 203), ("zxr10Switch-6907", 400), ("zxr10Switch-T240G", 401), ("zxr10Switch-6902", 402), ("zxr10Switch-6905", 403), ("zxr10Switch-6908", 404), ("zxr10Switch-8902", 405), ("zxr10Switch-8905", 406), ("zxr10Switch-8908", 407), ("zxr10Switch-8912", 408), ("zxctn-6100", 409), ("zxr10Switch5928-PS", 417), ("zxr10Switch3928A-PS", 418), ("zxr10Switch3928E", 419), ("zxr10Switch3928E-FI", 420), ("zxr10UAS10600", 5000))
class BoardType(Integer32):
    """Enumerated board category: upc(1), sfc(2), npc(3), mec(4), smp(5), mcp(6)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))
    namedValues = NamedValues(("upc-board", 1), ("sfc-board", 2), ("npc-board", 3), ("mec-board", 4), ("smp-board", 5), ("mcp-board", 6))
class NpcType(Integer32):
    """Generated enumeration of ZXR10 line-card (NPC) types.

    The value constraint is split across two SingleValueConstraint sets
    combined with ConstraintsUnion, mirroring the pysmi generator output.
    """
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 542, 543, 544, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1235, 1236, 1237, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 2051, 2052, 2053, 2054, 2055, 2564, 2565, 2566), SingleValueConstraint(2567, 2568, 2569, 2570, 2571, 2572, 2573, 2574, 3077, 3078, 3079, 3080, 3081, 4103, 4104, 4609, 4610, 4611, 4616, 4617, 4865, 4866, 4867, 5129, 5130, 5131, 5132, 5133, 5134, 5135, 5136, 5137, 5138, 5139, 5642, 5643, 5644, 5645, 5646, 5647, 128, 129, 130, 131, 161, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 5001, 5101, 5102, 5103, 5201, 5301, 
    5401))
    # NOTE(review): 'ccard-gei-x2-n-ez' (value 1075) looks like a typo for
    # 'card-gei-x2-n-ez' but is preserved verbatim from the vendor MIB.
    namedValues = NamedValues(("card-fei-8", 512), ("card-fei-f", 513), ("card-fei-b", 514), ("card-fei-o", 515), ("card-fei-e", 516), ("card-fei-1", 517), ("card-fei-b2", 518), ("card-fei-b-es", 519), ("card-fei-48", 520), ("card-fei-o8", 521), ("card-fei-44-4-e", 522), ("card-fei-44-4-e-ez", 523), ("card-fei-24", 524), ("card-fei-16", 525), ("card-fei-2", 526), ("card-fei-4", 527), ("card-fei-o-16", 528), ("card-fei-44-4-fi", 529), ("card-fei-44-4-fi-ez", 530), ("card-mfe-2", 531), ("card-fei-24e", 532), ("card-fei-24eo", 533), ("card-mefes", 534), ("card-mfef", 535), ("card-msgfs", 536), ("card-megfs", 537), ("card-msfes", 538), ("card-mfef-2", 539), ("card-fei-24-2-fi", 542), ("card-fei-o24-2-fi", 543), ("card-fei-48-4-fi", 544), ("card-gei-8", 1025), ("card-gei-l", 1026), ("card-gei-g-1", 1027), ("card-gei-g-2", 1028), ("card-gei-2", 1029), ("card-gei-o", 1030), ("card-gei-o-g", 1031), ("card-gei-e", 1032), ("card-gei-g-2-es", 1033), ("card-gei-12", 1034), ("card-gei-x", 1035), ("card-gei-h4", 1036), ("card-gei-h2", 1037), ("card-gei-24", 1038), ("card-gei-12-4e", 1039), ("card-gei-24-4e", 1040), ("card-gei-x-2", 1041), ("card-gei-x-1-ez", 1042), ("card-gei-x-ez", 1043), ("card-gei-12-4e-ez", 1044), ("card-gei-t2", 1045), ("card-gei-4-12e", 1046), ("card-gei-4-24e", 1047), ("card-gei-4-12e-ez", 1048), ("card-10gei-1", 1049), ("card-gei-1", 1050), ("card-gei-4o-24e-l", 1051), ("card-gei-24o-4e", 1052), ("card-gei-x-1o-w", 1053), ("card-gei-x-1o-la", 1054), ("card-gei-x-1e", 1055), ("card-gei-x-1e-d", 1056), ("card-gei-x-4o-48e", 1057), ("card-gei-f-2", 1058), ("card-gei-10", 1059), ("card-gei-4", 1060), ("card-gei-2e", 1061), ("card-gei-2o", 1062), ("card-gei-1o-1e", 1063), ("card-gei-8-p4", 1064), ("card-gei-8-p4-ez", 1065), ("card-gei-4b", 1066), ("card-gei-10-b", 1067), ("card-gei-4-b", 1068), ("card-gei-x-2ef", 1069), ("card-gei-x-4ef", 1070), ("card-gei-24ec", 1071), ("card-gei-24ef", 1072), ("card-gei-48ec", 1073), ("card-gei-48ef", 1074), 
    ("ccard-gei-x2-n-ez", 1075), ("card-gei-24-n-ez", 1076), ("card-gei-x-2ef-lit", 1077), ("card-gei-x-4ef-lit", 1078), ("card-gei-24ec-lit", 1079), ("card-gei-24ef-lit", 1080), ("card-gei-48ec-lit", 1081), ("card-gei-48ef-lit", 1082), ("card-gei-x-2ef-den", 1083), ("card-gei-x-4ef-den", 1084), ("card-gei-24ec-den", 1085), ("card-gei-24ef-den", 1086), ("card-gei-48ec-den", 1087), ("card-gei-48ef-den", 1088), ("card-gei-24f-fg", 1089), ("card-gei-24f-fg-lit", 1090), ("card-gei-24f-fg-den", 1091), ("card-gei-24hec", 1092), ("card-gei-24hec-lit", 1093), ("card-gei-24hec-den", 1094), ("card-gei-24hef", 1095), ("card-gei-24hef-lit", 1096), ("card-gei-24hef-den", 1097), ("card-gei-24hf-fg", 1098), ("card-gei-24hf-fg-lit", 1099), ("card-gei-24hf-fg-den", 1100), ("card-gei-48f-fg", 1101), ("card-gei-48f-fg-lit", 1102), ("card-gei-48f-fg-den", 1103), ("card-gei-24ef-x11", 1104), ("card-gei-24ec-x11", 1105), ("card-gei-24hef-x11", 1106), ("card-gei-48ef-x11", 1107), ("card-gei-24hec-x11", 1108), ("card-gei-48ec-x11", 1109), ("card-gei-x-2ef-x11", 1110), ("card-gei-x-4ef-x11", 1111), ("card-wan-x", 1112), ("card-gei-4-d", 1113), ("card-gei-x-8ef", 1114), ("card-ugse-gei-48ec", 1115), ("card-ugse-gei-48ec-lit", 1116), ("card-ugse-gei-48ec-den", 1117), ("card-ugsf-gei-48ef", 1118), ("card-ugsf-gei-48ef-lit", 1119), ("card-ugsf-gei-48ef-den", 1120), ("card-umtf-gei-24ef", 1121), ("card-umtf-gei-24ef-lit", 1122), ("card-umtf-gei-24ef-den", 1123), ("card-umtf-gei-24ef-2x", 1124), ("card-umtf-gei-24ef-2x-lit", 1125), ("card-umtf-gei-24ef-2x-den", 1126), ("card-umtf-gei-24ef-ez", 1127), ("card-umtf-gei-24ef-lit-ez", 1128), ("card-umtf-gei-24ef-den-ez", 1129), ("card-umte-gei-24ef", 1130), ("card-umte-gei-24ef-lit", 1131), ("card-umte-gei-24ef-den", 1132), ("card-umte-gei-24ef-2x", 1133), ("card-umte-gei-24ef-2x-lit", 1134), ("card-umte-gei-24ef-2x-den", 1135), ("card-umte-gei-24ef-ez", 1136), ("card-umte-gei-24ef-lit-ez", 1137), ("card-umte-gei-24ef-den-ez", 1138), ("card-gei-e-sfp", 
    1139), ("card-gei-8f", 1140), ("card-uwsa8541-g-24", 1141), ("card-stack", 1142), ("card-gei-8e", 1143), ("card-umhf-gei-x-4ef", 1144), ("card-umhf-gei-x-4ef-1ez", 1145), ("card-umhf-gei-x-4ef-2ez", 1146), ("card-umhf-gei-x-4ef-1ez-left", 1147), ("card-umtf1-gei-l2ef", 1149), ("card-umtf1-gei-l2ef-lit", 1150), ("card-umtf1-gei-l2ef-den", 1151), ("card-umtf1-gei-l2ef-2x", 1152), ("card-umtf1-gei-l2ef-2x-lit", 1153), ("card-umtf1-gei-l2ef-2x-den", 1154), ("card-umtf1-gei-l2ef-ez", 1155), ("card-umtf1-gei-l2ef-lit-ez", 1156), ("card-umtf1-gei-l2ef-den-ez", 1157), ("card-umte1-gei-l2ef", 1158), ("card-umte1-gei-l2ef-lit", 1159), ("card-umte1-gei-l2ef-den", 1160), ("card-umte1-gei-l2ef-2x", 1161), ("card-umte1-gei-l2ef-2x-lit", 1162), ("card-umte1-gei-l2ef-2x-den", 1163), ("card-umte1-gei-l2ef-ez", 1164), ("card-umte1-gei-l2ef-lit-ez", 1165), ("card-umte1-gei-l2ef-den-ez", 1166), ("card-sygf-gei-12ef-2x", 1168), ("card-sygf-gei-12ef-2x-den", 1169), ("card-sygf-gei-12ef-2x-ez", 1170), ("card-sygf-gei-12ef-2x-den-ez", 1171), ("card-sygf-gei-12ef-2x-clock", 1172), ("card-sygf-gei-12ef-2x-den-clock", 1173), ("card-sygf-gei-12ef-2x-ez-clock", 1174), ("card-sygf-gei-12ef-2x-den-ez-clock", 1175), ("card-umhf-gei-x-2ef", 1184), ("card-umhf-gei-x-2ef-1ez", 1185), ("card-umhf-gei-x-2ef-2ez", 1186), ("card-umhf-gei-x-2ef-1ez-left", 1187), ("card-gei-4oi-48e", 1188), ("card-gei-4oi-24e", 1189), ("card-umop-12gefi-12epon", 1190), ("card-umop-12gefi-12epon-den", 1191), ("card-umop-12gefi-12epon-ez", 1192), ("card-umop-12gefi-12epon-den-ez", 1193), ("card-umop-12gefi-8epon", 1194), ("card-umop-12gefi-8epon-den", 1195), ("card-umop-12gefi-8epon-ez", 1196), ("card-umop-12gefi-8epon-den-ez", 1197), ("card-umop-12gefi-4epon", 1198), ("card-umop-12gefi-4epon-den", 1199), ("card-umop-12gefi-4epon-ez", 1200), ("card-umop-12gefi-4epon-den-ez", 1201), ("card-uxhf-2c-2xefi", 1202), ("card-uxhf-2c-2xefi-lit", 1203), ("card-uxhf-2c-2xefi-den", 1204), ("card-ge48a", 1205), ("card-ge24a", 1206), 
    ("card-utqf-gei-12ef-1588-clock-no", 1207), ("card-utqf-gei-12ef-1588-clock-left", 1208), ("card-utqf-gei-12ef-1588-clock-right", 1209), ("card-utqf-gei-12ef-1588-clock", 1210), ("card-utqf-gei-12ef-1588-clock-no-den", 1211), ("card-utqf-gei-12ef-1588-clock-left-den", 1212), ("card-utqf-gei-12ef-1588-clock-right-den", 1213), ("card-utqf-gei-12ef-1588-clock-den", 1214), ("card-utqf-gei-12ef-1588-clock-no-ez", 1215), ("card-utqf-gei-12ef-1588-clock-left-ez", 1216), ("card-utqf-gei-12ef-1588-clock-right-ez", 1217), ("card-utqf-gei-12ef-1588-clock-ez", 1218), ("card-utqf-gei-12ef-1588-clock-no-den-ez", 1219), ("card-utqf-gei-12ef-1588-clock-left-den-ez", 1220), ("card-utqf-gei-12ef-1588-clock-right-den-ez", 1221), ("card-utqf-gei-12ef-1588-clock-den-ez", 1222), ("card-gei-48ef-den-replace-ufsf", 1223), ("card-gei-48ef-replace-ufsf", 1224), ("card-umtf1-gei-12ef-ez-replace-xgm", 1225), ("card-umtf1-gei-12ef-den-ez-replace-xgm", 1226), ("card-ge24d", 1227), ("card-UVSF-H3", 1228), ("card-UVFF-H3", 1229), ("card-UWSF-H3", 1230), ("card-UVTF-H3", 1235), ("card-USFF-H3", 1236), ("card-SF-24GE-H3", 1237), ("card-oc3", 1538), ("card-oc3-4", 1539), ("card-oc12", 1540), ("card-oc48", 1541), ("card-oc48-2", 1542), ("card-oc3-c", 1543), ("card-oc48-c", 1544), ("card-oc48-dl", 1545), ("card-oc48-sg", 1546), ("card-oc48-2-dl", 1547), ("card-oc3-8", 1548), ("card-oc192-1-ez", 1549), ("card-oc3-8-pm5351", 1550), ("card-oc3-c-2", 1551), ("card-oc192b-1-ez", 1552), ("card-oc48f", 1553), ("card-oc48-4", 1554), ("card-atm155", 2051), ("card-atm622", 2052), ("card-atm155-8", 2053), ("card-atm155-rev", 2054), ("card-atm155-4", 2055), ("card-e1-16", 2564), ("card-e1-32", 2565), ("card-e1-c-32", 2566)) + NamedValues(("card-e1-c", 2567), ("card-e1-c-4", 2568), ("card-e1-c-8", 2569), ("card-e1-c-8-ixbus", 2570), ("card-e1-c-1", 2571), ("card-e1-c-2", 2572), ("card-e1vi", 2573), ("card-e1-c-8-pm7366", 2574), ("card-t1-c-4", 3077), ("card-t1-4", 3078), ("card-t1-c-1", 3079), ("card-t1-c-2", 
    3080), ("card-t1-c-8-pm7366", 3081), ("card-t3-c", 4103), ("card-t3", 4104), ("card-smb", 4609), ("card-e1te", 4610), ("card-manage-qx-lct", 4611), ("card-e3-c", 4616), ("card-e3", 4617), ("card-sygf-clkc-mmpi", 4865), ("card-utqf-tphy-mmpi", 4866), ("card-bpts", 4867), ("card-lhs", 5129), ("card-aux", 5130), ("card-lfxs", 5131), ("card-lhsu", 5132), ("card-lfxo", 5133), ("card-lffxs-4", 5134), ("card-lffxs-2", 5135), ("card-ndec", 5136), ("card-mhs8", 5137), ("card-mhs16", 5138), ("card-mndec", 5139), ("card-mpc-tmcs-non-hg", 5642), ("card-mpc-tmcs-8-hg", 5643), ("card-mpc-umcs-12-hg", 5644), ("card-mpc-umcs-16-hg", 5645), ("card-mpc-umct-24-hg", 5646), ("card-mpc-umcu-12-hg", 5647), ("card-manage", 128), ("card-manage-spec", 129), ("card-manage-qx", 130), ("card-manage-lct", 131), ("card-loopback", 161), ("card-gre", 163), ("card-tunnel", 164), ("card-mppp", 165), ("card-vlan", 166), ("card-null", 167), ("card-port-channel", 168), ("card-supervlan", 169), ("card-dialer", 170), ("card-vbui", 171), ("card-virtual-template", 173), ("card-virtual-access", 174), ("card-superpvc", 175), ("card-l2tpdialer", 176), ("card-qinq", 177), ("card-superqinq", 178), ("card-uni", 179), ("card-ces", 180), ("card-cip", 181), ("card-vip", 182), ("upc-board", 5001), ("sfc-board", 5101), ("sfc2-board", 5102), ("sfc3-board", 5103), ("smp-board", 5201), ("mcp-board", 5301), ("mec-board", 5401))
class PortType(Integer32):
    """Generated enumeration of ZXR10 physical and virtual port types."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(512, 1025, 1026, 1027, 1028, 1061, 1062, 1063, 1190, 1538, 1539, 1540, 1541, 1542, 1543, 2051, 2052, 2564, 2565, 2566, 2567, 2568, 3077, 3078, 3590, 3591, 3592, 3593, 4103, 4104, 4616, 4617, 5129, 5130, 5131, 5132, 5133, 5134, 5135, 5136, 5137, 5138, 5139, 5140, 5141, 5142, 5143, 128, 129, 161, 163, 164, 165, 166, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182))
    namedValues = NamedValues(("port-fei", 512), ("port-gei", 1025), ("port-gei-x", 1026), ("port-wan-x", 1027), ("port-stack", 1028), ("port-gei-2e", 1061), ("port-gei-2o", 1062), ("port-gei-1o-1e", 1063), ("port-gei-epon", 1190), ("port-oc3", 1538), ("port-oc12", 1539), ("port-oc48", 1540), ("port-oc3-c", 1541), ("port-oc48-c", 1542), ("port-oc192", 1543), ("port-atm155", 2051), ("port-atm622", 2052), ("port-e1", 2564), ("port-e1-c", 2565), ("port-e1-c-ixbus", 2566), ("port-e1vi", 2567), ("port-e1vi-serial", 2568), ("port-t1", 3077), ("port-t1-c", 3078), ("port-tdm-e1", 3590), ("port-tdm-ether", 3591), ("port-tdm-inte-ether", 3592), ("port-tdm-t1", 3593), ("port-t3", 4103), ("port-t3-c", 4104), ("port-e3", 4616), ("port-e3-c", 4617), ("port-lhs", 5129), ("port-aux", 5130), ("port-lfxs", 5131), ("port-lhsu", 5132), ("port-lfxo", 5133), ("port-async", 5134), ("port-lffxs-2", 5135), ("port-ndec", 5136), ("port-mndec", 5137), ("port-timer1588", 5138), ("port-timer-ext", 5139), ("port-manage-qx", 5140), ("port-manage-lct", 5141), ("port-timer-abs", 5142), ("port-gps", 5143), ("port-manage", 128), ("port-manage-spec", 129), ("port-loopback", 161), ("port-gre", 163), ("port-tunnel", 164), ("port-mppp", 165), ("port-vlan", 166), ("port-null", 167), ("port-port-channel", 168), ("port-supervlan", 169), ("port-dialer", 170), ("port-vbui", 171), ("port-virtual-template", 173), ("port-virtual-access", 174), ("port-superpvc", 175), ("port-l2tpdialer", 176), ("port-qinq", 177), ("port-superqinq", 178), ("port-uni", 179), ("port-ces", 180), ("port-cip", 181), ("port-vip", 182))
class AlarmType(Integer32):
    """Generated enumeration of alarm categories (hardware-*, softprotocol-*, statistics-*)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 128, 129, 130, 131))
    namedValues = NamedValues(("hardware-environment", 1), ("hardware-board", 2), ("hardware-port", 3), ("softprotocol-ros", 65), ("softprotocol-database", 66), ("softprotocol-oam", 67), ("softprotocol-security", 68), ("softprotocol-ospf", 69), ("softprotocol-rip", 70), ("softprotocol-bgp", 71), ("softprotocol-drp", 72), ("softprotocol-tcp-udp", 73), ("softprotocol-ip", 74), ("softprotocol-igmp", 75), ("softprotocol-telnet", 76), ("softprotocol-udp", 77), ("softprotocol-arp", 78), ("softprotocol-isis", 79), ("softprotocol-icmp", 80), ("softprotocol-snmp", 81), ("softprotocol-rmon", 82), ("softprotocol-nat", 83), ("softprotocol-urpf", 84), ("softprotocol-vswitch", 85), ("softprotocol-acl", 86), ("softprotocol-vrrp", 87), ("softprotocol-ppp", 88), ("softprotocol-scan", 89), ("softprotocol-mac", 90), ("softprotocol-alg", 91), ("softprotocol-loopdetect", 92), ("softprotocol-session", 93), ("softprotocol-dhcp", 94), ("softprotocol-mld", 95), ("softprotocol-stp", 96), ("softprotocol-vlan", 97), ("softprotocol-local-accounting", 98), ("softprotocol-radius", 99), ("softprotocol-ldp", 100), ("softprotocol-amat", 101), ("softprotocol-l2vpn", 102), ("softprotocol-rsvp", 103), ("softprotocol-zesr", 104), ("softprotocol-igmp-snooping", 105), ("softprotocol-fr", 106), ("softprotocol-ethoam", 107), ("softprotocol-ssh", 108), ("softprotocol-tdm", 109), ("softprotocol-qos", 110), ("softprotocol-tacacs", 111), ("softprotocol-aaa", 112), ("softprotocol-ipv6", 113), ("softprotocol-pim", 114), ("softprotocol-fw", 115), ("softprotocol-mux", 116), ("softprotocol-udld", 117), ("softprotocol-mix", 118), ("softprotocol-bfd", 119), ("softprotocol-cfm", 120), ("softprotocol-zess", 128), ("statistics-aps", 129), ("statistics-pdh-2m", 130), ("statistics-epon", 131))
class PortWorkingType(Integer32):
    """Enumerated duplex mode: auto-config(0), full-duplex(1), half-duplex(2)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(("auto-config", 0), ("full-duplex", 1), ("half-duplex", 2))
class ShelfAttrib(Integer32):
    """Enumerated shelf role: workshelf(1), fanshelf(2), powershelf(3), environshelf(4)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
    namedValues = NamedValues(("workshelf", 1), ("fanshelf", 2), ("powershelf", 3), ("environshelf", 4))
class HostAttr(Integer32):
    """Enumerated host scope: global(0), mng(1), vrf(2)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(("global", 0), ("mng", 1), ("vrf", 2))
class SystemDeviceType(Integer32):
    """Whether the device runs standalone (single) or as a stack member."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1)
    )
    namedValues = NamedValues(
        ("single", 0),
        ("stack", 1),
    )
# --- System-configuration subtree: enterprise 3902 (ZTE), zxr10systemconfig = .3.1,
# --- zxr10rack = .3.2. Scalars below hang off zxr10systemconfig.
zxr10systemconfig = MibIdentifier((1, 3, 6, 1, 4, 1, 3902, 3, 1))
zxr10rack = MibIdentifier((1, 3, 6, 1, 4, 1, 3902, 3, 2))
# .3.1.1: product identity OID (read-only).
zxr10SystemID = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 1), ProductID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemID.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemID.setDescription('An integer description of the zxr10SystemID.')
# .3.1.2: system serial number (read-only).
zxr10SystemserialNo = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemserialNo.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemserialNo.setDescription('the serial no of the current system.')
# .3.1.3: free-text system description, at most 64 printable characters.
zxr10SystemDescrip = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemDescrip.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemDescrip.setDescription('A texal description for the zxr10 System config information,The character must be printable .')
# .3.1.4-.3.1.5: SNMP trap destination host and community (read-write).
zxr10SystemTrapHost = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zxr10SystemTrapHost.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemTrapHost.setDescription('the ip address of trap host.')
zxr10SystemTrapCommunity = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zxr10SystemTrapCommunity.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemTrapCommunity.setDescription('the community of trap. ')
# .3.1.6: running software version string.
zxr10SystemVersion = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemVersion.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemVersion.setDescription('The version of current system.')
# .3.1.7: VRF name used to reach the trap host.
zxr10SystemVpnName = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zxr10SystemVpnName.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemVpnName.setDescription('The Vrf VPN name of SNMP trap host.')
# .3.1.8: host addressing context (HostAttr: global/mng/vrf).
# NOTE(review): the description text below duplicates zxr10SystemVpnName's and
# looks like a copy-paste in the source MIB; it is a runtime string, left as-is.
zxr10SystemHostAttr = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 8), HostAttr()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zxr10SystemHostAttr.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemHostAttr.setDescription('The Vrf VPN name of SNMP trap host.')
# .3.1.9: encrypted enable secret, 3..16 characters.
zxr10SystemEnableSecret = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(3, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: zxr10SystemEnableSecret.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemEnableSecret.setDescription('Specifies an ENCRYPTED secret.')
# .3.1.12: single vs. stack device (sub-ids 10-11 are not defined in this
# chunk -- presumably reserved or defined elsewhere; confirm against the MIB).
zxr10SystemDeviceType = MibScalar((1, 3, 6, 1, 4, 1, 3902, 3, 1, 12), SystemDeviceType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemDeviceType.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemDeviceType.setDescription('The device is stack device or single device. 1: stack device 0: single device')
# --- Rack table (.3.2.1): one row per rack, indexed by zxr10RackNo. ---
zxr10rackTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 2, 1), )
if mibBuilder.loadTexts: zxr10rackTable.setStatus('current')
if mibBuilder.loadTexts: zxr10rackTable.setDescription('the description of zxr10 rackinfomation it is a list of rack entries.')
zxr10rackEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 2, 1, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10RackNo"))
if mibBuilder.loadTexts: zxr10rackEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10rackEntry.setDescription('A rack entry containing objects that descript the rack infomation,such as: rack no.,rack status,rack attribute.')
# Column 1: rack number (also the row index).
zxr10RackNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10RackNo.setStatus('current')
if mibBuilder.loadTexts: zxr10RackNo.setDescription('the no. of the special rack.')
# Column 2: master/slave role of the rack (MasterStatus).
zxr10RackAttrib = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 1, 1, 2), MasterStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10RackAttrib.setStatus('current')
if mibBuilder.loadTexts: zxr10RackAttrib.setDescription('the attribute. of the special rack. eg ,the current rack is a master rack, we use 1,else if the rack is slave we use 2 .')
# Column 3: availability (1 = running, 0 = down).
zxr10RackAvailStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 1, 1, 3), AvailStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10RackAvailStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10RackAvailStatus.setDescription('the status. of the special rack. eg ,the current rack is running, we use 1,else if the rack is down, we use 0 .')
# --- Shelf table (.3.2.2): one row per shelf, indexed by (rack, shelf). ---
zxr10shelfTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 2, 2), )
if mibBuilder.loadTexts: zxr10shelfTable.setStatus('current')
if mibBuilder.loadTexts: zxr10shelfTable.setDescription('the description of zxr10 shelf infomation it is a list of shelf entries.')
zxr10shelfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 2, 2, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10RackNo"), (0, "ZXR10-MIB", "zxr10ShelfNo"))
if mibBuilder.loadTexts: zxr10shelfEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10shelfEntry.setDescription('A shelf entry containing objects that descript the shelf infomation,such as: zxr10ShelfRackNo,zxr10ShelfNo , zxr10ShelfAttrib,zxr10ShelfAvailStatus.')
# Column 1: shelf number (second component of the row index).
zxr10ShelfNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10ShelfNo.setStatus('current')
if mibBuilder.loadTexts: zxr10ShelfNo.setDescription('the no. of the special shelf.')
# Column 2: shelf role. NOTE(review): typed as plain Integer32 although the
# ShelfAttrib enumeration above matches this description; probably generated
# that way from the MIB -- confirm before tightening the type.
zxr10ShelfAttrib = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10ShelfAttrib.setStatus('current')
if mibBuilder.loadTexts: zxr10ShelfAttrib.setDescription('the attribute. of the special rack. eg ,the current shelf is a working shelf, we use 1,else if the shelf is a fan shelf we use 2 ,etc.')
# Column 3: availability (1 = in use, 0 = unused).
zxr10ShelfAvailStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 2, 1, 3), AvailStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10ShelfAvailStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10ShelfAvailStatus.setDescription('the status. of the special shelf. eg ,the current shelf is in use, we use 1,else if the shelfis unuse, we use 0 .')
# --- Slot/board table (.3.2.3): one row per card, indexed by
# --- (rack, shelf, position-in-rack). Carries board type, status and versions.
zxr10slotTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3), )
if mibBuilder.loadTexts: zxr10slotTable.setStatus('current')
if mibBuilder.loadTexts: zxr10slotTable.setDescription('the description of zxr10 card infomation it is a list of card entries.')
zxr10slotEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10RackNo"), (0, "ZXR10-MIB", "zxr10ShelfNo"), (0, "ZXR10-MIB", "zxr10PosInRack"))
if mibBuilder.loadTexts: zxr10slotEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10slotEntry.setDescription('A card entry containing objects that descript the card infomation,such as: zxr10PaneNo, zxr10SlotBoardType,zxr10SlotPortsNumber, zxr10SlotAvailStatus,zxr10SlotOperStatus, zxr10SlotMasterStatus.')
# Column 1: slot (pane) number the card is inserted in.
zxr10PaneNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PaneNo.setStatus('current')
if mibBuilder.loadTexts: zxr10PaneNo.setDescription('the no of the slot where the special card is inserted .')
# Column 2: position within the rack (third component of the row index).
zxr10PosInRack = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PosInRack.setStatus('current')
if mibBuilder.loadTexts: zxr10PosInRack.setDescription('the position where the special card is stored in the rack.')
# Columns 3-5: board type, NPC type and port count.
zxr10SlotBoardType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 3), BoardType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SlotBoardType.setStatus('current')
if mibBuilder.loadTexts: zxr10SlotBoardType.setDescription('the type of the card( or board).')
zxr10SlotNPCType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 4), NpcType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SlotNPCType.setStatus('current')
if mibBuilder.loadTexts: zxr10SlotNPCType.setDescription('the NPC type of the board.')
zxr10SlotPortsNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SlotPortsNumber.setStatus('current')
if mibBuilder.loadTexts: zxr10SlotPortsNumber.setDescription('it descripts how many ports are in the card .')
# Columns 6-8: presence (AvailStatus), operational state (OperStatus) and
# master/slave role (MasterStatus) of the card.
zxr10SlotAvailStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 6), AvailStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SlotAvailStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10SlotAvailStatus.setDescription('the status. of the special card. eg ,the current rack is running, we use 1,else if the card is down, we use 0 . 1 indicate board in slot, 0 indicate board not in slot.')
zxr10SlotOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 7), OperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SlotOperStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10SlotOperStatus.setDescription("the operstatus. of the special card. eg ,the current card can work, we use 1, else if the card can't,we use 0 . 1 indicate 'mpu and rpu both run normally', 0 indicate 'error' for upc board, 1 indicate 'upc communicate with them(sfc,upc) normally', 0 indicate 'error' for sfc npc board.")
zxr10SlotMasterStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 8), MasterStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SlotMasterStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10SlotMasterStatus.setDescription('as for board upc and board sfc , master indicates masterStatus, slave indicates slaveStatus, for npc,only one status : master status')
# Columns 9-11: last update tick, board temperature, max sub-card slots.
zxr10ReceiveTick = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 9), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10ReceiveTick.setStatus('current')
if mibBuilder.loadTexts: zxr10ReceiveTick.setDescription('The last ticks of receving the board information .')
zxr10SlotTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SlotTemperature.setStatus('current')
if mibBuilder.loadTexts: zxr10SlotTemperature.setDescription('The temperature of the board .')
zxr10SubcardMax = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SubcardMax.setStatus('current')
if mibBuilder.loadTexts: zxr10SubcardMax.setDescription('It descripts how many sub slots which can load the sub card in the board.')
# Columns 12-15: firmware/hardware version strings (bootrom, PCB, FPGA, Mcode).
zxr10BootVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10BootVersion.setStatus('current')
if mibBuilder.loadTexts: zxr10BootVersion.setDescription('It descripts the bootrom version in the board.')
zxr10PCBVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PCBVersion.setStatus('current')
if mibBuilder.loadTexts: zxr10PCBVersion.setDescription('It descripts the PCB version in the board.')
zxr10FPGAVer = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 14), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10FPGAVer.setStatus('current')
if mibBuilder.loadTexts: zxr10FPGAVer.setDescription('It descripts the FPGA version in the board.')
zxr10McodeVer = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 15), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10McodeVer.setStatus('current')
if mibBuilder.loadTexts: zxr10McodeVer.setDescription('It descripts the Mcode version in the board.')
# Columns 16-18: memory sizes (QDR SRAM master/slave, CAM).
zxr10MasterQDRSRAMSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10MasterQDRSRAMSize.setStatus('current')
if mibBuilder.loadTexts: zxr10MasterQDRSRAMSize.setDescription('It descripts the size of QDRSRAM.')
zxr10SlaveQDRSRAMSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SlaveQDRSRAMSize.setStatus('current')
if mibBuilder.loadTexts: zxr10SlaveQDRSRAMSize.setDescription('It descripts the size of QDRSRAM.')
zxr10camSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10camSize.setStatus('current')
if mibBuilder.loadTexts: zxr10camSize.setDescription('It descripts the size of cam.')
# Column 19: silk-screen label printed on the board front panel.
zxr10BoardSilkLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 3, 1, 19), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10BoardSilkLabel.setStatus('current')
if mibBuilder.loadTexts: zxr10BoardSilkLabel.setDescription('It descripts the silk label of the board.')
# --- Port table (.3.2.4): one row per port on a card, indexed by
# --- (rack, shelf, position-in-rack, port).
zxr10portTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4), )
if mibBuilder.loadTexts: zxr10portTable.setStatus('current')
if mibBuilder.loadTexts: zxr10portTable.setDescription('the description of port infomation in the zxr10 card . it is a list of port entries.')
zxr10portEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10RackNo"), (0, "ZXR10-MIB", "zxr10ShelfNo"), (0, "ZXR10-MIB", "zxr10PosInRack"), (0, "ZXR10-MIB", "zxr10PortNo"))
if mibBuilder.loadTexts: zxr10portEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10portEntry.setDescription('A port entry containing objects that descript the port infomation,such as: zxr10PortNo zxr10PortType zxr10PortWorkingType zxr10PortMTU zxr10PortSpeed zxr10PortAvailStatus zxr10PortOperStatus.')
# Columns 1-2: ifIndex (cross-reference into the IF-MIB) and port number.
zxr10PortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortIfIndex.setStatus('current')
if mibBuilder.loadTexts: zxr10PortIfIndex.setDescription('the IfIndex of the port on the card.')
zxr10PortNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortNo.setStatus('current')
if mibBuilder.loadTexts: zxr10PortNo.setDescription('the no. of the port on the card, which contains several ports .')
# Columns 3-4: port type and duplex working mode (PortWorkingType enum).
zxr10PortType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 3), PortType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortType.setStatus('current')
if mibBuilder.loadTexts: zxr10PortType.setDescription('the type of the port.')
zxr10PortWorkingType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 4), PortWorkingType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortWorkingType.setStatus('current')
if mibBuilder.loadTexts: zxr10PortWorkingType.setDescription('auto-mode(0) indicate the port working type is auto mode, full-duplex(1) indicate the port working type is full duplex, half-duplex(2) indicate the port working type is half duplex.')
# Columns 5-6: MTU and speed (speed is a display string in Mbps).
zxr10PortMTU = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortMTU.setStatus('current')
if mibBuilder.loadTexts: zxr10PortMTU.setDescription('the mtu of this port .')
zxr10PortSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortSpeed.setStatus('current')
if mibBuilder.loadTexts: zxr10PortSpeed.setDescription('the transmit speed of this port, unit is Mbps.')
# Columns 7-9: physical availability, operational status, protocol status.
zxr10PortAvailStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 7), AvailStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortAvailStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10PortAvailStatus.setDescription('the status. of the special port. eg ,the current port is running, we use 1,else if the card is down, we use 0 . 1 indicate physical level initialize ok 0 indicate physical level initialize not ok ')
zxr10PortOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 8), OperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortOperStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10PortOperStatus.setDescription("the operstatus. of the special card. eg ,the current card can work, we use 1, else if the card can't,we use 0 . for upc board: 1 indicate mpu and rpu both run normally, 0 indicate error, for sfc npc board: 1 indicate upc communicate with them(sfc,upc) normally, 0 indicate error.")
zxr10PortProtocolStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 9), OperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortProtocolStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10PortProtocolStatus.setDescription('0: down 1: up')
# Columns 10-11: physical medium property and free-text description.
zxr10PortProperty = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 10), PortProperty()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortProperty.setStatus('current')
if mibBuilder.loadTexts: zxr10PortProperty.setDescription('0: photo 1: electricity 3: phoelecmix 4: console')
zxr10PortDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 2, 4, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10PortDesc.setStatus('current')
if mibBuilder.loadTexts: zxr10PortDesc.setDescription('the description of this port.')
# --- Alarm subtree (.3.4); "zxr10-alarm" label carries the MIB's hyphenated
# --- name, which is not a valid Python identifier.
zxr10_alarm = MibIdentifier((1, 3, 6, 1, 4, 1, 3902, 3, 4)).setLabel("zxr10-alarm")
# Hardware alarm trap table (.3.4.6), indexed by (slot no, alarm code).
zxr10HardwareAlarmTrapTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6), )
if mibBuilder.loadTexts: zxr10HardwareAlarmTrapTable.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmTrapTable.setDescription('A hardware alarm list for sending trap information.')
zxr10HardwareAlarmTrapEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10HardwareAlarmSlotNo"), (0, "ZXR10-MIB", "zxr10HardwareAlarmCode"))
if mibBuilder.loadTexts: zxr10HardwareAlarmTrapEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmTrapEntry.setDescription('An entry to the hardware alarm trap table.')
# Columns 1-4: physical location of the alarm source.
zxr10HardwareAlarmRackNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmRackNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmRackNo.setDescription('the rack no. ')
zxr10HardwareAlarmShelfNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmShelfNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmShelfNo.setDescription('the shelf no. ')
zxr10HardwareAlarmSlotNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmSlotNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmSlotNo.setDescription('the slot no. ')
zxr10HardwareAlarmPortNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmPortNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmPortNo.setDescription('the port no. ')
# Columns 5-9: alarm code/level/time and RAISED-vs-CLEARED status + type.
zxr10HardwareAlarmCode = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmCode.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmCode.setDescription('the hardware alarm code . ')
zxr10HardwareAlarmLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmLevel.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmLevel.setDescription('the hardware alarm level. ')
zxr10HardwareAlarmTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 7), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmTime.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmTime.setDescription('Total running-time since the Router System up when the hardware alarm occurred. ')
zxr10HardwareAlarmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 8), BoolStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmStatus.setDescription('the hardware alarm status. (true or 1 ) RAISED , (false or 0) CLEARED')
zxr10HardwareAlarmType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 9), AlarmType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmType.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmType.setDescription('the hardware alarm type. ')
# Columns 10-12: monitored value plus rising/falling thresholds.
zxr10HardwareAlarmVariableValue = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmVariableValue.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmVariableValue.setDescription('the hardware alarm variable value. ')
zxr10HardwareAlarmValueRisingThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmValueRisingThreshold.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmValueRisingThreshold.setDescription('the hardware alarm variable rising value. ')
zxr10HardwareAlarmValueFallingThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmValueFallingThreshold.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmValueFallingThreshold.setDescription('the hardware alarm variable falling value. ')
# Column 13: human-readable alarm description (<= 64 chars).
zxr10HardwareAlarmDescrip = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 6, 1, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareAlarmDescrip.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmDescrip.setDescription('the hardware alarm description. ')
# --- Software/protocol alarm trap table (.3.4.7), indexed by
# --- (slot no, alarm code); same layout as the hardware table minus
# --- port number and thresholds.
zxr10SoftProtocolAlarmTrapTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7), )
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTrapTable.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTrapTable.setDescription('A softprotocol alarm list for sending trap information.')
zxr10SoftProtocolAlarmTrapEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10SoftProtocolAlarmSlotNo"), (0, "ZXR10-MIB", "zxr10SoftProtocolAlarmCode"))
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTrapEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTrapEntry.setDescription('An entry to the softprotocol alarm trap table.')
# Columns 1-3: physical location of the alarm source.
zxr10SoftProtocolAlarmRackNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmRackNo.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmRackNo.setDescription('the rack no . ')
zxr10SoftProtocolAlarmShelfNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmShelfNo.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmShelfNo.setDescription('the shelf no . ')
zxr10SoftProtocolAlarmSlotNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSlotNo.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSlotNo.setDescription('the slot no . ')
# Columns 4-8: alarm code, level, time, RAISED/CLEARED status and type.
zxr10SoftProtocolAlarmCode = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmCode.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmCode.setDescription('the softprotocol alarm code . ')
zxr10SoftProtocolAlarmLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmLevel.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmLevel.setDescription('the softprotocol alarm level. ')
zxr10SoftProtocolAlarmTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 6), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTime.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTime.setDescription('the total running-time after the Router System up when the softprotocol alarm occurred. ')
zxr10SoftProtocolAlarmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 7), BoolStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmStatus.setDescription('the softprotocol alarm status: (true or 1) RAISED , (false or 0) CLEARED . ')
zxr10SoftProtocolAlarmType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 8), AlarmType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmType.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmType.setDescription('the softprotocol alarm type. ')
# Column 9: human-readable alarm description (<= 64 chars).
zxr10SoftProtocolAlarmDescrip = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 7, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmDescrip.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmDescrip.setDescription('the softprotocol alarm description. ')
# --- Statistics alarm trap table (.3.4.8), indexed by (slot no, alarm code);
# --- mirrors the hardware alarm table, including port number and thresholds.
zxr10StatisticsAlarmTrapTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8), )
if mibBuilder.loadTexts: zxr10StatisticsAlarmTrapTable.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmTrapTable.setDescription('A statistics alarm list for sending trap information.')
zxr10StatisticsAlarmTrapEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10StatisticsAlarmSlotNo"), (0, "ZXR10-MIB", "zxr10StatisticsAlarmCode"))
if mibBuilder.loadTexts: zxr10StatisticsAlarmTrapEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmTrapEntry.setDescription('An entry to the statistics alarm trap table.')
# Columns 1-4: physical location of the alarm source.
zxr10StatisticsAlarmRackNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmRackNo.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmRackNo.setDescription('the rack no . ')
zxr10StatisticsAlarmShelfNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmShelfNo.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmShelfNo.setDescription('the shelf no . ')
zxr10StatisticsAlarmSlotNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmSlotNo.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmSlotNo.setDescription('the slot no . ')
zxr10StatisticsAlarmPortNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmPortNo.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmPortNo.setDescription('the port no. ')
# Columns 5-9: alarm code, level, time, RAISED/CLEARED status and type.
zxr10StatisticsAlarmCode = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmCode.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmCode.setDescription('the statistics alarm code . ')
zxr10StatisticsAlarmLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmLevel.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmLevel.setDescription('the statistics alarm level. ')
zxr10StatisticsAlarmTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 7), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmTime.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmTime.setDescription('the total running-time after the Router System up when the statistics alarm occurred. ')
zxr10StatisticsAlarmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 8), BoolStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmStatus.setDescription('the statistics alarm status: (true or 1) RAISED , (false or 0) CLEARED . ')
zxr10StatisticsAlarmType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 9), AlarmType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmType.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmType.setDescription('the statistics alarm sub type. ')
# Columns 10-12: monitored value plus rising/falling thresholds.
zxr10StatisticsAlarmVariableValue = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmVariableValue.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmVariableValue.setDescription('the statistics alarm variable value. ')
zxr10StatisticsAlarmValueRisingThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmValueRisingThreshold.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmValueRisingThreshold.setDescription('the statistics alarm variable rising value. ')
zxr10StatisticsAlarmValueFallingThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmValueFallingThreshold.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmValueFallingThreshold.setDescription('the statistics alarm variable falling value. ')
# Column 13: human-readable alarm description (<= 64 chars).
zxr10StatisticsAlarmDescrip = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 8, 1, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmDescrip.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmDescrip.setDescription('the statistics alarm description. ')
# --- Trap (notification) definitions under .3.4.20; each trap's varbind list
# --- is the column set of the corresponding alarm table above.
zxr10AlarmTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 3902, 3, 4, 20))
# .20.1: hardware alarm trap, carries all 13 hardware-alarm columns.
zxr10HardwareAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 3902, 3, 4, 20, 1)).setObjects(("ZXR10-MIB", "zxr10HardwareAlarmRackNo"), ("ZXR10-MIB", "zxr10HardwareAlarmShelfNo"), ("ZXR10-MIB", "zxr10HardwareAlarmSlotNo"), ("ZXR10-MIB", "zxr10HardwareAlarmPortNo"), ("ZXR10-MIB", "zxr10HardwareAlarmCode"), ("ZXR10-MIB", "zxr10HardwareAlarmLevel"), ("ZXR10-MIB", "zxr10HardwareAlarmTime"), ("ZXR10-MIB", "zxr10HardwareAlarmStatus"), ("ZXR10-MIB", "zxr10HardwareAlarmType"), ("ZXR10-MIB", "zxr10HardwareAlarmVariableValue"), ("ZXR10-MIB", "zxr10HardwareAlarmValueRisingThreshold"), ("ZXR10-MIB", "zxr10HardwareAlarmValueFallingThreshold"), ("ZXR10-MIB", "zxr10HardwareAlarmDescrip"))
if mibBuilder.loadTexts: zxr10HardwareAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareAlarmTrap.setDescription('when a hardware alarm occurred, send the trap PDU to uniform network manager.')
# .20.2: softprotocol alarm trap, carries the 9 softprotocol-alarm columns.
zxr10SoftProtocolAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 3902, 3, 4, 20, 2)).setObjects(("ZXR10-MIB", "zxr10SoftProtocolAlarmRackNo"), ("ZXR10-MIB", "zxr10SoftProtocolAlarmShelfNo"), ("ZXR10-MIB", "zxr10SoftProtocolAlarmSlotNo"), ("ZXR10-MIB", "zxr10SoftProtocolAlarmCode"), ("ZXR10-MIB", "zxr10SoftProtocolAlarmLevel"), ("ZXR10-MIB", "zxr10SoftProtocolAlarmTime"), ("ZXR10-MIB", "zxr10SoftProtocolAlarmStatus"), ("ZXR10-MIB", "zxr10SoftProtocolAlarmType"), ("ZXR10-MIB", "zxr10SoftProtocolAlarmDescrip"))
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTrap.setDescription('when a softprotocol alarm occurred, send the trap PDU to unm. ')
# .20.3: statistics alarm trap, carries the 13 statistics-alarm columns.
zxr10StatisticsAlarmTrap = NotificationType((1, 3, 6, 1, 4, 1, 3902, 3, 4, 20, 3)).setObjects(("ZXR10-MIB", "zxr10StatisticsAlarmRackNo"), ("ZXR10-MIB", "zxr10StatisticsAlarmShelfNo"), ("ZXR10-MIB", "zxr10StatisticsAlarmSlotNo"), ("ZXR10-MIB", "zxr10StatisticsAlarmPortNo"), ("ZXR10-MIB", "zxr10StatisticsAlarmCode"), ("ZXR10-MIB", "zxr10StatisticsAlarmLevel"), ("ZXR10-MIB", "zxr10StatisticsAlarmTime"), ("ZXR10-MIB", "zxr10StatisticsAlarmStatus"), ("ZXR10-MIB", "zxr10StatisticsAlarmType"), ("ZXR10-MIB", "zxr10StatisticsAlarmVariableValue"), ("ZXR10-MIB", "zxr10StatisticsAlarmValueRisingThreshold"), ("ZXR10-MIB", "zxr10StatisticsAlarmValueFallingThreshold"), ("ZXR10-MIB", "zxr10StatisticsAlarmDescrip"))
if mibBuilder.loadTexts: zxr10StatisticsAlarmTrap.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmTrap.setDescription('when a statistics alarm occurred, send the trap PDU to uniform network manager. ')
# --- zxr10-statistics subtree (OID 1.3.6.1.4.1.3902.3.3) ---
# setLabel restores the hyphenated MIB name that is not a legal Python identifier.
zxr10_statistics = MibIdentifier((1, 3, 6, 1, 4, 1, 3902, 3, 3)).setLabel("zxr10-statistics")
# zxr10SystemUnitTable: per-unit system statistics (memory, CPU, uptime),
# indexed by zxr10SystemUnitIndex. All columns are read-only.
zxr10SystemUnitTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1), )
if mibBuilder.loadTexts: zxr10SystemUnitTable.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemUnitTable.setDescription('A list of the system statistics information.')
zxr10SystemUnitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10SystemUnitIndex"))
if mibBuilder.loadTexts: zxr10SystemUnitEntry.setStatus('current')
# NOTE(review): 'sysem' typo below comes from the vendor MIB DESCRIPTION text;
# kept verbatim so this module matches the authoritative MIB.
if mibBuilder.loadTexts: zxr10SystemUnitEntry.setDescription('An entry to the sysem statistics information table.')
zxr10SystemUnitIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemUnitIndex.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemUnitIndex.setDescription('the index of system Unit ')
# UnitRunStatus is a textual convention defined earlier in this module.
zxr10SystemUnitRunStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 2), UnitRunStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemUnitRunStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemUnitRunStatus.setDescription('the run status of system Unit 1 is running, 0 is shutdown. ')
# Memory statistics (bytes).
zxr10SystemMemSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemMemSize.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemMemSize.setDescription('the physical memory size of the unit owned, the unit is byte.')
zxr10SystemMemUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemMemUsed.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemMemUsed.setDescription('the used momery described by Octets')
# CPU utilization percentages sampled over various windows
# (2m, 5s, 30s, peak, 1m, 5m — see each description).
zxr10SystemCpuUtility2m = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemCpuUtility2m.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemCpuUtility2m.setDescription('the cpu utility described by percent(2 minutes)')
zxr10SystemCpuUtility5s = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemCpuUtility5s.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemCpuUtility5s.setDescription('the cpu utility described by percent( 5 seconds).')
zxr10SystemCpuUtility30s = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemCpuUtility30s.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemCpuUtility30s.setDescription('the cpu utility described by percent( 30 seconds).')
zxr10SystemPeakCpuUtility = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemPeakCpuUtility.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemPeakCpuUtility.setDescription('the cpu peak utility described by percent.(measured in 30 seconds)')
zxr10SystemUnitUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 9), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemUnitUpTime.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemUnitUpTime.setDescription('the total time of system running.')
zxr10SystemUnitPidNum = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemUnitPidNum.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemUnitPidNum.setDescription('the total process running on the unit.')
zxr10SystemCpuUtility1m = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemCpuUtility1m.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemCpuUtility1m.setDescription('the cpu utility described by percent( 1 minute).')
zxr10SystemCpuUtility5m = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SystemCpuUtility5m.setStatus('current')
if mibBuilder.loadTexts: zxr10SystemCpuUtility5m.setDescription('the cpu utility described by percent( 5 minute).')
# --- zxr10UnitPidTable (OID 1.3.6.1.4.1.3902.3.3.3) ---
# Per-process (PID) statistics on each unit, indexed by
# (zxr10SystemUnitIndex, zxr10UnitPidNo). All columns are read-only.
# NOTE(review): 'quen' throughout the descriptions is the vendor MIB's spelling
# of 'queue'; kept verbatim to match the authoritative MIB text.
zxr10UnitPidTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3), )
if mibBuilder.loadTexts: zxr10UnitPidTable.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidTable.setDescription('A list of the pid information.')
zxr10UnitPidEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10SystemUnitIndex"), (0, "ZXR10-MIB", "zxr10UnitPidNo"))
if mibBuilder.loadTexts: zxr10UnitPidEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidEntry.setDescription('An entry to the pid information table.')
zxr10UnitPidNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidNo.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidNo.setDescription('the pid no . ')
zxr10UnitPidUsedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidUsedStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidUsedStatus.setDescription('the pid used status . ')
zxr10UnitPidName = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidName.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidName.setDescription('the pid name. ')
zxr10UnitPidPrio = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidPrio.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidPrio.setDescription('the priority of the process described by number, the number is bigger the priority is lower. ')
zxr10UnitPidStackSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidStackSize.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidStackSize.setDescription('the stack size of the process is owned, its unit is byte. ')
zxr10UnitPidCalledTimes = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidCalledTimes.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidCalledTimes.setDescription('the times that the process was called by system. ')
zxr10UnitPidCpuOccupanTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 7), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidCpuOccupanTime.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidCpuOccupanTime.setDescription('the total time that the process occupying cpu. ')
zxr10UnitPidInterruptTimes = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidInterruptTimes.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidInterruptTimes.setDescription(' the times that the process was interrupted by other process. ')
# Asynchronous message-queue counters (columns 9-13).
zxr10UnitPidAsyQuenMsgMax = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenMsgMax.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenMsgMax.setDescription('the max message number of the asynchronism quen. ')
zxr10UnitPidAsyQuenUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenUsed.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenUsed.setDescription('the message number that waiting to be deal with in the asynchronism quen. ')
zxr10UnitPidAsyQuenBlocked = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenBlocked.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenBlocked.setDescription('the message number that blocked in the asynchronism quen. ')
zxr10UnitPidAsyQuenSendTimeouts = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenSendTimeouts.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenSendTimeouts.setDescription('the discarded messages sending to the process for timeout . ')
zxr10UnitPidAsyQuenRecTimeouts = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenRecTimeouts.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidAsyQuenRecTimeouts.setDescription('the discarded messages sent by the process for timeout . ')
# Synchronous message-queue counters (columns 14-18), mirroring the async set.
zxr10UnitPidSynQuenMsgMax = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidSynQuenMsgMax.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidSynQuenMsgMax.setDescription('the max message number of the synchronism quen. ')
zxr10UnitPidSynQuenUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidSynQuenUsed.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidSynQuenUsed.setDescription('the message number that waiting to be deal with in the synchronism quen. ')
zxr10UnitPidSynQuenBlocked = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidSynQuenBlocked.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidSynQuenBlocked.setDescription('the message number that blocked in the synchronism quen. ')
zxr10UnitPidSynQuenSendTimeouts = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidSynQuenSendTimeouts.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidSynQuenSendTimeouts.setDescription('the discarded messages sending to the process for timeout .')
zxr10UnitPidSynQuenRecTimeouts = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidSynQuenRecTimeouts.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidSynQuenRecTimeouts.setDescription('the discard messages sent by the process for timeout .')
# Timer usage counters (columns 19-20).
zxr10UnitPidTimerNamedUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidTimerNamedUsed.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidTimerNamedUsed.setDescription('the total number of named timer used in this process. ')
zxr10UnitPidTimerUnnamedUsed = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 3, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitPidTimerUnnamedUsed.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitPidTimerUnnamedUsed.setDescription('the total number of unnamed timer used in this process. ')
# --- zxr10UnitCommStatTable (OID 1.3.6.1.4.1.3902.3.3.4) ---
# Inter-unit communication counters (send/receive packets and bytes),
# indexed by (zxr10SystemUnitIndex, zxr10UnitNo). The *Gigas columns extend
# the 32-bit byte counters — presumably bytes = Gigas * 2^30 + Bytes, but
# that split is not stated here; confirm against the vendor MIB.
zxr10UnitCommStatTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4), )
if mibBuilder.loadTexts: zxr10UnitCommStatTable.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitCommStatTable.setDescription('A list of the unit communication information.')
zxr10UnitCommStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10SystemUnitIndex"), (0, "ZXR10-MIB", "zxr10UnitNo"))
if mibBuilder.loadTexts: zxr10UnitCommStatEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitCommStatEntry.setDescription('An entry to the unit communication info table.')
zxr10UnitNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitNo.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitNo.setDescription('the unit no . ')
zxr10UnitsndMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitsndMsgs.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitsndMsgs.setDescription(' the send message packets of the unit. ')
zxr10UnitsndBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitsndBytes.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitsndBytes.setDescription(' the message bytes of the unit send. ')
zxr10UnitsndByteGigas = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitsndByteGigas.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitsndByteGigas.setDescription(' the message Giga bytes of the unit send. ')
zxr10UnitrcvMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitrcvMsgs.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitrcvMsgs.setDescription(' the received message packets of the unit. ')
zxr10UnitrcvBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitrcvBytes.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitrcvBytes.setDescription(' the message bytes of the unit received. ')
zxr10UnitrcvByteGigas = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 3, 4, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10UnitrcvByteGigas.setStatus('current')
if mibBuilder.loadTexts: zxr10UnitrcvByteGigas.setDescription(' the message giga bytes of the unit received. ')
# --- zxr10HardwareEnvironAlarmTable (OID 1.3.6.1.4.1.3902.3.4.1) ---
# Environment (e.g. chassis-level) hardware alarms, indexed by
# (zxr10HardwareEnvironAlarmSlotNo, zxr10HardwareEnvironAlarmCode).
# BoolStatus and AlarmType are textual conventions defined earlier in this module.
zxr10HardwareEnvironAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1), )
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmTable.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmTable.setDescription('A list of the environment alarm information.')
zxr10HardwareEnvironAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10HardwareEnvironAlarmSlotNo"), (0, "ZXR10-MIB", "zxr10HardwareEnvironAlarmCode"))
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmEntry.setDescription('An entry to the environ alarm information table.')
# Physical location of the alarm source (rack / shelf / slot).
zxr10HardwareEnvironAlarmRackNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmRackNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmRackNo.setDescription('the Rack no . ')
zxr10HardwareEnvironAlarmShelfNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmShelfNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmShelfNo.setDescription('the shelf no. ')
zxr10HardwareEnvironAlarmSlotNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmSlotNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmSlotNo.setDescription('the slot no. ')
zxr10HardwareEnvironAlarmCode = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmCode.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmCode.setDescription('the environment alarm code . ')
zxr10HardwareEnvironAlarmLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmLevel.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmLevel.setDescription('the alarm level. ')
# Timestamp is sysUpTime-style TimeTicks at the moment the alarm occurred.
zxr10HardwareEnvironAlarmTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 6), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmTime.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmTime.setDescription('the total running-time after the Router System up when the environment alarm occurred. ')
zxr10HardwareEnvironAlarmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 7), BoolStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmStatus.setDescription('the environ alarm status: (true or 1) RAISED or (false or 0) CLEARED . ')
zxr10HardwareEnvironAlarmType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 8), AlarmType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmType.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmType.setDescription('the alarm type, here is environ.')
zxr10HardwareEnvironAlarmDescrip = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 1, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmDescrip.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareEnvironAlarmDescrip.setDescription('the description string. ')
# --- zxr10HardwareBoardAlarmTable (OID 1.3.6.1.4.1.3902.3.4.2) ---
# Board-level hardware alarms; structurally parallel to the environment alarm
# table above, indexed by (SlotNo, Code).
zxr10HardwareBoardAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2), )
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmTable.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmTable.setDescription('A list of the board alarm information.')
zxr10HardwareBoardAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10HardwareBoardAlarmSlotNo"), (0, "ZXR10-MIB", "zxr10HardwareBoardAlarmCode"))
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmEntry.setDescription('An entry to the board alarm information table.')
# Physical location of the alarmed board (rack / shelf / slot).
zxr10HardwareBoardAlarmRackNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmRackNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmRackNo.setDescription('the Rack no . ')
zxr10HardwareBoardAlarmShelfNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmShelfNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmShelfNo.setDescription('the shelf no. ')
zxr10HardwareBoardAlarmSlotNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmSlotNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmSlotNo.setDescription('the slot no. ')
zxr10HardwareBoardAlarmCode = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmCode.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmCode.setDescription('the board alarm code . ')
zxr10HardwareBoardAlarmLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmLevel.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmLevel.setDescription('the alarm level. ')
zxr10HardwareBoardAlarmTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 6), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmTime.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmTime.setDescription('the total running-time after the Router System up when the board alarm occurred. ')
zxr10HardwareBoardAlarmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 7), BoolStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmStatus.setDescription('the board alarm status: (true or 1) RAISED , (false or 0) CLEARED . ')
zxr10HardwareBoardAlarmType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 8), AlarmType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmType.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmType.setDescription('the alarm type, here is board.')
zxr10HardwareBoardAlarmDescrip = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 2, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmDescrip.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwareBoardAlarmDescrip.setDescription('the description string. ')
# --- zxr10HardwarePortAlarmTable (OID 1.3.6.1.4.1.3902.3.4.3) ---
# Port-level hardware alarms; same shape as the board alarm table but with an
# extra PortNo column, and indexed by (SlotNo, PortNo, Code).
zxr10HardwarePortAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3), )
if mibBuilder.loadTexts: zxr10HardwarePortAlarmTable.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmTable.setDescription('A list of the port alarm information.')
zxr10HardwarePortAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10HardwarePortAlarmSlotNo"), (0, "ZXR10-MIB", "zxr10HardwarePortAlarmPortNo"), (0, "ZXR10-MIB", "zxr10HardwarePortAlarmCode"))
if mibBuilder.loadTexts: zxr10HardwarePortAlarmEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmEntry.setDescription('An entry to the port alarm information table.')
# Physical location of the alarmed port (rack / shelf / slot / port).
zxr10HardwarePortAlarmRackNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmRackNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmRackNo.setDescription('the Rack no . ')
zxr10HardwarePortAlarmShelfNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmShelfNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmShelfNo.setDescription('the shelf no. ')
zxr10HardwarePortAlarmSlotNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmSlotNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmSlotNo.setDescription('the slot no. ')
zxr10HardwarePortAlarmPortNo = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmPortNo.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmPortNo.setDescription('the port no. ')
zxr10HardwarePortAlarmCode = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmCode.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmCode.setDescription('the port alarm code . ')
zxr10HardwarePortAlarmLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmLevel.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmLevel.setDescription('the alarm level. ')
zxr10HardwarePortAlarmTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 7), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmTime.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmTime.setDescription('the total running-time after the Router System up when the port alarm occurred. ')
zxr10HardwarePortAlarmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 8), BoolStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmStatus.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmStatus.setDescription('the port alarm status: (true or 1) RAISED , (false or 0) CLEARED . ')
zxr10HardwarePortAlarmType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 9), AlarmType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmType.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmType.setDescription('the alarm type, here is port.')
zxr10HardwarePortAlarmDescrip = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 3, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10HardwarePortAlarmDescrip.setStatus('current')
if mibBuilder.loadTexts: zxr10HardwarePortAlarmDescrip.setDescription('the description string. ')
# --- zxr10SoftProtocolAlarmTable (OID 1.3.6.1.4.1.3902.3.4.4) ---
# Saved history of software/protocol alarms, one row per alarm code,
# recording the latest occurrence time and a running occurrence count.
zxr10SoftProtocolAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 4, 4), )
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTable.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmTable.setDescription('A list of the softprotocol alarm information.')
zxr10SoftProtocolAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 4, 4, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10SoftProtocolAlarmSaveCode"))
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmEntry.setDescription('An entry to the softprotocol alarm information table.')
zxr10SoftProtocolAlarmSaveCode = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveCode.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveCode.setDescription('the softprotocol alarm code . ')
zxr10SoftProtocolAlarmSaveLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveLevel.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveLevel.setDescription('the alarm level . ')
# TimeTicks since system start of the most recent occurrence.
zxr10SoftProtocolAlarmSaveLasttime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 4, 1, 3), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveLasttime.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveLasttime.setDescription('the total running-time after the Router System up when the latest softprotocol alarm occurred. ')
zxr10SoftProtocolAlarmSaveTotaltimes = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveTotaltimes.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveTotaltimes.setDescription('total times of the softprotocol alarm occurred . ')
zxr10SoftProtocolAlarmSaveType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 4, 1, 5), AlarmType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveType.setStatus('current')
if mibBuilder.loadTexts: zxr10SoftProtocolAlarmSaveType.setDescription('the softprotocol alarm sub-type . ')
# --- zxr10StatisticsAlarmTable (OID 1.3.6.1.4.1.3902.3.4.5) ---
# Saved history of statistics (threshold) alarms; structurally identical to the
# softprotocol alarm-save table above, indexed by alarm code.
zxr10StatisticsAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 3902, 3, 4, 5), )
if mibBuilder.loadTexts: zxr10StatisticsAlarmTable.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmTable.setDescription('A list of the Statistics alarm information.')
zxr10StatisticsAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3902, 3, 4, 5, 1), ).setIndexNames((0, "ZXR10-MIB", "zxr10StatisticsAlarmSaveCode"))
if mibBuilder.loadTexts: zxr10StatisticsAlarmEntry.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmEntry.setDescription('An entry to the Statistics alarm information table.')
zxr10StatisticsAlarmSaveCode = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 5, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveCode.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveCode.setDescription('the Statistics alarm code . ')
zxr10StatisticsAlarmSaveLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 5, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveLevel.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveLevel.setDescription('the alarm level . ')
# TimeTicks since system start of the most recent occurrence.
zxr10StatisticsAlarmSaveLasttime = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 5, 1, 3), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveLasttime.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveLasttime.setDescription('the total running-time after the Router System up when the latest Statistics alarm occurred. ')
zxr10StatisticsAlarmSaveTotaltimes = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 5, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveTotaltimes.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveTotaltimes.setDescription('total times of the Statistics alarm occurred . ')
zxr10StatisticsAlarmSaveType = MibTableColumn((1, 3, 6, 1, 4, 1, 3902, 3, 4, 5, 1, 5), AlarmType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveType.setStatus('current')
if mibBuilder.loadTexts: zxr10StatisticsAlarmSaveType.setDescription('the Statistics alarm sub-type . ')
# --- Auto-generated PySNMP MIB registrations (ZTE ZXR10 product OIDs) ---
# Each symbol below registers a product system-object identifier under the
# ZTE enterprise subtree 1.3.6.1.4.1.3902.3.100.  The `mibBuilder.loadTexts`
# guard loads the human-readable status/description texts only when the MIB
# builder was configured to keep them.  `.setLabel(...)` preserves the
# original hyphenated MIB symbol name where it is not a valid Python
# identifier (underscores are substituted in the Python name).
zxr10_objectID = MibIdentifier((1, 3, 6, 1, 4, 1, 3902, 3, 100)).setLabel("zxr10-objectID")
zxr10RouterT128SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 1))
if mibBuilder.loadTexts: zxr10RouterT128SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterT128SysID.setDescription('zxr10 T128 router')
zxr10RouterT64SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 2))
if mibBuilder.loadTexts: zxr10RouterT64SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterT64SysID.setDescription('zxr10 T64 router')
zxr10SwitchT32SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 3))
if mibBuilder.loadTexts: zxr10SwitchT32SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10SwitchT32SysID.setDescription('zxr10 T32 switch')
# NOTE: sub-identifiers 5 and 4 appear out of numeric order here; this
# mirrors the declaration order of the source MIB.
zxr10RouterGER8SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 5))
if mibBuilder.loadTexts: zxr10RouterGER8SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterGER8SysID.setDescription('zxr10 ger-8 router')
zxr10RouterGAR_2608SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 4)).setLabel("zxr10RouterGAR-2608SysID")
if mibBuilder.loadTexts: zxr10RouterGAR_2608SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterGAR_2608SysID.setDescription('zxr10 GAR 2608 router')
zxr10RouterGAR_2604SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 6)).setLabel("zxr10RouterGAR-2604SysID")
if mibBuilder.loadTexts: zxr10RouterGAR_2604SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterGAR_2604SysID.setDescription('zxr10 GAR 2604 router')
zxr10SwitchT160GSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 7))
if mibBuilder.loadTexts: zxr10SwitchT160GSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10SwitchT160GSysID.setDescription('zxr10 T160G switch')
zxr10RouterGAR_3608SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 8)).setLabel("zxr10RouterGAR-3608SysID")
if mibBuilder.loadTexts: zxr10RouterGAR_3608SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterGAR_3608SysID.setDescription('zxr10 GAR 3608 router')
zxr10RouterGAR_7208SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 9)).setLabel("zxr10RouterGAR-7208SysID")
if mibBuilder.loadTexts: zxr10RouterGAR_7208SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterGAR_7208SysID.setDescription('zxr10 GAR 7208 router')
zxr10SwitchT64GSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 10))
if mibBuilder.loadTexts: zxr10SwitchT64GSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10SwitchT64GSysID.setDescription('zxr10 T64G switch')
# ZXR10 switch product system-OIDs (sub-identifiers 11-43 of the
# ...3902.3.100 subtree); auto-generated, same loadTexts pattern as above.
zxr10Switch3206SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 11))
if mibBuilder.loadTexts: zxr10Switch3206SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3206SysID.setDescription('zxr10 3206 switch')
zxr10Switch3906SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 12))
if mibBuilder.loadTexts: zxr10Switch3906SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3906SysID.setDescription('zxr10 3906 switch')
zxr10Switch3228SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 13))
if mibBuilder.loadTexts: zxr10Switch3228SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3228SysID.setDescription('zxr10 3228 switch')
zxr10Switch3928SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 14))
if mibBuilder.loadTexts: zxr10Switch3928SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3928SysID.setDescription('zxr10 3928 switch')
zxr10Switch3252SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 15))
if mibBuilder.loadTexts: zxr10Switch3252SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3252SysID.setDescription('zxr10 3252 switch')
zxr10Switch3952SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 16))
if mibBuilder.loadTexts: zxr10Switch3952SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3952SysID.setDescription('zxr10 3952 switch')
zxr10Switch5224SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 17))
if mibBuilder.loadTexts: zxr10Switch5224SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5224SysID.setDescription('zxr10 5224 switch')
zxr10Switch5228SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 18))
if mibBuilder.loadTexts: zxr10Switch5228SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5228SysID.setDescription('zxr10 5228 switch')
zxr10Switch5228FSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 19))
if mibBuilder.loadTexts: zxr10Switch5228FSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5228FSysID.setDescription('zxr10 5228F switch')
zxr10Switch5928SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 20))
if mibBuilder.loadTexts: zxr10Switch5928SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5928SysID.setDescription('zxr10 5928 switch')
zxr10Switch5928FSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 21))
if mibBuilder.loadTexts: zxr10Switch5928FSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5928FSysID.setDescription('zxr10 5928F switch')
zxr10Switch5252SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 22))
if mibBuilder.loadTexts: zxr10Switch5252SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5252SysID.setDescription('zxr10 5252 switch')
zxr10Switch5952SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 23))
if mibBuilder.loadTexts: zxr10Switch5952SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5952SysID.setDescription('zxr10 5952 switch')
zxr10Switch3226SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 24))
if mibBuilder.loadTexts: zxr10Switch3226SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3226SysID.setDescription('zxr10 3226 switch')
zxr10SwitchT40GSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 25))
if mibBuilder.loadTexts: zxr10SwitchT40GSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10SwitchT40GSysID.setDescription('zxr10 T40G switch')
zxr10RouterT1200SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 26))
if mibBuilder.loadTexts: zxr10RouterT1200SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterT1200SysID.setDescription('zxr10 T1200 router')
zxr10RouterT600SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 27))
if mibBuilder.loadTexts: zxr10RouterT600SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterT600SysID.setDescription('zxr10 T600 router')
zxr10RouterGER2SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 28))
if mibBuilder.loadTexts: zxr10RouterGER2SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterGER2SysID.setDescription('zxr10 ger-2 router')
zxr10RouterGER4SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 29))
if mibBuilder.loadTexts: zxr10RouterGER4SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterGER4SysID.setDescription('zxr10 ger-4 router')
zxr10Switch3226FISysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 30))
if mibBuilder.loadTexts: zxr10Switch3226FISysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3226FISysID.setDescription('zxr10 3226FI switch')
zxr10Switch3928ASysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 31))
if mibBuilder.loadTexts: zxr10Switch3928ASysID.setStatus('current')
# NOTE(review): description says '3900A' while the symbol is 3928A — looks
# like a typo inherited from the source MIB; confirm against the MIB text
# before changing.
if mibBuilder.loadTexts: zxr10Switch3928ASysID.setDescription('zxr10 3900A switch')
zxr10Switch3928AFISysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 32))
if mibBuilder.loadTexts: zxr10Switch3928AFISysID.setStatus('current')
# NOTE(review): description omits the 'FI' suffix of the symbol name —
# verify against the source MIB before changing.
if mibBuilder.loadTexts: zxr10Switch3928AFISysID.setDescription('zxr10 3928A switch')
zxr10Switch3952ASysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 33))
if mibBuilder.loadTexts: zxr10Switch3952ASysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3952ASysID.setDescription('zxr10 3952A switch')
zxr10Switch3228A_EISysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 34)).setLabel("zxr10Switch3228A-EISysID")
if mibBuilder.loadTexts: zxr10Switch3228A_EISysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3228A_EISysID.setDescription('zxr10 3228A-EI switch')
zxr10Switch3228ASysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 35))
if mibBuilder.loadTexts: zxr10Switch3228ASysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3228ASysID.setDescription('zxr10 3228A switch')
zxr10Switch3228A_FISysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 36)).setLabel("zxr10Switch3228A-FISysID")
if mibBuilder.loadTexts: zxr10Switch3228A_FISysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3228A_FISysID.setDescription('zxr10 3228A-FI switch')
zxr10Switch3252ASysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 37))
if mibBuilder.loadTexts: zxr10Switch3252ASysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3252ASysID.setDescription('zxr10 3252A switch')
zxr10Switch5228ASysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 38))
if mibBuilder.loadTexts: zxr10Switch5228ASysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5228ASysID.setDescription('zxr10 5228A switch')
zxr10Switch5252ASysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 39))
if mibBuilder.loadTexts: zxr10Switch5252ASysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5252ASysID.setDescription('zxr10 5252A switch')
zxr10Switch5928ESysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 40))
if mibBuilder.loadTexts: zxr10Switch5928ESysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5928ESysID.setDescription('zxr10 5928E switch')
zxr10Switch5928E_FISysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 41)).setLabel("zxr10Switch5928E-FISysID")
if mibBuilder.loadTexts: zxr10Switch5928E_FISysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5928E_FISysID.setDescription('zxr10 5928E-FI switch')
zxr10Switch3952ESysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 42))
if mibBuilder.loadTexts: zxr10Switch3952ESysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3952ESysID.setDescription('zxr10 3952E switch')
zxr10Switch5952ESysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 43))
if mibBuilder.loadTexts: zxr10Switch5952ESysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5952ESysID.setDescription('zxr10 5952E switch')
# R10 18xx router family (sub-identifiers 100+): each model gets separate
# OIDs for its AC- and DC-powered variants.  Labels keep the hyphenated
# MIB names; Python symbols substitute underscores.
zxr10RouterR10_1822_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 100)).setLabel("zxr10RouterR10-1822-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1822_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1822_ACSysID.setDescription('zxr10 GAR 1822-AC router')
zxr10RouterR10_1822_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 101)).setLabel("zxr10RouterR10-1822-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1822_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1822_DCSysID.setDescription('zxr10 GAR 1822-DC router')
zxr10RouterR10_1821_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 102)).setLabel("zxr10RouterR10-1821-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1821_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1821_ACSysID.setDescription('zxr10 GAR 1821-AC router')
zxr10RouterR10_1821_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 103)).setLabel("zxr10RouterR10-1821-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1821_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1821_DCSysID.setDescription('zxr10 GAR 1821-DC router')
# zxr10RouterR10-1812-ACSysID: system OID (...3902.3.100.104) for the
# 1812-AC router model; setLabel() preserves the hyphenated MIB name.
zxr10RouterR10_1812_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 104)).setLabel("zxr10RouterR10-1812-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1812_ACSysID.setStatus('current')
# Fixed copy-paste defect: the description previously read '1821-AC', which
# belongs to the object at ...100.102.  This object's label is 1812-AC, and
# every sibling's description matches its '<model>-<AC|DC>' label pattern.
if mibBuilder.loadTexts: zxr10RouterR10_1812_ACSysID.setDescription('zxr10 GAR 1812-AC router')
# R10 1812/1811 and 18xxE router variants (AC/DC pairs), sub-identifiers
# 105-115; same auto-generated loadTexts pattern as the objects above.
zxr10RouterR10_1812_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 105)).setLabel("zxr10RouterR10-1812-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1812_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1812_DCSysID.setDescription('zxr10 GAR 1812-DC router')
zxr10RouterR10_1811_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 106)).setLabel("zxr10RouterR10-1811-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1811_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1811_ACSysID.setDescription('zxr10 GAR 1811-AC router')
zxr10RouterR10_1811_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 107)).setLabel("zxr10RouterR10-1811-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1811_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1811_DCSysID.setDescription('zxr10 GAR 1811-DC router')
zxr10RouterR10_1822E_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 108)).setLabel("zxr10RouterR10-1822E-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1822E_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1822E_ACSysID.setDescription('zxr10 GAR 1822E-AC router')
zxr10RouterR10_1822E_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 109)).setLabel("zxr10RouterR10-1822E-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1822E_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1822E_DCSysID.setDescription('zxr10 GAR 1822E-DC router')
zxr10RouterR10_1821E_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 110)).setLabel("zxr10RouterR10-1821E-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1821E_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1821E_ACSysID.setDescription('zxr10 GAR 1821E-AC router')
zxr10RouterR10_1821E_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 111)).setLabel("zxr10RouterR10-1821E-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1821E_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1821E_DCSysID.setDescription('zxr10 GAR 1821E-DC router')
zxr10RouterR10_1812E_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 112)).setLabel("zxr10RouterR10-1812E-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1812E_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1812E_ACSysID.setDescription('zxr10 GAR 1812E-AC router')
zxr10RouterR10_1812E_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 113)).setLabel("zxr10RouterR10-1812E-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1812E_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1812E_DCSysID.setDescription('zxr10 GAR 1812E-DC router')
zxr10RouterR10_1811E_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 114)).setLabel("zxr10RouterR10-1811E-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1811E_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1811E_ACSysID.setDescription('zxr10 GAR 1811E-AC router')
zxr10RouterR10_1811E_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 115)).setLabel("zxr10RouterR10-1811E-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1811E_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1811E_DCSysID.setDescription('zxr10 GAR 1811E-DC router')
# R10 38xx / 28xx / 184x router families, sub-identifiers 132-203.  Each
# 4-model group is laid out as four AC variants followed by the four
# matching DC variants; descriptions use the 'GAR <model>-<AC|DC>' form.
zxr10RouterR10_3881_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 132)).setLabel("zxr10RouterR10-3881-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3881_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3881_ACSysID.setDescription('zxr10 GAR 3881-AC router')
zxr10RouterR10_3882_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 133)).setLabel("zxr10RouterR10-3882-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3882_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3882_ACSysID.setDescription('zxr10 GAR 3882-AC router')
zxr10RouterR10_3883_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 134)).setLabel("zxr10RouterR10-3883-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3883_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3883_ACSysID.setDescription('zxr10 GAR 3883-AC router')
zxr10RouterR10_3884_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 135)).setLabel("zxr10RouterR10-3884-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3884_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3884_ACSysID.setDescription('zxr10 GAR 3884-AC router')
zxr10RouterR10_3881_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 136)).setLabel("zxr10RouterR10-3881-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3881_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3881_DCSysID.setDescription('zxr10 GAR 3881-DC router')
zxr10RouterR10_3882_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 137)).setLabel("zxr10RouterR10-3882-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3882_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3882_DCSysID.setDescription('zxr10 GAR 3882-DC router')
zxr10RouterR10_3883_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 138)).setLabel("zxr10RouterR10-3883-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3883_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3883_DCSysID.setDescription('zxr10 GAR 3883-DC router')
zxr10RouterR10_3884_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 139)).setLabel("zxr10RouterR10-3884-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3884_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3884_DCSysID.setDescription('zxr10 GAR 3884-DC router')
zxr10RouterR10_3841_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 140)).setLabel("zxr10RouterR10-3841-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3841_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3841_ACSysID.setDescription('zxr10 GAR 3841-AC router')
zxr10RouterR10_3842_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 141)).setLabel("zxr10RouterR10-3842-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3842_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3842_ACSysID.setDescription('zxr10 GAR 3842-AC router')
zxr10RouterR10_3843_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 142)).setLabel("zxr10RouterR10-3843-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3843_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3843_ACSysID.setDescription('zxr10 GAR 3843-AC router')
zxr10RouterR10_3844_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 143)).setLabel("zxr10RouterR10-3844-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3844_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3844_ACSysID.setDescription('zxr10 GAR 3844-AC router')
zxr10RouterR10_3841_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 144)).setLabel("zxr10RouterR10-3841-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3841_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3841_DCSysID.setDescription('zxr10 GAR 3841-DC router')
zxr10RouterR10_3842_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 145)).setLabel("zxr10RouterR10-3842-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3842_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3842_DCSysID.setDescription('zxr10 GAR 3842-DC router')
zxr10RouterR10_3843_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 146)).setLabel("zxr10RouterR10-3843-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3843_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3843_DCSysID.setDescription('zxr10 GAR 3843-DC router')
zxr10RouterR10_3844_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 147)).setLabel("zxr10RouterR10-3844-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3844_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3844_DCSysID.setDescription('zxr10 GAR 3844-DC router')
zxr10RouterR10_3821_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 148)).setLabel("zxr10RouterR10-3821-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3821_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3821_ACSysID.setDescription('zxr10 GAR 3821-AC router')
zxr10RouterR10_3822_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 149)).setLabel("zxr10RouterR10-3822-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3822_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3822_ACSysID.setDescription('zxr10 GAR 3822-AC router')
zxr10RouterR10_3823_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 150)).setLabel("zxr10RouterR10-3823-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3823_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3823_ACSysID.setDescription('zxr10 GAR 3823-AC router')
zxr10RouterR10_3824_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 151)).setLabel("zxr10RouterR10-3824-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3824_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3824_ACSysID.setDescription('zxr10 GAR 3824-AC router')
zxr10RouterR10_3821_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 152)).setLabel("zxr10RouterR10-3821-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3821_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3821_DCSysID.setDescription('zxr10 GAR 3821-DC router')
zxr10RouterR10_3822_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 153)).setLabel("zxr10RouterR10-3822-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3822_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3822_DCSysID.setDescription('zxr10 GAR 3822-DC router')
zxr10RouterR10_3823_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 154)).setLabel("zxr10RouterR10-3823-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3823_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3823_DCSysID.setDescription('zxr10 GAR 3823-DC router')
zxr10RouterR10_3824_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 155)).setLabel("zxr10RouterR10-3824-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_3824_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_3824_DCSysID.setDescription('zxr10 GAR 3824-DC router')
# Sub-identifiers 156-171 are unassigned in this module (gap before the
# 28xx family below).
zxr10RouterR10_2841_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 172)).setLabel("zxr10RouterR10-2841-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2841_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2841_ACSysID.setDescription('zxr10 GAR 2841-AC router')
zxr10RouterR10_2842_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 173)).setLabel("zxr10RouterR10-2842-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2842_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2842_ACSysID.setDescription('zxr10 GAR 2842-AC router')
zxr10RouterR10_2843_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 174)).setLabel("zxr10RouterR10-2843-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2843_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2843_ACSysID.setDescription('zxr10 GAR 2843-AC router')
zxr10RouterR10_2844_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 175)).setLabel("zxr10RouterR10-2844-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2844_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2844_ACSysID.setDescription('zxr10 GAR 2844-AC router')
zxr10RouterR10_2841_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 176)).setLabel("zxr10RouterR10-2841-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2841_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2841_DCSysID.setDescription('zxr10 GAR 2841-DC router')
zxr10RouterR10_2842_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 177)).setLabel("zxr10RouterR10-2842-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2842_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2842_DCSysID.setDescription('zxr10 GAR 2842-DC router')
zxr10RouterR10_2843_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 178)).setLabel("zxr10RouterR10-2843-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2843_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2843_DCSysID.setDescription('zxr10 GAR 2843-DC router')
zxr10RouterR10_2844_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 179)).setLabel("zxr10RouterR10-2844-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2844_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2844_DCSysID.setDescription('zxr10 GAR 2844-DC router')
zxr10RouterR10_2881_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 180)).setLabel("zxr10RouterR10-2881-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2881_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2881_ACSysID.setDescription('zxr10 GAR 2881-AC router')
zxr10RouterR10_2882_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 181)).setLabel("zxr10RouterR10-2882-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2882_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2882_ACSysID.setDescription('zxr10 GAR 2882-AC router')
zxr10RouterR10_2883_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 182)).setLabel("zxr10RouterR10-2883-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2883_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2883_ACSysID.setDescription('zxr10 GAR 2883-AC router')
zxr10RouterR10_2884_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 183)).setLabel("zxr10RouterR10-2884-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2884_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2884_ACSysID.setDescription('zxr10 GAR 2884-AC router')
zxr10RouterR10_2881_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 184)).setLabel("zxr10RouterR10-2881-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2881_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2881_DCSysID.setDescription('zxr10 GAR 2881-DC router')
zxr10RouterR10_2882_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 185)).setLabel("zxr10RouterR10-2882-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2882_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2882_DCSysID.setDescription('zxr10 GAR 2882-DC router')
zxr10RouterR10_2883_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 186)).setLabel("zxr10RouterR10-2883-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2883_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2883_DCSysID.setDescription('zxr10 GAR 2883-DC router')
zxr10RouterR10_2884_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 187)).setLabel("zxr10RouterR10-2884-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2884_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2884_DCSysID.setDescription('zxr10 GAR 2884-DC router')
zxr10RouterR10_2821_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 188)).setLabel("zxr10RouterR10-2821-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2821_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2821_ACSysID.setDescription('zxr10 GAR 2821-AC router')
zxr10RouterR10_2822_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 189)).setLabel("zxr10RouterR10-2822-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2822_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2822_ACSysID.setDescription('zxr10 GAR 2822-AC router')
zxr10RouterR10_2823_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 190)).setLabel("zxr10RouterR10-2823-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2823_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2823_ACSysID.setDescription('zxr10 GAR 2823-AC router')
zxr10RouterR10_2824_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 191)).setLabel("zxr10RouterR10-2824-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2824_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2824_ACSysID.setDescription('zxr10 GAR 2824-AC router')
zxr10RouterR10_2821_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 192)).setLabel("zxr10RouterR10-2821-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2821_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2821_DCSysID.setDescription('zxr10 GAR 2821-DC router')
zxr10RouterR10_2822_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 193)).setLabel("zxr10RouterR10-2822-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2822_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2822_DCSysID.setDescription('zxr10 GAR 2822-DC router')
zxr10RouterR10_2823_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 194)).setLabel("zxr10RouterR10-2823-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2823_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2823_DCSysID.setDescription('zxr10 GAR 2823-DC router')
zxr10RouterR10_2824_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 195)).setLabel("zxr10RouterR10-2824-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_2824_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_2824_DCSysID.setDescription('zxr10 GAR 2824-DC router')
zxr10RouterR10_1841_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 196)).setLabel("zxr10RouterR10-1841-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1841_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1841_ACSysID.setDescription('zxr10 GAR 1841-AC router')
zxr10RouterR10_1842_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 197)).setLabel("zxr10RouterR10-1842-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1842_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1842_ACSysID.setDescription('zxr10 GAR 1842-AC router')
zxr10RouterR10_1843_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 198)).setLabel("zxr10RouterR10-1843-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1843_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1843_ACSysID.setDescription('zxr10 GAR 1843-AC router')
zxr10RouterR10_1844_ACSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 199)).setLabel("zxr10RouterR10-1844-ACSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1844_ACSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1844_ACSysID.setDescription('zxr10 GAR 1844-AC router')
zxr10RouterR10_1841_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 200)).setLabel("zxr10RouterR10-1841-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1841_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1841_DCSysID.setDescription('zxr10 GAR 1841-DC router')
zxr10RouterR10_1842_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 201)).setLabel("zxr10RouterR10-1842-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1842_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1842_DCSysID.setDescription('zxr10 GAR 1842-DC router')
zxr10RouterR10_1843_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 202)).setLabel("zxr10RouterR10-1843-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1843_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1843_DCSysID.setDescription('zxr10 GAR 1843-DC router')
zxr10RouterR10_1844_DCSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 203)).setLabel("zxr10RouterR10-1844-DCSysID")
if mibBuilder.loadTexts: zxr10RouterR10_1844_DCSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10RouterR10_1844_DCSysID.setDescription('zxr10 GAR 1844-DC router')
# High-end switch / transport platforms, sub-identifiers 400+, plus the
# UAS10600 at 5000; same auto-generated loadTexts pattern as above.
zxr10Switch_6907SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 400)).setLabel("zxr10Switch-6907SysID")
if mibBuilder.loadTexts: zxr10Switch_6907SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_6907SysID.setDescription('zxr10 6907 switch')
zxr10Switch_T240GSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 401)).setLabel("zxr10Switch-T240GSysID")
if mibBuilder.loadTexts: zxr10Switch_T240GSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_T240GSysID.setDescription('zxr10 T240G switch')
zxr10Switch_6902SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 402)).setLabel("zxr10Switch-6902SysID")
if mibBuilder.loadTexts: zxr10Switch_6902SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_6902SysID.setDescription('zxr10 6902 switch')
zxr10Switch_6905SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 403)).setLabel("zxr10Switch-6905SysID")
if mibBuilder.loadTexts: zxr10Switch_6905SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_6905SysID.setDescription('zxr10 6905 switch')
zxr10Switch_6908SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 404)).setLabel("zxr10Switch-6908SysID")
if mibBuilder.loadTexts: zxr10Switch_6908SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_6908SysID.setDescription('zxr10 6908 switch')
zxr10Switch_8902SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 405)).setLabel("zxr10Switch-8902SysID")
if mibBuilder.loadTexts: zxr10Switch_8902SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_8902SysID.setDescription('zxr10 8902 switch')
zxr10Switch_8905SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 406)).setLabel("zxr10Switch-8905SysID")
if mibBuilder.loadTexts: zxr10Switch_8905SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_8905SysID.setDescription('zxr10 8905 switch')
zxr10Switch_8908SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 407)).setLabel("zxr10Switch-8908SysID")
if mibBuilder.loadTexts: zxr10Switch_8908SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_8908SysID.setDescription('zxr10 8908 switch')
zxr10Switch_8912SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 408)).setLabel("zxr10Switch-8912SysID")
if mibBuilder.loadTexts: zxr10Switch_8912SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch_8912SysID.setDescription('zxr10 8912 switch')
zxctn_6100SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 409)).setLabel("zxctn-6100SysID")
if mibBuilder.loadTexts: zxctn_6100SysID.setStatus('current')
if mibBuilder.loadTexts: zxctn_6100SysID.setDescription('zxctn 6100')
zxr10Switch5928_PSSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 417)).setLabel("zxr10Switch5928-PSSysID")
if mibBuilder.loadTexts: zxr10Switch5928_PSSysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch5928_PSSysID.setDescription('zxr10 5928-PS switch')
zxr10Switch3928A_PSSysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 418)).setLabel("zxr10Switch3928A-PSSysID")
if mibBuilder.loadTexts: zxr10Switch3928A_PSSysID.setStatus('current')
# NOTE(review): description says 'zxctn' although the symbol is a zxr10
# switch — likely a typo inherited from the source MIB; confirm before
# changing.
if mibBuilder.loadTexts: zxr10Switch3928A_PSSysID.setDescription('zxctn 3928A-PS switch')
zxr10Switch3928ESysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 419))
if mibBuilder.loadTexts: zxr10Switch3928ESysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3928ESysID.setDescription('zxr10 3928E switch')
zxr10Switch3928E_FISysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 420)).setLabel("zxr10Switch3928E-FISysID")
if mibBuilder.loadTexts: zxr10Switch3928E_FISysID.setStatus('current')
if mibBuilder.loadTexts: zxr10Switch3928E_FISysID.setDescription('zxr10 3928E-FI switch')
zxr10UAS10600SysID = ObjectIdentity((1, 3, 6, 1, 4, 1, 3902, 3, 100, 5000))
if mibBuilder.loadTexts: zxr10UAS10600SysID.setStatus('current')
if mibBuilder.loadTexts: zxr10UAS10600SysID.setDescription('zxr10 uas10600')
# Export this module's symbols under the MIB name "ZXR10-MIB" so that other compiled
# MIB modules can resolve them via mibBuilder.importSymbols("ZXR10-MIB", ...).
# The export is split across two exportSymbols calls (see the call that follows this
# one) — presumably a per-call size limit in the MIB compiler's code generator; both
# calls target the same module name, so the split is invisible to importers.
mibBuilder.exportSymbols("ZXR10-MIB", zxr10Switch5228SysID=zxr10Switch5228SysID, zxr10RouterR10_2881_ACSysID=zxr10RouterR10_2881_ACSysID, zxr10ShelfNo=zxr10ShelfNo, zxr10RouterR10_2823_DCSysID=zxr10RouterR10_2823_DCSysID, zxr10HardwareAlarmTime=zxr10HardwareAlarmTime, zxr10RouterR10_2823_ACSysID=zxr10RouterR10_2823_ACSysID, zxr10StatisticsAlarmVariableValue=zxr10StatisticsAlarmVariableValue, zxr10SoftProtocolAlarmTrapTable=zxr10SoftProtocolAlarmTrapTable, zxr10Switch3252ASysID=zxr10Switch3252ASysID, zte=zte, zxr10ShelfAvailStatus=zxr10ShelfAvailStatus, zxr10BoardSilkLabel=zxr10BoardSilkLabel, zxr10Switch3252SysID=zxr10Switch3252SysID, zxr10HardwareEnvironAlarmSlotNo=zxr10HardwareEnvironAlarmSlotNo, zxr10RouterR10_3884_ACSysID=zxr10RouterR10_3884_ACSysID, zxr10Switch3228A_EISysID=zxr10Switch3228A_EISysID, zxr10HardwareAlarmDescrip=zxr10HardwareAlarmDescrip, zxr10UnitPidTimerUnnamedUsed=zxr10UnitPidTimerUnnamedUsed, zxr10SoftProtocolAlarmSaveTotaltimes=zxr10SoftProtocolAlarmSaveTotaltimes, zxr10HardwareAlarmType=zxr10HardwareAlarmType, zxr10camSize=zxr10camSize, zxr10RouterR10_3823_ACSysID=zxr10RouterR10_3823_ACSysID, OperStatus=OperStatus, zxr10SoftProtocolAlarmTrapEntry=zxr10SoftProtocolAlarmTrapEntry, zxr10RouterR10_1822_DCSysID=zxr10RouterR10_1822_DCSysID, zxr10SystemTrapHost=zxr10SystemTrapHost, zxr10RouterR10_1812_ACSysID=zxr10RouterR10_1812_ACSysID, zxr10Switch3906SysID=zxr10Switch3906SysID, zxr10RouterR10_3882_DCSysID=zxr10RouterR10_3882_DCSysID, zxr10SystemCpuUtility5s=zxr10SystemCpuUtility5s, zxr10Switch3228ASysID=zxr10Switch3228ASysID, zxr10RouterR10_1843_DCSysID=zxr10RouterR10_1843_DCSysID, zxr10RouterR10_3882_ACSysID=zxr10RouterR10_3882_ACSysID, zxr10rackEntry=zxr10rackEntry, zxr10RouterGAR_2604SysID=zxr10RouterGAR_2604SysID, zxr10RouterR10_3842_DCSysID=zxr10RouterR10_3842_DCSysID, zxr10SystemCpuUtility1m=zxr10SystemCpuUtility1m, zxr10HardwareEnvironAlarmTable=zxr10HardwareEnvironAlarmTable, PidUsedStatus=PidUsedStatus, 
zxr10HardwareBoardAlarmEntry=zxr10HardwareBoardAlarmEntry, zxr10RouterGAR_2608SysID=zxr10RouterGAR_2608SysID, zxr10Switch_8902SysID=zxr10Switch_8902SysID, zxr10portTable=zxr10portTable, BoolStatus=BoolStatus, zxr10SystemUnitRunStatus=zxr10SystemUnitRunStatus, zxr10HardwareEnvironAlarmType=zxr10HardwareEnvironAlarmType, zxr10RouterR10_2824_DCSysID=zxr10RouterR10_2824_DCSysID, zxr10SoftProtocolAlarmSaveLasttime=zxr10SoftProtocolAlarmSaveLasttime, zxr10SwitchT40GSysID=zxr10SwitchT40GSysID, zxr10rack=zxr10rack, zxr10UnitPidCpuOccupanTime=zxr10UnitPidCpuOccupanTime, zxr10HardwareBoardAlarmTime=zxr10HardwareBoardAlarmTime, zxr10RouterR10_1843_ACSysID=zxr10RouterR10_1843_ACSysID, ProductID=ProductID, zxr10RouterR10_2843_DCSysID=zxr10RouterR10_2843_DCSysID, zxr10HardwareEnvironAlarmLevel=zxr10HardwareEnvironAlarmLevel, zxr10UnitPidAsyQuenBlocked=zxr10UnitPidAsyQuenBlocked, zxr10HardwareEnvironAlarmRackNo=zxr10HardwareEnvironAlarmRackNo, zxr10UnitsndByteGigas=zxr10UnitsndByteGigas, zxr10HardwareAlarmLevel=zxr10HardwareAlarmLevel, zxr10StatisticsAlarmEntry=zxr10StatisticsAlarmEntry, zxr10SoftProtocolAlarmEntry=zxr10SoftProtocolAlarmEntry, zxr10RouterR10_1811E_DCSysID=zxr10RouterR10_1811E_DCSysID, zxr10RouterT64SysID=zxr10RouterT64SysID, zxr10Switch_T240GSysID=zxr10Switch_T240GSysID, zxr10UAS10600SysID=zxr10UAS10600SysID, AvailStatus=AvailStatus, zxr10UnitPidPrio=zxr10UnitPidPrio, PortProperty=PortProperty, zxr10Switch5928_PSSysID=zxr10Switch5928_PSSysID, zxr10HardwareBoardAlarmShelfNo=zxr10HardwareBoardAlarmShelfNo, zxr10Switch5952ESysID=zxr10Switch5952ESysID, zxr10SystemCpuUtility2m=zxr10SystemCpuUtility2m, zxr10StatisticsAlarmShelfNo=zxr10StatisticsAlarmShelfNo, zxr10Switch3928ASysID=zxr10Switch3928ASysID, AlarmType=AlarmType, zxr10SoftProtocolAlarmShelfNo=zxr10SoftProtocolAlarmShelfNo, zxr10_statistics=zxr10_statistics, zxr10HardwareBoardAlarmLevel=zxr10HardwareBoardAlarmLevel, zxr10RouterR10_1822E_DCSysID=zxr10RouterR10_1822E_DCSysID, 
zxr10SystemUnitUpTime=zxr10SystemUnitUpTime, ShelfAttrib=ShelfAttrib, zxr10UnitPidSynQuenMsgMax=zxr10UnitPidSynQuenMsgMax, zxr10StatisticsAlarmSaveTotaltimes=zxr10StatisticsAlarmSaveTotaltimes, zxr10HardwarePortAlarmCode=zxr10HardwarePortAlarmCode, zxr10RouterR10_2841_DCSysID=zxr10RouterR10_2841_DCSysID, zxr10StatisticsAlarmSaveLasttime=zxr10StatisticsAlarmSaveLasttime, zxr10SystemUnitPidNum=zxr10SystemUnitPidNum, zxr10RouterR10_2824_ACSysID=zxr10RouterR10_2824_ACSysID, zxr10PortProperty=zxr10PortProperty, zxr10HardwareEnvironAlarmCode=zxr10HardwareEnvironAlarmCode, zxr10Switch_6907SysID=zxr10Switch_6907SysID, zxr10RouterT1200SysID=zxr10RouterT1200SysID, zxr10RouterR10_1812E_ACSysID=zxr10RouterR10_1812E_ACSysID, zxr10Switch5228ASysID=zxr10Switch5228ASysID, zxr10StatisticsAlarmTime=zxr10StatisticsAlarmTime, zxr10HardwareBoardAlarmCode=zxr10HardwareBoardAlarmCode, zxr10UnitPidStackSize=zxr10UnitPidStackSize, zxr10RackAttrib=zxr10RackAttrib, zxr10StatisticsAlarmTrapTable=zxr10StatisticsAlarmTrapTable, zxr10RouterR10_3843_DCSysID=zxr10RouterR10_3843_DCSysID, zxr10SoftProtocolAlarmStatus=zxr10SoftProtocolAlarmStatus, zxr10Switch5928E_FISysID=zxr10Switch5928E_FISysID, zxr10RouterR10_1844_ACSysID=zxr10RouterR10_1844_ACSysID, zxr10StatisticsAlarmSaveLevel=zxr10StatisticsAlarmSaveLevel, zxr10HardwareEnvironAlarmDescrip=zxr10HardwareEnvironAlarmDescrip, zxr10RouterR10_2884_ACSysID=zxr10RouterR10_2884_ACSysID, zxr10shelfTable=zxr10shelfTable, zxr10UnitPidAsyQuenRecTimeouts=zxr10UnitPidAsyQuenRecTimeouts, zxr10SystemDeviceType=zxr10SystemDeviceType, zxr10AlarmTrap=zxr10AlarmTrap, zxr10HardwarePortAlarmLevel=zxr10HardwarePortAlarmLevel, zxr10UnitPidAsyQuenUsed=zxr10UnitPidAsyQuenUsed, zxr10HardwareAlarmRackNo=zxr10HardwareAlarmRackNo, zxr10SoftProtocolAlarmSaveType=zxr10SoftProtocolAlarmSaveType, zxr10RouterR10_3821_ACSysID=zxr10RouterR10_3821_ACSysID, zxr10RouterR10_2883_DCSysID=zxr10RouterR10_2883_DCSysID, zxr10SlotOperStatus=zxr10SlotOperStatus, 
zxr10SystemMemSize=zxr10SystemMemSize, zxctn_6100SysID=zxctn_6100SysID, zxr10PCBVersion=zxr10PCBVersion, zxr10UnitPidSynQuenUsed=zxr10UnitPidSynQuenUsed, zxr10RouterR10_1811E_ACSysID=zxr10RouterR10_1811E_ACSysID, zxr10SystemHostAttr=zxr10SystemHostAttr, zxr10RouterR10_3841_ACSysID=zxr10RouterR10_3841_ACSysID, zxr10RouterR10_3824_ACSysID=zxr10RouterR10_3824_ACSysID, zxr10RouterR10_3844_DCSysID=zxr10RouterR10_3844_DCSysID, zxr10RouterR10_1842_DCSysID=zxr10RouterR10_1842_DCSysID, zxr10Switch3928SysID=zxr10Switch3928SysID, zxr10UnitPidNo=zxr10UnitPidNo, zxr10HardwareAlarmSlotNo=zxr10HardwareAlarmSlotNo, zxr10UnitPidSynQuenSendTimeouts=zxr10UnitPidSynQuenSendTimeouts, zxr10UnitrcvByteGigas=zxr10UnitrcvByteGigas, NpcType=NpcType, zxr10RouterR10_3881_ACSysID=zxr10RouterR10_3881_ACSysID, zxr10Switch5952SysID=zxr10Switch5952SysID, zxr10RouterR10_3823_DCSysID=zxr10RouterR10_3823_DCSysID, zxr10SoftProtocolAlarmSaveCode=zxr10SoftProtocolAlarmSaveCode, zxr10SystemMemUsed=zxr10SystemMemUsed, zxr10StatisticsAlarmLevel=zxr10StatisticsAlarmLevel, PortWorkingType=PortWorkingType, zxr10RouterR10_2821_ACSysID=zxr10RouterR10_2821_ACSysID, zxr10HardwareAlarmPortNo=zxr10HardwareAlarmPortNo, zxr10RouterR10_3843_ACSysID=zxr10RouterR10_3843_ACSysID, zxr10RouterGER4SysID=zxr10RouterGER4SysID, zxr10StatisticsAlarmValueFallingThreshold=zxr10StatisticsAlarmValueFallingThreshold, zxr10SoftProtocolAlarmSlotNo=zxr10SoftProtocolAlarmSlotNo, zxr10PortOperStatus=zxr10PortOperStatus, zxr10StatisticsAlarmStatus=zxr10StatisticsAlarmStatus, zxr10RouterR10_1812_DCSysID=zxr10RouterR10_1812_DCSysID, zxr10RouterR10_2842_ACSysID=zxr10RouterR10_2842_ACSysID, zxr10portEntry=zxr10portEntry, zxr10RouterR10_2844_DCSysID=zxr10RouterR10_2844_DCSysID, zxr10HardwareAlarmTrapTable=zxr10HardwareAlarmTrapTable, zxr10UnitsndMsgs=zxr10UnitsndMsgs, zxr10Switch5928FSysID=zxr10Switch5928FSysID, zxr10StatisticsAlarmDescrip=zxr10StatisticsAlarmDescrip, zxr10SoftProtocolAlarmCode=zxr10SoftProtocolAlarmCode, 
zxr10SystemCpuUtility5m=zxr10SystemCpuUtility5m, zxr10StatisticsAlarmRackNo=zxr10StatisticsAlarmRackNo, zxr10HardwarePortAlarmPortNo=zxr10HardwarePortAlarmPortNo, zxr10RouterR10_2822_ACSysID=zxr10RouterR10_2822_ACSysID, zxr10UnitPidEntry=zxr10UnitPidEntry, zxr10Switch3952ESysID=zxr10Switch3952ESysID, zxr10SlotBoardType=zxr10SlotBoardType, zxr10PortType=zxr10PortType, zxr10RouterR10_1842_ACSysID=zxr10RouterR10_1842_ACSysID, zxr10PortDesc=zxr10PortDesc, zxr10UnitPidName=zxr10UnitPidName, zxr10RouterR10_1841_ACSysID=zxr10RouterR10_1841_ACSysID, zxr10PortWorkingType=zxr10PortWorkingType, zxr10HardwareAlarmTrapEntry=zxr10HardwareAlarmTrapEntry, zxr10Switch3928E_FISysID=zxr10Switch3928E_FISysID, zxr10StatisticsAlarmCode=zxr10StatisticsAlarmCode, zxr10SystemUnitIndex=zxr10SystemUnitIndex, zxr10Switch5928ESysID=zxr10Switch5928ESysID, zxr10PortIfIndex=zxr10PortIfIndex, zxr10HardwarePortAlarmType=zxr10HardwarePortAlarmType, zxr10StatisticsAlarmTrapEntry=zxr10StatisticsAlarmTrapEntry, zxr10StatisticsAlarmTrap=zxr10StatisticsAlarmTrap, UnitRunStatus=UnitRunStatus, zxr10HardwareBoardAlarmSlotNo=zxr10HardwareBoardAlarmSlotNo, zxr10RouterR10_2841_ACSysID=zxr10RouterR10_2841_ACSysID, zxr10SlotMasterStatus=zxr10SlotMasterStatus, zxr10RouterR10_2883_ACSysID=zxr10RouterR10_2883_ACSysID, zxr10UnitrcvMsgs=zxr10UnitrcvMsgs, zxr10BootVersion=zxr10BootVersion, zxr10RouterT128SysID=zxr10RouterT128SysID, zxr10Switch3952SysID=zxr10Switch3952SysID, zxr10SystemPeakCpuUtility=zxr10SystemPeakCpuUtility, zxr10RouterGAR_3608SysID=zxr10RouterGAR_3608SysID, zxr10Switch5252ASysID=zxr10Switch5252ASysID, zxr10UnitCommStatEntry=zxr10UnitCommStatEntry, zxr10SoftProtocolAlarmTime=zxr10SoftProtocolAlarmTime, zxr10StatisticsAlarmPortNo=zxr10StatisticsAlarmPortNo, zxr10PortAvailStatus=zxr10PortAvailStatus, zxr10HardwarePortAlarmSlotNo=zxr10HardwarePortAlarmSlotNo, zxr10HardwareAlarmStatus=zxr10HardwareAlarmStatus, zxr10HardwareBoardAlarmStatus=zxr10HardwareBoardAlarmStatus, 
zxr10Switch3928A_PSSysID=zxr10Switch3928A_PSSysID, zxr10RouterR10_3822_ACSysID=zxr10RouterR10_3822_ACSysID, zxr10PaneNo=zxr10PaneNo, zxr10slotEntry=zxr10slotEntry, zxr10UnitPidTable=zxr10UnitPidTable, zxr10SoftProtocolAlarmRackNo=zxr10SoftProtocolAlarmRackNo, zxr10Switch_6908SysID=zxr10Switch_6908SysID, zxr10SystemID=zxr10SystemID, zxr10SlotAvailStatus=zxr10SlotAvailStatus, zxr10SystemTrapCommunity=zxr10SystemTrapCommunity, zxr10SlaveQDRSRAMSize=zxr10SlaveQDRSRAMSize, zxr10SlotTemperature=zxr10SlotTemperature, zxr10RouterR10_1821E_DCSysID=zxr10RouterR10_1821E_DCSysID, zxr10Switch_8908SysID=zxr10Switch_8908SysID, zxr10Switch3952ASysID=zxr10Switch3952ASysID, zxr10UnitrcvBytes=zxr10UnitrcvBytes, zxr10RouterR10_1822_ACSysID=zxr10RouterR10_1822_ACSysID, zxr10RouterR10_3881_DCSysID=zxr10RouterR10_3881_DCSysID, zxr10Switch3228A_FISysID=zxr10Switch3228A_FISysID, zxr10RouterR10_3883_ACSysID=zxr10RouterR10_3883_ACSysID, zxr10HardwareEnvironAlarmEntry=zxr10HardwareEnvironAlarmEntry, zxr10SystemUnitTable=zxr10SystemUnitTable, zxr10RouterR10_1812E_DCSysID=zxr10RouterR10_1812E_DCSysID, zxr10HardwareEnvironAlarmStatus=zxr10HardwareEnvironAlarmStatus, zxr10SoftProtocolAlarmTable=zxr10SoftProtocolAlarmTable, zxr10SoftProtocolAlarmTrap=zxr10SoftProtocolAlarmTrap, zxr10HardwarePortAlarmTable=zxr10HardwarePortAlarmTable, zxr10RouterR10_1844_DCSysID=zxr10RouterR10_1844_DCSysID, zxr10HardwareEnvironAlarmShelfNo=zxr10HardwareEnvironAlarmShelfNo, zxr10HardwareBoardAlarmRackNo=zxr10HardwareBoardAlarmRackNo, zxr10RouterR10_3841_DCSysID=zxr10RouterR10_3841_DCSysID, zxr10RouterT600SysID=zxr10RouterT600SysID, zxr10SlotNPCType=zxr10SlotNPCType, zxr10RackNo=zxr10RackNo, zxr10Switch5928SysID=zxr10Switch5928SysID, zxr10Switch5224SysID=zxr10Switch5224SysID, zxr10shelfEntry=zxr10shelfEntry, zxr10Switch_6905SysID=zxr10Switch_6905SysID, zxr10SwitchT64GSysID=zxr10SwitchT64GSysID, zxr10UnitPidAsyQuenSendTimeouts=zxr10UnitPidAsyQuenSendTimeouts, zxr10RouterR10_1822E_ACSysID=zxr10RouterR10_1822E_ACSysID, 
zxr10SwitchT32SysID=zxr10SwitchT32SysID, zxr10RouterR10_3884_DCSysID=zxr10RouterR10_3884_DCSysID, zxr10SystemCpuUtility30s=zxr10SystemCpuUtility30s, zxr10SoftProtocolAlarmSaveLevel=zxr10SoftProtocolAlarmSaveLevel, zxr10RouterR10_2884_DCSysID=zxr10RouterR10_2884_DCSysID, zxr10McodeVer=zxr10McodeVer, MasterStatus=MasterStatus, zxr10RouterGAR_7208SysID=zxr10RouterGAR_7208SysID, zxr10HardwareAlarmCode=zxr10HardwareAlarmCode, zxr10RouterR10_1821E_ACSysID=zxr10RouterR10_1821E_ACSysID, zxr10RouterR10_2882_DCSysID=zxr10RouterR10_2882_DCSysID, zxr10RouterR10_1821_DCSysID=zxr10RouterR10_1821_DCSysID, zxr10RouterR10_1811_DCSysID=zxr10RouterR10_1811_DCSysID, zxr10StatisticsAlarmValueRisingThreshold=zxr10StatisticsAlarmValueRisingThreshold)
# Second (continuation) export batch for "ZXR10-MIB" — remaining symbols that did not
# fit in the preceding exportSymbols call.  Same module name, so importers see one
# unified symbol table.
mibBuilder.exportSymbols("ZXR10-MIB", zxr10HardwareAlarmValueFallingThreshold=zxr10HardwareAlarmValueFallingThreshold, zxr10RouterGER8SysID=zxr10RouterGER8SysID, zxr10SystemEnableSecret=zxr10SystemEnableSecret, zxr10HardwareAlarmVariableValue=zxr10HardwareAlarmVariableValue, zxr10PosInRack=zxr10PosInRack, zxr10RouterR10_2821_DCSysID=zxr10RouterR10_2821_DCSysID, zxr10Switch_8912SysID=zxr10Switch_8912SysID, zxr10RouterR10_3821_DCSysID=zxr10RouterR10_3821_DCSysID, SystemDeviceType=SystemDeviceType, zxr10SystemVersion=zxr10SystemVersion, zxr10RouterR10_3824_DCSysID=zxr10RouterR10_3824_DCSysID, zxr10HardwareAlarmShelfNo=zxr10HardwareAlarmShelfNo, zxr10RouterR10_3842_ACSysID=zxr10RouterR10_3842_ACSysID, zxr10RouterR10_2844_ACSysID=zxr10RouterR10_2844_ACSysID, zxr10MasterQDRSRAMSize=zxr10MasterQDRSRAMSize, zxr10RouterR10_3844_ACSysID=zxr10RouterR10_3844_ACSysID, zxr10UnitPidUsedStatus=zxr10UnitPidUsedStatus, zxr10PortSpeed=zxr10PortSpeed, zxr10Switch3928ESysID=zxr10Switch3928ESysID, zxr10RouterR10_1841_DCSysID=zxr10RouterR10_1841_DCSysID, zxr10StatisticsAlarmSlotNo=zxr10StatisticsAlarmSlotNo, DisplayString=DisplayString, zxr10rackTable=zxr10rackTable, zxr10HardwareAlarmTrap=zxr10HardwareAlarmTrap, zxr10RackAvailStatus=zxr10RackAvailStatus, zxr10HardwarePortAlarmTime=zxr10HardwarePortAlarmTime, zxr10HardwareAlarmValueRisingThreshold=zxr10HardwareAlarmValueRisingThreshold, zxr10=zxr10, zxr10RouterR10_3883_DCSysID=zxr10RouterR10_3883_DCSysID, zxr10HardwareBoardAlarmTable=zxr10HardwareBoardAlarmTable, zxr10RouterR10_2843_ACSysID=zxr10RouterR10_2843_ACSysID, zxr10HardwarePortAlarmRackNo=zxr10HardwarePortAlarmRackNo, zxr10UnitPidInterruptTimes=zxr10UnitPidInterruptTimes, zxr10StatisticsAlarmTable=zxr10StatisticsAlarmTable, zxr10Switch3226FISysID=zxr10Switch3226FISysID, BoardType=BoardType, zxr10Switch3928AFISysID=zxr10Switch3928AFISysID, zxr10UnitPidCalledTimes=zxr10UnitPidCalledTimes, zxr10UnitPidSynQuenRecTimeouts=zxr10UnitPidSynQuenRecTimeouts, 
zxr10UnitPidTimerNamedUsed=zxr10UnitPidTimerNamedUsed, zxr10SoftProtocolAlarmDescrip=zxr10SoftProtocolAlarmDescrip, zxr10PortMTU=zxr10PortMTU, zxr10ReceiveTick=zxr10ReceiveTick, zxr10SwitchT160GSysID=zxr10SwitchT160GSysID, zxr10systemconfig=zxr10systemconfig, zxr10SystemserialNo=zxr10SystemserialNo, zxr10StatisticsAlarmType=zxr10StatisticsAlarmType, zxr10StatisticsAlarmSaveCode=zxr10StatisticsAlarmSaveCode, zxr10HardwarePortAlarmShelfNo=zxr10HardwarePortAlarmShelfNo, zxr10HardwarePortAlarmStatus=zxr10HardwarePortAlarmStatus, zxr10SlotPortsNumber=zxr10SlotPortsNumber, zxr10Switch_6902SysID=zxr10Switch_6902SysID, zxr10HardwareEnvironAlarmTime=zxr10HardwareEnvironAlarmTime, zxr10SystemDescrip=zxr10SystemDescrip, PortType=PortType, zxr10_objectID=zxr10_objectID, zxr10SoftProtocolAlarmLevel=zxr10SoftProtocolAlarmLevel, zxr10HardwarePortAlarmEntry=zxr10HardwarePortAlarmEntry, zxr10Switch5228FSysID=zxr10Switch5228FSysID, zxr10UnitsndBytes=zxr10UnitsndBytes, zxr10SystemVpnName=zxr10SystemVpnName, zxr10UnitNo=zxr10UnitNo, zxr10HardwarePortAlarmDescrip=zxr10HardwarePortAlarmDescrip, zxr10SubcardMax=zxr10SubcardMax, zxr10Switch_8905SysID=zxr10Switch_8905SysID, zxr10Switch3206SysID=zxr10Switch3206SysID, zxr10RouterR10_3822_DCSysID=zxr10RouterR10_3822_DCSysID, zxr10UnitPidAsyQuenMsgMax=zxr10UnitPidAsyQuenMsgMax, zxr10RouterR10_2881_DCSysID=zxr10RouterR10_2881_DCSysID, zxr10RouterR10_2822_DCSysID=zxr10RouterR10_2822_DCSysID, zxr10PortNo=zxr10PortNo, zxr10SystemUnitEntry=zxr10SystemUnitEntry, HostAttr=HostAttr, zxr10RouterR10_2842_DCSysID=zxr10RouterR10_2842_DCSysID, zxr10FPGAVer=zxr10FPGAVer, zxr10RouterR10_1811_ACSysID=zxr10RouterR10_1811_ACSysID, zxr10StatisticsAlarmSaveType=zxr10StatisticsAlarmSaveType, zxr10RouterR10_1821_ACSysID=zxr10RouterR10_1821_ACSysID, zxr10RouterGER2SysID=zxr10RouterGER2SysID, zxr10UnitCommStatTable=zxr10UnitCommStatTable, zxr10HardwareBoardAlarmType=zxr10HardwareBoardAlarmType, zxr10SoftProtocolAlarmType=zxr10SoftProtocolAlarmType, 
zxr10Switch3226SysID=zxr10Switch3226SysID, zxr10_alarm=zxr10_alarm, zxr10Switch3228SysID=zxr10Switch3228SysID, zxr10ShelfAttrib=zxr10ShelfAttrib, zxr10UnitPidSynQuenBlocked=zxr10UnitPidSynQuenBlocked, zxr10HardwareBoardAlarmDescrip=zxr10HardwareBoardAlarmDescrip, zxr10slotTable=zxr10slotTable, zxr10RouterR10_2882_ACSysID=zxr10RouterR10_2882_ACSysID, zxr10PortProtocolStatus=zxr10PortProtocolStatus, zxr10Switch5252SysID=zxr10Switch5252SysID)
| 131.896584 | 12,577 | 0.780598 | 16,250 | 139,019 | 6.627015 | 0.090769 | 0.071539 | 0.125194 | 0.01222 | 0.435253 | 0.311285 | 0.251521 | 0.196742 | 0.158224 | 0.13083 | 0 | 0.152103 | 0.079838 | 139,019 | 1,053 | 12,578 | 132.021842 | 0.689744 | 0.002244 | 0 | 0.008746 | 0 | 0.013605 | 0.224834 | 0.0576 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.000972 | 0.005831 | 0 | 0.05345 | 0.000972 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
778f781ff9cc65cbd0f42226b7afbf7f0c69cc2b | 75 | py | Python | oj/custom_settings.example.py | gogiluv/testrepo | 55e4a905b70a460e74b8116a130b720821eaf2ce | [
"MIT"
] | 1 | 2018-01-28T07:48:13.000Z | 2018-01-28T07:48:13.000Z | oj/custom_settings.example.py | OnlineJudgeNextGeneration/qduoj2 | c4889d70850bd91ae7f662c02524d0555b6a3ce7 | [
"MIT"
] | null | null | null | oj/custom_settings.example.py | OnlineJudgeNextGeneration/qduoj2 | c4889d70850bd91ae7f662c02524d0555b6a3ce7 | [
"MIT"
] | 1 | 2020-09-29T14:21:27.000Z | 2020-09-29T14:21:27.000Z | # please set your own SECRET_KEY to a long random string
# SECRET_KEY = ""
| 25 | 56 | 0.733333 | 13 | 75 | 4.076923 | 0.846154 | 0.339623 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 75 | 2 | 57 | 37.5 | 0.883333 | 0.933333 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
778fbcef49997f0ef563feb1bdf7e1bde1261eac | 62,860 | py | Python | sdk/python/pulumi_gcp/compute/route.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 121 | 2018-06-18T19:16:42.000Z | 2022-03-31T06:06:48.000Z | sdk/python/pulumi_gcp/compute/route.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 492 | 2018-06-22T19:41:03.000Z | 2022-03-31T15:33:53.000Z | sdk/python/pulumi_gcp/compute/route.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 43 | 2018-06-19T01:43:13.000Z | 2022-03-23T22:43:37.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RouteArgs', 'Route']
# Argument bag for constructing a gcp.compute.Route resource.  Generated by the
# Pulumi Terraform Bridge (tfgen); @pulumi.input_type introspects the decorated
# getters/setters below to build the resource's input schema, so the exact
# property/decorator structure is load-bearing — do not restructure by hand.
@pulumi.input_type
class RouteArgs:
    def __init__(__self__, *,
                 dest_range: pulumi.Input[str],
                 network: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 next_hop_gateway: Optional[pulumi.Input[str]] = None,
                 next_hop_ilb: Optional[pulumi.Input[str]] = None,
                 next_hop_instance: Optional[pulumi.Input[str]] = None,
                 next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
                 next_hop_ip: Optional[pulumi.Input[str]] = None,
                 next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Route resource.
        :param pulumi.Input[str] dest_range: The destination range of outgoing packets that this route applies to.
               Only IPv4 is supported.
        :param pulumi.Input[str] network: The network that this route applies to.
        :param pulumi.Input[str] description: An optional description of this resource. Provide this property
               when you create the resource.
        :param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
               created. The name must be 1-63 characters long, and comply with
               RFC1035.  Specifically, the name must be 1-63 characters long and
               match the regular expression `a-z?` which means
               the first character must be a lowercase letter, and all following
               characters must be a dash, lowercase letter, or digit, except the
               last character, which cannot be a dash.
        :param pulumi.Input[str] next_hop_gateway: URL to a gateway that should handle matching packets.
               Currently, you can only specify the internet gateway, using a full or
               partial valid URL:
               * `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
               * `projects/project/global/gateways/default-internet-gateway`
               * `global/gateways/default-internet-gateway`
               * The string `default-internet-gateway`.
        :param pulumi.Input[str] next_hop_ilb: The IP address or URL to a forwarding rule of type
               loadBalancingScheme=INTERNAL that should handle matching
               packets.
               With the GA provider you can only specify the forwarding
               rule as a partial or full URL. For example, the following
               are all valid values:
               * 10.128.0.56
               * https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
               * regions/region/forwardingRules/forwardingRule
               When the beta provider, you can also specify the IP address
               of a forwarding rule from the same VPC or any peered VPC.
               Note that this can only be used when the destinationRange is
               a public (non-RFC 1918) IP CIDR range.
        :param pulumi.Input[str] next_hop_instance: URL to an instance that should handle matching packets.
               You can specify this as a full or partial URL. For example:
               * `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
               * `projects/project/zones/zone/instances/instance`
               * `zones/zone/instances/instance`
               * Just the instance name, with the zone in `next_hop_instance_zone`.
        :param pulumi.Input[str] next_hop_instance_zone: (Optional when `next_hop_instance` is
               specified)  The zone of the instance specified in
               `next_hop_instance`.  Omit if `next_hop_instance` is specified as
               a URL.
        :param pulumi.Input[str] next_hop_ip: Network IP address of an instance that should handle matching packets.
        :param pulumi.Input[str] next_hop_vpn_tunnel: URL to a VpnTunnel that should handle matching packets.
        :param pulumi.Input[int] priority: The priority of this route. Priority is used to break ties in cases
               where there is more than one matching route of equal prefix length.
               In the case of two routes with equal prefix length, the one with the
               lowest-numbered priority value wins.
               Default value is 1000. Valid range is 0 through 65535.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of instance tags to which this route applies.
        """
        # Required inputs are stored unconditionally; optional inputs are only
        # stored when supplied, so pulumi.get on an unset key behaves as "absent"
        # rather than "explicitly None".
        pulumi.set(__self__, "dest_range", dest_range)
        pulumi.set(__self__, "network", network)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if next_hop_gateway is not None:
            pulumi.set(__self__, "next_hop_gateway", next_hop_gateway)
        if next_hop_ilb is not None:
            pulumi.set(__self__, "next_hop_ilb", next_hop_ilb)
        if next_hop_instance is not None:
            pulumi.set(__self__, "next_hop_instance", next_hop_instance)
        if next_hop_instance_zone is not None:
            pulumi.set(__self__, "next_hop_instance_zone", next_hop_instance_zone)
        if next_hop_ip is not None:
            pulumi.set(__self__, "next_hop_ip", next_hop_ip)
        if next_hop_vpn_tunnel is not None:
            pulumi.set(__self__, "next_hop_vpn_tunnel", next_hop_vpn_tunnel)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    # Each property below maps a snake_case Python attribute to its camelCase
    # wire name via @pulumi.getter(name=...); getters/setters delegate to
    # pulumi.get/pulumi.set on the same key used in __init__.
    @property
    @pulumi.getter(name="destRange")
    def dest_range(self) -> pulumi.Input[str]:
        """
        The destination range of outgoing packets that this route applies to.
        Only IPv4 is supported.
        """
        return pulumi.get(self, "dest_range")
    @dest_range.setter
    def dest_range(self, value: pulumi.Input[str]):
        pulumi.set(self, "dest_range", value)
    @property
    @pulumi.getter
    def network(self) -> pulumi.Input[str]:
        """
        The network that this route applies to.
        """
        return pulumi.get(self, "network")
    @network.setter
    def network(self, value: pulumi.Input[str]):
        pulumi.set(self, "network", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional description of this resource. Provide this property
        when you create the resource.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the resource. Provided by the client when the resource is
        created. The name must be 1-63 characters long, and comply with
        RFC1035.  Specifically, the name must be 1-63 characters long and
        match the regular expression `a-z?` which means
        the first character must be a lowercase letter, and all following
        characters must be a dash, lowercase letter, or digit, except the
        last character, which cannot be a dash.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="nextHopGateway")
    def next_hop_gateway(self) -> Optional[pulumi.Input[str]]:
        """
        URL to a gateway that should handle matching packets.
        Currently, you can only specify the internet gateway, using a full or
        partial valid URL:
        * `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
        * `projects/project/global/gateways/default-internet-gateway`
        * `global/gateways/default-internet-gateway`
        * The string `default-internet-gateway`.
        """
        return pulumi.get(self, "next_hop_gateway")
    @next_hop_gateway.setter
    def next_hop_gateway(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_gateway", value)
    @property
    @pulumi.getter(name="nextHopIlb")
    def next_hop_ilb(self) -> Optional[pulumi.Input[str]]:
        """
        The IP address or URL to a forwarding rule of type
        loadBalancingScheme=INTERNAL that should handle matching
        packets.
        With the GA provider you can only specify the forwarding
        rule as a partial or full URL. For example, the following
        are all valid values:
        * 10.128.0.56
        * https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
        * regions/region/forwardingRules/forwardingRule
        When the beta provider, you can also specify the IP address
        of a forwarding rule from the same VPC or any peered VPC.
        Note that this can only be used when the destinationRange is
        a public (non-RFC 1918) IP CIDR range.
        """
        return pulumi.get(self, "next_hop_ilb")
    @next_hop_ilb.setter
    def next_hop_ilb(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_ilb", value)
    @property
    @pulumi.getter(name="nextHopInstance")
    def next_hop_instance(self) -> Optional[pulumi.Input[str]]:
        """
        URL to an instance that should handle matching packets.
        You can specify this as a full or partial URL. For example:
        * `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
        * `projects/project/zones/zone/instances/instance`
        * `zones/zone/instances/instance`
        * Just the instance name, with the zone in `next_hop_instance_zone`.
        """
        return pulumi.get(self, "next_hop_instance")
    @next_hop_instance.setter
    def next_hop_instance(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_instance", value)
    @property
    @pulumi.getter(name="nextHopInstanceZone")
    def next_hop_instance_zone(self) -> Optional[pulumi.Input[str]]:
        """
        (Optional when `next_hop_instance` is
        specified)  The zone of the instance specified in
        `next_hop_instance`.  Omit if `next_hop_instance` is specified as
        a URL.
        """
        return pulumi.get(self, "next_hop_instance_zone")
    @next_hop_instance_zone.setter
    def next_hop_instance_zone(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_instance_zone", value)
    @property
    @pulumi.getter(name="nextHopIp")
    def next_hop_ip(self) -> Optional[pulumi.Input[str]]:
        """
        Network IP address of an instance that should handle matching packets.
        """
        return pulumi.get(self, "next_hop_ip")
    @next_hop_ip.setter
    def next_hop_ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_ip", value)
    @property
    @pulumi.getter(name="nextHopVpnTunnel")
    def next_hop_vpn_tunnel(self) -> Optional[pulumi.Input[str]]:
        """
        URL to a VpnTunnel that should handle matching packets.
        """
        return pulumi.get(self, "next_hop_vpn_tunnel")
    @next_hop_vpn_tunnel.setter
    def next_hop_vpn_tunnel(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_vpn_tunnel", value)
    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        The priority of this route. Priority is used to break ties in cases
        where there is more than one matching route of equal prefix length.
        In the case of two routes with equal prefix length, the one with the
        lowest-numbered priority value wins.
        Default value is 1000. Valid range is 0 through 65535.
        """
        return pulumi.get(self, "priority")
    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")
    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of instance tags to which this route applies.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class _RouteState:
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 dest_range: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network: Optional[pulumi.Input[str]] = None,
                 next_hop_gateway: Optional[pulumi.Input[str]] = None,
                 next_hop_ilb: Optional[pulumi.Input[str]] = None,
                 next_hop_instance: Optional[pulumi.Input[str]] = None,
                 next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
                 next_hop_ip: Optional[pulumi.Input[str]] = None,
                 next_hop_network: Optional[pulumi.Input[str]] = None,
                 next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 self_link: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """Input properties used for looking up and filtering Route resources.

        Every argument is optional; see the matching property on this class
        for the meaning of each field.
        """
        # Record only the fields the caller actually supplied, in declaration
        # order, so unset fields stay absent from the property bag.
        for _field, _supplied in (
                ("description", description),
                ("dest_range", dest_range),
                ("name", name),
                ("network", network),
                ("next_hop_gateway", next_hop_gateway),
                ("next_hop_ilb", next_hop_ilb),
                ("next_hop_instance", next_hop_instance),
                ("next_hop_instance_zone", next_hop_instance_zone),
                ("next_hop_ip", next_hop_ip),
                ("next_hop_network", next_hop_network),
                ("next_hop_vpn_tunnel", next_hop_vpn_tunnel),
                ("priority", priority),
                ("project", project),
                ("self_link", self_link),
                ("tags", tags)):
            if _supplied is not None:
                pulumi.set(__self__, _field, _supplied)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Optional description of this resource, provided at creation time."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, description: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", description)

    @property
    @pulumi.getter(name="destRange")
    def dest_range(self) -> Optional[pulumi.Input[str]]:
        """Destination range of outgoing packets that this route applies to.
        Only IPv4 is supported.
        """
        return pulumi.get(self, "dest_range")

    @dest_range.setter
    def dest_range(self, dest_range: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dest_range", dest_range)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Name of the resource, provided by the client at creation time.
        Must be 1-63 characters long, comply with RFC1035, and match the
        regular expression `a-z?`: the first character must be a lowercase
        letter; following characters may be dashes, lowercase letters, or
        digits, except the last character, which cannot be a dash.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, name: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", name)

    @property
    @pulumi.getter
    def network(self) -> Optional[pulumi.Input[str]]:
        """The network that this route applies to."""
        return pulumi.get(self, "network")

    @network.setter
    def network(self, network: Optional[pulumi.Input[str]]):
        pulumi.set(self, "network", network)

    @property
    @pulumi.getter(name="nextHopGateway")
    def next_hop_gateway(self) -> Optional[pulumi.Input[str]]:
        """URL of a gateway that should handle matching packets.
        Currently only the internet gateway can be specified, as a full or
        partial valid URL:
        * `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
        * `projects/project/global/gateways/default-internet-gateway`
        * `global/gateways/default-internet-gateway`
        * The string `default-internet-gateway`.
        """
        return pulumi.get(self, "next_hop_gateway")

    @next_hop_gateway.setter
    def next_hop_gateway(self, next_hop_gateway: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_gateway", next_hop_gateway)

    @property
    @pulumi.getter(name="nextHopIlb")
    def next_hop_ilb(self) -> Optional[pulumi.Input[str]]:
        """IP address or URL of a forwarding rule of type
        loadBalancingScheme=INTERNAL that should handle matching packets.
        With the GA provider the forwarding rule may only be given as a
        partial or full URL; for example, these are all valid:
        * 10.128.0.56
        * https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
        * regions/region/forwardingRules/forwardingRule
        With the beta provider the IP address of a forwarding rule from the
        same VPC or any peered VPC may also be used; note that this is only
        allowed when the destinationRange is a public (non-RFC 1918) IP CIDR
        range.
        """
        return pulumi.get(self, "next_hop_ilb")

    @next_hop_ilb.setter
    def next_hop_ilb(self, next_hop_ilb: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_ilb", next_hop_ilb)

    @property
    @pulumi.getter(name="nextHopInstance")
    def next_hop_instance(self) -> Optional[pulumi.Input[str]]:
        """URL of an instance that should handle matching packets, as a full
        or partial URL. For example:
        * `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
        * `projects/project/zones/zone/instances/instance`
        * `zones/zone/instances/instance`
        * Just the instance name, with the zone in `next_hop_instance_zone`.
        """
        return pulumi.get(self, "next_hop_instance")

    @next_hop_instance.setter
    def next_hop_instance(self, next_hop_instance: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_instance", next_hop_instance)

    @property
    @pulumi.getter(name="nextHopInstanceZone")
    def next_hop_instance_zone(self) -> Optional[pulumi.Input[str]]:
        """(Optional when `next_hop_instance` is specified) Zone of the
        instance named in `next_hop_instance`; omit when `next_hop_instance`
        is given as a URL.
        """
        return pulumi.get(self, "next_hop_instance_zone")

    @next_hop_instance_zone.setter
    def next_hop_instance_zone(self, next_hop_instance_zone: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_instance_zone", next_hop_instance_zone)

    @property
    @pulumi.getter(name="nextHopIp")
    def next_hop_ip(self) -> Optional[pulumi.Input[str]]:
        """Network IP address of an instance that should handle matching packets."""
        return pulumi.get(self, "next_hop_ip")

    @next_hop_ip.setter
    def next_hop_ip(self, next_hop_ip: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_ip", next_hop_ip)

    @property
    @pulumi.getter(name="nextHopNetwork")
    def next_hop_network(self) -> Optional[pulumi.Input[str]]:
        """URL of a Network that should handle matching packets."""
        return pulumi.get(self, "next_hop_network")

    @next_hop_network.setter
    def next_hop_network(self, next_hop_network: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_network", next_hop_network)

    @property
    @pulumi.getter(name="nextHopVpnTunnel")
    def next_hop_vpn_tunnel(self) -> Optional[pulumi.Input[str]]:
        """URL of a VpnTunnel that should handle matching packets."""
        return pulumi.get(self, "next_hop_vpn_tunnel")

    @next_hop_vpn_tunnel.setter
    def next_hop_vpn_tunnel(self, next_hop_vpn_tunnel: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_vpn_tunnel", next_hop_vpn_tunnel)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """Priority of this route, used to break ties between matching routes
        of equal prefix length: the lowest-numbered value wins. Defaults to
        1000; the valid range is 0 through 65535.
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, priority: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", priority)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """ID of the project the resource belongs to; the provider project is
        used when this is not set.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, project: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", project)

    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> Optional[pulumi.Input[str]]:
        """The URI of the created resource."""
        return pulumi.get(self, "self_link")

    @self_link.setter
    def self_link(self, self_link: Optional[pulumi.Input[str]]):
        pulumi.set(self, "self_link", self_link)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """List of instance tags to which this route applies."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", tags)
class Route(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             description: Optional[pulumi.Input[str]] = None,
             dest_range: Optional[pulumi.Input[str]] = None,
             name: Optional[pulumi.Input[str]] = None,
             network: Optional[pulumi.Input[str]] = None,
             next_hop_gateway: Optional[pulumi.Input[str]] = None,
             next_hop_ilb: Optional[pulumi.Input[str]] = None,
             next_hop_instance: Optional[pulumi.Input[str]] = None,
             next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
             next_hop_ip: Optional[pulumi.Input[str]] = None,
             next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
             priority: Optional[pulumi.Input[int]] = None,
             project: Optional[pulumi.Input[str]] = None,
             tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             __props__=None):
    """Create a Route resource from individual keyword arguments.

    A Route is a rule that specifies how certain packets should be handled
    by the virtual network. Routes are associated with virtual machines by
    tag, and the set of routes for a particular virtual machine is called
    its routing table. Packets are matched by destination IP address,
    preferring smaller (more specific) ranges; remaining ties are broken by
    the smallest priority value, then by layer three/four packet headers.
    The winning route's next_hop determines where the packet is forwarded;
    packets matching no route are dropped.

    A Route resource must have exactly one of `next_hop_gateway`,
    `next_hop_instance`, `next_hop_ip`, `next_hop_vpn_tunnel`, or
    `next_hop_ilb` specified.

    To get more information about Route, see:
    * [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/routes)
    * How-to Guides
        * [Using Routes](https://cloud.google.com/vpc/docs/using-routes)

    ## Example Usage

    ```python
    import pulumi
    import pulumi_gcp as gcp

    default_network = gcp.compute.Network("defaultNetwork")
    default_route = gcp.compute.Route("defaultRoute",
        dest_range="15.0.0.0/24",
        network=default_network.name,
        next_hop_ip="10.132.1.5",
        priority=100)
    ```

    ## Import

    Route can be imported using any of these accepted formats

    ```sh
    $ pulumi import gcp:compute/route:Route default projects/{{project}}/global/routes/{{name}}
    ```

    ```sh
    $ pulumi import gcp:compute/route:Route default {{project}}/{{name}}
    ```

    ```sh
    $ pulumi import gcp:compute/route:Route default {{name}}
    ```

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] description: Optional description of this resource,
           provided at creation time.
    :param pulumi.Input[str] dest_range: Destination range of outgoing packets
           this route applies to. Only IPv4 is supported.
    :param pulumi.Input[str] name: RFC1035-compliant resource name, 1-63
           characters: first character a lowercase letter, following characters
           dashes, lowercase letters, or digits, last character not a dash.
    :param pulumi.Input[str] network: The network that this route applies to.
    :param pulumi.Input[str] next_hop_gateway: URL of a gateway that should
           handle matching packets. Only the internet gateway can currently be
           specified, as a full or partial URL
           (`https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`,
           `projects/project/global/gateways/default-internet-gateway`,
           `global/gateways/default-internet-gateway`, or the string
           `default-internet-gateway`).
    :param pulumi.Input[str] next_hop_ilb: IP address or URL of a forwarding
           rule of type loadBalancingScheme=INTERNAL that should handle matching
           packets. With the GA provider only a partial or full URL is accepted
           (e.g. 10.128.0.56,
           https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule,
           regions/region/forwardingRules/forwardingRule). With the beta
           provider, the IP address of a forwarding rule from the same VPC or a
           peered VPC may also be given; this requires the destinationRange to
           be a public (non-RFC 1918) IP CIDR range.
    :param pulumi.Input[str] next_hop_instance: URL of an instance that should
           handle matching packets, full or partial
           (`https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`,
           `projects/project/zones/zone/instances/instance`,
           `zones/zone/instances/instance`), or just the instance name with the
           zone in `next_hop_instance_zone`.
    :param pulumi.Input[str] next_hop_instance_zone: (Optional when
           `next_hop_instance` is specified) Zone of the instance named in
           `next_hop_instance`; omit when `next_hop_instance` is a URL.
    :param pulumi.Input[str] next_hop_ip: Network IP address of an instance
           that should handle matching packets.
    :param pulumi.Input[str] next_hop_vpn_tunnel: URL of a VpnTunnel that
           should handle matching packets.
    :param pulumi.Input[int] priority: Priority of this route, used to break
           ties between matching routes of equal prefix length (lowest value
           wins). Default 1000; valid range 0-65535.
    :param pulumi.Input[str] project: ID of the project the resource belongs
           to; defaults to the provider project.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: List of instance
           tags to which this route applies.
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: RouteArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """Create a Route resource from a `RouteArgs` object.

    A Route is a rule that specifies how certain packets should be handled
    by the virtual network. Routes are associated with virtual machines by
    tag, and the set of routes for a particular virtual machine is called
    its routing table. Packets are matched by destination IP address,
    preferring smaller (more specific) ranges; remaining ties are broken by
    the smallest priority value, then by layer three/four packet headers.
    The winning route's next_hop determines where the packet is forwarded;
    packets matching no route are dropped.

    A Route resource must have exactly one of `next_hop_gateway`,
    `next_hop_instance`, `next_hop_ip`, `next_hop_vpn_tunnel`, or
    `next_hop_ilb` specified.

    To get more information about Route, see:
    * [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/routes)
    * How-to Guides
        * [Using Routes](https://cloud.google.com/vpc/docs/using-routes)

    ## Example Usage

    ```python
    import pulumi
    import pulumi_gcp as gcp

    default_network = gcp.compute.Network("defaultNetwork")
    default_route = gcp.compute.Route("defaultRoute",
        dest_range="15.0.0.0/24",
        network=default_network.name,
        next_hop_ip="10.132.1.5",
        priority=100)
    ```

    ## Import

    Route can be imported using any of these accepted formats

    ```sh
    $ pulumi import gcp:compute/route:Route default projects/{{project}}/global/routes/{{name}}
    ```

    ```sh
    $ pulumi import gcp:compute/route:Route default {{project}}/{{name}}
    ```

    ```sh
    $ pulumi import gcp:compute/route:Route default {{name}}
    ```

    :param str resource_name: The name of the resource.
    :param RouteArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch to ``_internal_init`` from either ``__init__`` overload.

    ``get_resource_args_opts`` determines whether the caller used the
    ``(resource_name, args, opts)`` form or the keyword-argument form.
    """
    resource_args, opts = _utilities.get_resource_args_opts(RouteArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is None:
        # Keyword-argument form: forward everything untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # RouteArgs form: expand the args object into keyword arguments.
        __self__._internal_init(resource_name, opts, **vars(resource_args))
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   description: Optional[pulumi.Input[str]] = None,
                   dest_range: Optional[pulumi.Input[str]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   network: Optional[pulumi.Input[str]] = None,
                   next_hop_gateway: Optional[pulumi.Input[str]] = None,
                   next_hop_ilb: Optional[pulumi.Input[str]] = None,
                   next_hop_instance: Optional[pulumi.Input[str]] = None,
                   next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
                   next_hop_ip: Optional[pulumi.Input[str]] = None,
                   next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
                   priority: Optional[pulumi.Input[int]] = None,
                   project: Optional[pulumi.Input[str]] = None,
                   tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   __props__=None):
    """Shared implementation behind both ``__init__`` overloads.

    Validates options, builds the property bag for a new resource (or
    accepts a prebuilt ``__props__`` together with ``opts.id`` when looking
    up an existing one), and registers the resource with the engine.

    :raises TypeError: when ``opts`` is not a ResourceOptions, when
        ``__props__`` is passed without ``opts.id``, or when a required
        property (``dest_range``, ``network``) is missing.
    """
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # No id: this is a create, so a caller-supplied bag is invalid.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = RouteArgs.__new__(RouteArgs)
        __props__.__dict__["description"] = description
        if dest_range is None and not opts.urn:
            raise TypeError("Missing required property 'dest_range'")
        __props__.__dict__["dest_range"] = dest_range
        __props__.__dict__["name"] = name
        if network is None and not opts.urn:
            raise TypeError("Missing required property 'network'")
        __props__.__dict__["network"] = network
        # Remaining (optional) inputs are copied through as-is.
        for _prop, _arg in (
                ("next_hop_gateway", next_hop_gateway),
                ("next_hop_ilb", next_hop_ilb),
                ("next_hop_instance", next_hop_instance),
                ("next_hop_instance_zone", next_hop_instance_zone),
                ("next_hop_ip", next_hop_ip),
                ("next_hop_vpn_tunnel", next_hop_vpn_tunnel),
                ("priority", priority),
                ("project", project),
                ("tags", tags)):
            __props__.__dict__[_prop] = _arg
        # Output-only attributes start unset; the engine fills them in.
        __props__.__dict__["next_hop_network"] = None
        __props__.__dict__["self_link"] = None
    super(Route, __self__).__init__(
        'gcp:compute/route:Route',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
dest_range: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
next_hop_gateway: Optional[pulumi.Input[str]] = None,
next_hop_ilb: Optional[pulumi.Input[str]] = None,
next_hop_instance: Optional[pulumi.Input[str]] = None,
next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
next_hop_ip: Optional[pulumi.Input[str]] = None,
next_hop_network: Optional[pulumi.Input[str]] = None,
next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
project: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Route':
"""
Get an existing Route resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property
when you create the resource.
:param pulumi.Input[str] dest_range: The destination range of outgoing packets that this route applies to.
Only IPv4 is supported.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
:param pulumi.Input[str] network: The network that this route applies to.
:param pulumi.Input[str] next_hop_gateway: URL to a gateway that should handle matching packets.
Currently, you can only specify the internet gateway, using a full or
partial valid URL:
* `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
* `projects/project/global/gateways/default-internet-gateway`
* `global/gateways/default-internet-gateway`
* The string `default-internet-gateway`.
:param pulumi.Input[str] next_hop_ilb: The IP address or URL to a forwarding rule of type
loadBalancingScheme=INTERNAL that should handle matching
packets.
With the GA provider you can only specify the forwarding
rule as a partial or full URL. For example, the following
are all valid values:
* 10.128.0.56
* https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
* regions/region/forwardingRules/forwardingRule
When the beta provider, you can also specify the IP address
of a forwarding rule from the same VPC or any peered VPC.
Note that this can only be used when the destinationRange is
a public (non-RFC 1918) IP CIDR range.
:param pulumi.Input[str] next_hop_instance: URL to an instance that should handle matching packets.
You can specify this as a full or partial URL. For example:
* `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
* `projects/project/zones/zone/instances/instance`
* `zones/zone/instances/instance`
* Just the instance name, with the zone in `next_hop_instance_zone`.
:param pulumi.Input[str] next_hop_instance_zone: (Optional when `next_hop_instance` is
specified) The zone of the instance specified in
`next_hop_instance`. Omit if `next_hop_instance` is specified as
a URL.
:param pulumi.Input[str] next_hop_ip: Network IP address of an instance that should handle matching packets.
:param pulumi.Input[str] next_hop_network: URL to a Network that should handle matching packets.
:param pulumi.Input[str] next_hop_vpn_tunnel: URL to a VpnTunnel that should handle matching packets.
:param pulumi.Input[int] priority: The priority of this route. Priority is used to break ties in cases
where there is more than one matching route of equal prefix length.
In the case of two routes with equal prefix length, the one with the
lowest-numbered priority value wins.
Default value is 1000. Valid range is 0 through 65535.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of instance tags to which this route applies.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RouteState.__new__(_RouteState)
__props__.__dict__["description"] = description
__props__.__dict__["dest_range"] = dest_range
__props__.__dict__["name"] = name
__props__.__dict__["network"] = network
__props__.__dict__["next_hop_gateway"] = next_hop_gateway
__props__.__dict__["next_hop_ilb"] = next_hop_ilb
__props__.__dict__["next_hop_instance"] = next_hop_instance
__props__.__dict__["next_hop_instance_zone"] = next_hop_instance_zone
__props__.__dict__["next_hop_ip"] = next_hop_ip
__props__.__dict__["next_hop_network"] = next_hop_network
__props__.__dict__["next_hop_vpn_tunnel"] = next_hop_vpn_tunnel
__props__.__dict__["priority"] = priority
__props__.__dict__["project"] = project
__props__.__dict__["self_link"] = self_link
__props__.__dict__["tags"] = tags
return Route(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
An optional description of this resource. Provide this property
when you create the resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="destRange")
def dest_range(self) -> pulumi.Output[str]:
"""
The destination range of outgoing packets that this route applies to.
Only IPv4 is supported.
"""
return pulumi.get(self, "dest_range")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def network(self) -> pulumi.Output[str]:
"""
The network that this route applies to.
"""
return pulumi.get(self, "network")
@property
@pulumi.getter(name="nextHopGateway")
def next_hop_gateway(self) -> pulumi.Output[Optional[str]]:
"""
URL to a gateway that should handle matching packets.
Currently, you can only specify the internet gateway, using a full or
partial valid URL:
* `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
* `projects/project/global/gateways/default-internet-gateway`
* `global/gateways/default-internet-gateway`
* The string `default-internet-gateway`.
"""
return pulumi.get(self, "next_hop_gateway")
@property
@pulumi.getter(name="nextHopIlb")
def next_hop_ilb(self) -> pulumi.Output[Optional[str]]:
"""
The IP address or URL to a forwarding rule of type
loadBalancingScheme=INTERNAL that should handle matching
packets.
With the GA provider you can only specify the forwarding
rule as a partial or full URL. For example, the following
are all valid values:
* 10.128.0.56
* https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
* regions/region/forwardingRules/forwardingRule
When the beta provider, you can also specify the IP address
of a forwarding rule from the same VPC or any peered VPC.
Note that this can only be used when the destinationRange is
a public (non-RFC 1918) IP CIDR range.
"""
return pulumi.get(self, "next_hop_ilb")
@property
@pulumi.getter(name="nextHopInstance")
def next_hop_instance(self) -> pulumi.Output[Optional[str]]:
"""
URL to an instance that should handle matching packets.
You can specify this as a full or partial URL. For example:
* `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
* `projects/project/zones/zone/instances/instance`
* `zones/zone/instances/instance`
* Just the instance name, with the zone in `next_hop_instance_zone`.
"""
return pulumi.get(self, "next_hop_instance")
@property
@pulumi.getter(name="nextHopInstanceZone")
def next_hop_instance_zone(self) -> pulumi.Output[str]:
"""
(Optional when `next_hop_instance` is
specified) The zone of the instance specified in
`next_hop_instance`. Omit if `next_hop_instance` is specified as
a URL.
"""
return pulumi.get(self, "next_hop_instance_zone")
@property
@pulumi.getter(name="nextHopIp")
def next_hop_ip(self) -> pulumi.Output[str]:
"""
Network IP address of an instance that should handle matching packets.
"""
return pulumi.get(self, "next_hop_ip")
@property
@pulumi.getter(name="nextHopNetwork")
def next_hop_network(self) -> pulumi.Output[str]:
"""
URL to a Network that should handle matching packets.
"""
return pulumi.get(self, "next_hop_network")
@property
@pulumi.getter(name="nextHopVpnTunnel")
def next_hop_vpn_tunnel(self) -> pulumi.Output[Optional[str]]:
"""
URL to a VpnTunnel that should handle matching packets.
"""
return pulumi.get(self, "next_hop_vpn_tunnel")
@property
@pulumi.getter
def priority(self) -> pulumi.Output[Optional[int]]:
"""
The priority of this route. Priority is used to break ties in cases
where there is more than one matching route of equal prefix length.
In the case of two routes with equal prefix length, the one with the
lowest-numbered priority value wins.
Default value is 1000. Valid range is 0 through 65535.
"""
return pulumi.get(self, "priority")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
"""
The URI of the created resource.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of instance tags to which this route applies.
"""
return pulumi.get(self, "tags")
| 46.59748 | 134 | 0.64071 | 7,769 | 62,860 | 5.02304 | 0.0502 | 0.042871 | 0.060988 | 0.056939 | 0.955899 | 0.948186 | 0.939038 | 0.931427 | 0.927865 | 0.923893 | 0 | 0.009014 | 0.271126 | 62,860 | 1,348 | 135 | 46.632047 | 0.842722 | 0.547598 | 0 | 0.814894 | 1 | 0 | 0.092383 | 0.009844 | 0 | 0 | 0 | 0 | 0 | 1 | 0.165957 | false | 0.002128 | 0.010638 | 0 | 0.276596 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
77918e9deea502e2876228cdb462978e97466087 | 2,570 | py | Python | bingads/v13/bulk/entities/audiences/bulk_remarketing_list.py | pawelulita/BingAds-Python-SDK | e7b5a618e87a43d0a5e2c79d9aa4626e208797bd | [
"MIT"
] | 86 | 2016-02-29T03:24:28.000Z | 2022-03-29T09:30:21.000Z | bingads/v13/bulk/entities/audiences/bulk_remarketing_list.py | pawelulita/BingAds-Python-SDK | e7b5a618e87a43d0a5e2c79d9aa4626e208797bd | [
"MIT"
] | 135 | 2016-04-12T13:31:28.000Z | 2022-03-29T02:18:51.000Z | bingads/v13/bulk/entities/audiences/bulk_remarketing_list.py | pawelulita/BingAds-Python-SDK | e7b5a618e87a43d0a5e2c79d9aa4626e208797bd | [
"MIT"
] | 154 | 2016-04-08T04:11:27.000Z | 2022-03-29T21:21:07.000Z | from bingads.v13.bulk.entities import *
from bingads.service_client import _CAMPAIGN_OBJECT_FACTORY_V13
from bingads.v13.internal.bulk.entities.single_record_bulk_entity import _SingleRecordBulkEntity
from bingads.v13.internal.bulk.mappings import _SimpleBulkMapping
from bingads.v13.internal.bulk.string_table import _StringTable
from bingads.v13.internal.extensions import *
from .bulk_audience import BulkAudience
class BulkRemarketingList(BulkAudience):
""" Represents an Remarketing List that can be read or written in a bulk file.
This class exposes the :attr:`remarketing_list` property that can be read and written as fields of the
Remarketing List record in a bulk file.
For more information, see Remarketing List at https://go.microsoft.com/fwlink/?linkid=846127.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self,
remarketing_list=None,
status=None,):
super(BulkRemarketingList, self).__init__(audience = remarketing_list, status = status)
_MAPPINGS = [
_SimpleBulkMapping(
_StringTable.TagId,
field_to_csv=lambda c: bulk_str(c.remarketing_list.TagId),
csv_to_field=lambda c, v: setattr(c.remarketing_list, 'TagId', int(v) if v else None)
),
_SimpleBulkMapping(
_StringTable.RemarketingRule,
field_to_csv=lambda c: field_to_csv_RemarketingRule(c.remarketing_list),
csv_to_field=lambda c, v: csv_to_field_RemarketingRule(c.remarketing_list, v)
),
]
@property
def remarketing_list(self):
""" Defines a Remarketing List """
return self._audience
@remarketing_list.setter
def remarketing_list(self, remarketing_list):
self._audience = remarketing_list
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self.remarketing_list, 'remarketing_list')
super(BulkRemarketingList, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
self.convert_to_values(row_values, BulkRemarketingList._MAPPINGS)
def process_mappings_from_row_values(self, row_values):
self.remarketing_list = _CAMPAIGN_OBJECT_FACTORY_V13.create('RemarketingList')
super(BulkRemarketingList, self).process_mappings_from_row_values(row_values)
row_values.convert_to_entity(self, BulkRemarketingList._MAPPINGS)
| 39.538462 | 106 | 0.731518 | 303 | 2,570 | 5.881188 | 0.326733 | 0.159933 | 0.039282 | 0.049383 | 0.220539 | 0.056117 | 0 | 0 | 0 | 0 | 0 | 0.009602 | 0.189494 | 2,570 | 64 | 107 | 40.15625 | 0.845895 | 0.180934 | 0 | 0.105263 | 0 | 0 | 0.017535 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.131579 | false | 0 | 0.184211 | 0 | 0.394737 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77936e8ca10f6e684602ec0fcab350ddfe22ac60 | 24,154 | py | Python | src/daft_exprt/extract_features.py | ishine/ubisoft-laforge-daft-exprt | a576691c8c42988f813183efcea43c1677abe17a | [
"Apache-2.0"
] | 33 | 2021-09-17T18:32:23.000Z | 2022-03-01T21:05:08.000Z | src/daft_exprt/extract_features.py | ishine/ubisoft-laforge-daft-exprt | a576691c8c42988f813183efcea43c1677abe17a | [
"Apache-2.0"
] | 5 | 2021-12-07T04:23:04.000Z | 2022-03-15T07:37:13.000Z | src/daft_exprt/extract_features.py | ishine/ubisoft-laforge-daft-exprt | a576691c8c42988f813183efcea43c1677abe17a | [
"Apache-2.0"
] | 7 | 2021-09-16T02:24:02.000Z | 2022-01-11T07:48:19.000Z | import json
import logging
import logging.handlers
import os
import re
import subprocess
import types
import uuid
import librosa
import numpy as np
import torch
from shutil import rmtree
from librosa.filters import mel as librosa_mel_fn
from scipy.io import wavfile
from daft_exprt.symbols import ascii, eos, punctuation, SIL_WORD_SYMBOL, whitespace
from daft_exprt.utils import launch_multi_process
# module-level logger for this script
_logger = logging.getLogger(__name__)

# absolute directory of this source file
FILE_ROOT = os.path.dirname(os.path.realpath(__file__))
# scratch directory used to store temporary files (e.g. REAPER input/output)
TMP_DIR = os.path.join(FILE_ROOT, 'tmp')

# hyper-parameter names that determine the output of feature extraction;
# compared against the stored config to detect stale features directories
# (see check_features_config_used)
FEATURES_HPARAMS = ['centered', 'cutoff', 'f0_interval', 'filter_length', 'hop_length',
                    'language', 'mel_fmax', 'mel_fmin', 'min_clipping', 'max_f0', 'min_f0',
                    'n_mel_channels', 'order', 'sampling_rate', 'symbols', 'uv_cost', 'uv_interval']
def check_features_config_used(features_dir, hparams):
    ''' Compare current hyper-params with the configs stored in the features directory

    :param features_dir: directory to scan for previously saved .json config files
    :param hparams: current hyper-params namespace
    :return: True when every feature-related hyper-param matches all stored configs
    '''
    same_config = True
    for root, _, file_names in os.walk(os.path.normpath(features_dir)):
        # look for a config file saved during a previous feature extraction
        json_configs = [name for name in file_names if name.endswith('.json')]
        if not json_configs:
            continue
        with open(os.path.join(root, json_configs[0])) as config_file:
            hparams_prev = types.SimpleNamespace(**json.loads(config_file.read()))
        # flag every feature-related hyper-param that differs from the stored value
        for param in FEATURES_HPARAMS:
            prev_val, curr_val = getattr(hparams_prev, param), getattr(hparams, param)
            if curr_val != prev_val:
                same_config = False
                _logger.warning(f'Parameter "{param}" is different in "{root}" -- '
                                f'Was {prev_val} and now is {curr_val}')
    return same_config
def get_min_phone_duration(lines, min_phone_dur=1000.):
    ''' Extract the shortest phone duration (in seconds) found in the .markers lines

    :param lines: tab-separated marker lines whose first two fields are begin/end times
    :param min_phone_dur: initial value, returned when no line is shorter (e.g. empty input)
    :return: shortest phone duration across all lines, capped by min_phone_dur
    '''
    # begin/end are the first two tab-separated fields of each marker line
    durations = [float(fields[1]) - float(fields[0])
                 for fields in (line.strip().split(sep='\t') for line in lines)]
    # include min_phone_dur as the initial value, exactly like the manual loop it replaces
    return min(durations + [min_phone_dur])
def duration_to_integer(float_durations, hparams, nb_samples=None):
    ''' Convert phoneme float durations (seconds) to integer mel-spec frame durations

    :param float_durations: list of [phone_begin, phone_end] timings in seconds
                            NB: the list is consumed (popped) by this function
    :param hparams: hyper-params namespace (sampling_rate, filter_length, hop_length, centered)
    :param nb_samples: number of audio samples; estimated from the timings when None
    :return: list with the number of mel-spec frames assigned to each phoneme
    '''
    if nb_samples is None:
        # total audio duration is the sum of all phone durations
        total_duration = sum(end - begin for begin, end in float_durations)
        nb_samples = int(total_duration * hparams.sampling_rate)
    # number of mel-spec frames, ignoring any STFT padding for now
    nb_frames = 1 + int((nb_samples - hparams.filter_length) / hparams.hop_length)
    # sample index at the center of each mel-spec frame window
    frames_idx = [int(hparams.filter_length / 2) + hparams.hop_length * k for k in range(nb_frames)]
    # assign each frame to the phoneme whose interval contains its center sample
    int_durations, nb_assigned = [], 0
    while nb_assigned < nb_frames:
        begin, end = float_durations.pop(0)
        if begin == end:
            # zero-length phones are not allowed
            raise ValueError
        begin_smp, end_smp = int(begin * hparams.sampling_rate), int(end * hparams.sampling_rate)
        nb_phone_frames = sum(1 for idx in frames_idx if begin_smp < idx <= end_smp)
        int_durations.append(nb_phone_frames)
        nb_assigned += nb_phone_frames
    # account for the extra frames introduced by centered STFT padding
    if hparams.centered:
        nb_edge_frames = int(hparams.filter_length / 2 / hparams.hop_length)
        # left padding goes to the first phoneme
        int_durations[0] += nb_edge_frames
        # right padding: new entry if a phoneme remains unconsumed, else extend the last one
        if len(float_durations) != 0:
            int_durations.append(nb_edge_frames)
        else:
            int_durations[-1] += nb_edge_frames
    return int_durations
def update_markers(file_name, lines, sentence, sent_begin, int_durations, hparams, logger):
    ''' Update markers:
        - change timings to start from 0
        - add punctuation or whitespace at word boundaries
        - add EOS token at end of sentence
        - add int durations

    :param file_name: base name of the utterance (used only for log messages)
    :param lines: raw .markers lines -- fields [begin, end, phone, word, word_idx]
    :param sentence: transcription read from the corresponding .lab file
    :param sent_begin: begin time (s) of the first marker; subtracted so timings start at 0
    :param int_durations: per-phone mel-spec frame counts (consumed/popped by this function)
    :param hparams: hyper-params namespace; only hparams.language is read here
    :param logger: worker logger for warnings/errors
    :return: list of [begin, end, int_dur, symbol, word, word_idx] entries,
             or None when .lab words cannot be matched to .markers words
    '''
    # characters to consider in the sentence
    if hparams.language == 'english':
        all_chars = ascii + punctuation
    else:
        raise NotImplementedError()
    '''
    match words in the sentence with the ones in markers lines
    Sentence: ,THAT's, an example'! ' of a sentence. . .'
    Markers words: that s an example <sil> of a sentence
    '''
    # split sentence:
    # [',', "that's", ',', 'an', "example'", '!', "'", 'of', 'a', 'sentence', '.', '.', '.', "'"]
    sent_words = re.findall(f"[\w']+|[{punctuation}]", sentence.lower().strip())
    # remove characters that are not letters or punctuation:
    # [',', "that's", ',', 'an', "example'", '!', 'of', 'a', 'sentence', '.', '.', '.']
    sent_words = [x for x in sent_words if len(re.sub(f'[^{all_chars}]', '', x)) != 0]
    # be sure to begin the sentence with a word and not a punctuation
    # ["that's", ',', 'an', "example'", '!', 'of', 'a', 'sentence', '.', '.', '.']
    while sent_words[0] in punctuation:
        sent_words.pop(0)
    # keep only one punctuation type at the end
    # ["that's", ',', 'an', "example'", '!', 'of', 'a', 'sentence']
    punctuation_end = None
    while sent_words[-1] in punctuation:
        punctuation_end = sent_words.pop(-1)
    # split markers lines -- [[begin, end, phone, word, word_idx], ....]
    markers = [line.strip().split(sep='\t') for line in lines]
    # extract markers words
    # they are no '<sil>' at beginning and end of sentence because we trimmed the audio
    # ['that', 's', 'an', example'', '<sil>', 'of', 'a', 'sentence']
    words_idx = [marker[4] for marker in markers]
    # dict.fromkeys keeps first-occurrence order of word indexes (one entry per word)
    lines_idx = [words_idx.index(word_idx) for word_idx in list(dict.fromkeys(words_idx).keys())]
    marker_words = [markers[line_idx][3] for line_idx in lines_idx]
    # update markers with word boundaries
    sent_words_copy, markers_old = sent_words.copy(), markers.copy()
    markers, word_idx, word_error = [], 0, False
    while len(sent_words) != 0:
        # extract word in .lab sentence and .markers file
        sent_word = sent_words.pop(0)
        marker_word, marker_word_idx = markers_old[0][3], markers_old[0][4]
        if marker_word != sent_word:
            # we should have the same words
            # generally the issue comes from the symbol '
            # e.g. example' vs example or that's vs [that, s]
            regex_word = re.findall(f"[\w]+|[{punctuation}]", sent_word)
            if len(regex_word) == 1:  # ['example']
                sent_word = regex_word[0]
            else:  # ['that', 's']
                # re-queue the split pieces and retry with the first one
                sent_words = regex_word + sent_words
                sent_word = sent_words.pop(0)
            if marker_word != sent_word:
                # cannot fix the mismatch between words
                word_error = True
                logger.warning(f'Correspondance issue between words in the .lab sentence and those in .markers file -- '
                               f'File name: {file_name} -- Sentence: {sent_words_copy} -- '
                               f'Markers: {marker_words} -- Problematic words: {sent_word} -- {marker_word}')
                break
        # retrieve all markers lines that correspond to the word
        while len(markers_old) != 0 and markers_old[0][4] == marker_word_idx:
            begin, end, phone, word, _ = markers_old.pop(0)
            # shift timings so the utterance starts at 0
            begin = f'{float(begin) - sent_begin:.3f}'
            end = f'{float(end) - sent_begin:.3f}'
            int_dur = str(int_durations.pop(0))
            markers.append([begin, end, int_dur, phone, word, str(word_idx)])
        # at this point we pass to the next word
        # we must add a word boundary between two consecutive words
        word_idx += 1
        if len(sent_words) != 0:
            # boundary symbol is the punctuation from the sentence, or a whitespace
            word_bound = sent_words.pop(0) if sent_words[0] in punctuation else whitespace
            # check if a silence marker is associated to the word boundary
            if markers_old[0][3] == SIL_WORD_SYMBOL:
                begin, end, _, _, _ = markers_old.pop(0)
                begin = f'{float(begin) - sent_begin:.3f}'
                end = f'{float(end) - sent_begin:.3f}'
                int_dur = str(int_durations.pop(0))
                markers.append([begin, end, int_dur, word_bound, word_bound, str(word_idx)])
            else:
                # no silence: boundary gets a zero-length, zero-frame entry
                end_prev = markers[-1][1]
                markers.append([end_prev, end_prev, str(0), word_bound, word_bound, str(word_idx)])
            word_idx += 1
    if not word_error:
        # add end punctuation if there is one
        if punctuation_end is not None:
            end_prev = markers[-1][1]
            markers.append([end_prev, end_prev, str(0), punctuation_end, punctuation_end, str(word_idx)])
            word_idx += 1
        # add EOS token
        end_prev = markers[-1][1]
        markers.append([end_prev, end_prev, str(0), eos, eos, str(word_idx)])
        # check everything is correct
        assert(len(sent_words) == len(markers_old) == len(int_durations) == 0), \
            logger.error(f'File name: {file_name} -- length mismatch between lists: ({sent_words}, {markers_old}, {int_durations})')
        return markers
    else:
        return None
def extract_pitch(wav, fs, hparams):
    ''' Extract pitch frames from audio using REAPER binary
        Convert pitch to log scale and set unvoiced values to 0.

    :param wav: float32 NumPy array in [-1., 1.]
    :param fs: sampling rate of wav
    :param hparams: hyper-params namespace (f0/uv extraction settings, hop_length)
    :return: NumPy array with one log-F0 value per hop_length samples
             (expected by the caller to match the number of mel-spec frames)
    '''
    # REAPER asks for int16 audios -- audio is in float32
    wav = (wav * 32768.0).astype('int16')
    # unique temporary file names so concurrent workers do not collide
    rand_name = str(uuid.uuid4())
    out_dir = os.path.join(TMP_DIR, 'reaper')
    os.makedirs(out_dir, exist_ok=True)
    wav_file = os.path.join(out_dir, f'{rand_name}.wav')
    f0_file = wav_file.replace('.wav', '.f0')
    try:
        wavfile.write(wav_file, fs, wav)
        # run REAPER to extract pitch values
        process = ['reaper', '-i', f'{wav_file}',
                   '-a', '-f', f'{f0_file}',
                   '-e', f'{hparams.f0_interval}',
                   '-m', f'{hparams.min_f0}',
                   '-x', f'{hparams.max_f0}',
                   '-u', f'{hparams.uv_interval}',
                   '-w', f'{hparams.uv_cost}']
        subprocess.check_call(process, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
        # read raw int16 PCM f0 values produced by REAPER
        with open(f0_file, 'rb') as f:
            buf = f.read()
    finally:
        # always clean up the temporary files, even when REAPER or the read fails
        for tmp_file in (wav_file, f0_file):
            if os.path.isfile(tmp_file):
                os.remove(tmp_file)
    pitch = np.frombuffer(buf, dtype='int16')
    # frombuffer returns a read-only view -- copy before modifying
    pitch = np.copy(pitch)
    # REAPER marks unvoiced samples with values <= 0
    uv_idxs = np.where(pitch <= 0.)[0]
    # temporarily set unvoiced samples to 1000. so log() is defined everywhere
    pitch[uv_idxs] = 1000.
    pitch = np.log(pitch)
    # set unvoiced values to 0.
    pitch[uv_idxs] = 0.
    # extract pitch for each mel-spec frame
    pitch_frames = pitch[::hparams.hop_length]
    # edge case: sub-sampling missed the last frame
    if len(pitch) % hparams.hop_length == 0:
        pitch_frames = np.append(pitch_frames, pitch[-1])
    return pitch_frames
def get_symbols_pitch(pitch, markers):
    ''' Average the frame-level log-F0 over the voiced frames assigned to each symbol

    pitch = NumPy array of shape (nb_mel_spec_frames, )
    markers = [[begin, end, int_dur, symbol, word, word_idx], ...]
    :return: list of formatted '{mean:.3f}\n' strings, one per marker entry
    '''
    symbols_pitch = []
    offset = 0
    for entry in markers:
        # number of mel-spec frames assigned to the symbol
        n_frames = int(entry[2])
        if n_frames == 0:
            # zero-duration symbols (word boundaries, EOS) get a 0. pitch
            symbols_pitch.append(f'{0.:.3f}\n')
            continue
        # keep only voiced frames (unvoiced log-F0 values are 0.)
        voiced = pitch[offset: offset + n_frames]
        voiced = voiced[voiced > 0.]
        offset += n_frames
        if len(voiced) == 0:
            symbols_pitch.append(f'{0.:.3f}\n')
        else:
            symbols_pitch.append(f'{np.mean(voiced):.3f}\n')
    return symbols_pitch
def extract_energy(mel_spec):
    ''' Compute the energy (L2 norm across mel channels) of every mel-spec frame

    mel_spec = NumPy array of shape (nb_mel_spec_channels, nb_mel_spec_frames)
    :return: NumPy array of shape (nb_mel_spec_frames, )
    '''
    return np.linalg.norm(mel_spec, axis=0)
def get_symbols_energy(energy, markers):
    ''' Average the frame-level energy over the frames assigned to each symbol

    energy = NumPy array of shape (nb_mel_spec_frames, )
    markers = [[begin, end, int_dur, symbol, word, word_idx], ...]
    :return: list of formatted '{mean:.3f}\n' strings, one per marker entry
    '''
    symbols_energy = []
    offset = 0
    for entry in markers:
        # number of mel-spec frames assigned to the symbol
        n_frames = int(entry[2])
        if n_frames == 0:
            # zero-duration symbols (word boundaries, EOS) get a 0. energy
            symbols_energy.append(f'{0.:.3f}\n')
            continue
        mean_nrg = np.mean(energy[offset: offset + n_frames])
        symbols_energy.append(f'{mean_nrg:.3f}\n')
        offset += n_frames
    return symbols_energy
def mel_spectrogram_HiFi(wav, hparams):
    ''' Mel-Spectrogram extraction as it is performed by HiFi-GAN

    :param wav: 1D float32 NumPy array in [-1., 1.]
    :param hparams: hyper-params namespace with STFT / mel filter-bank settings
    :return: NumPy array of shape (n_mel_channels, nb_frames) with the log mel-spectrogram
    '''
    # convert to PyTorch float tensor
    wav = torch.FloatTensor(wav)  # (T, )
    # extract hparams
    fmin = hparams.mel_fmin
    fmax = hparams.mel_fmax
    center = hparams.centered
    hop_size = hparams.hop_length
    n_fft = hparams.filter_length
    num_mels = hparams.n_mel_channels
    sampling_rate = hparams.sampling_rate
    min_clipping = hparams.min_clipping
    # get mel filter bank -- keyword arguments required since librosa 0.10
    # (positional calls raise TypeError; keywords also work with older librosa)
    mel_filter_bank = librosa_mel_fn(sr=sampling_rate, n_fft=n_fft, n_mels=num_mels,
                                     fmin=fmin, fmax=fmax)  # (n_mels, 1 + n_fft/2)
    mel_filter_bank = torch.from_numpy(mel_filter_bank).float()  # (n_mels, 1 + n_fft/2)
    # build hann window
    hann_window = torch.hann_window(n_fft)
    # extract amplitude spectrogram
    # return_complex=False is deprecated (and removed in recent PyTorch);
    # view_as_real reproduces the old (..., 2) real/imag layout exactly,
    # so the magnitude computation below is numerically unchanged
    spec = torch.stft(wav, n_fft, hop_length=hop_size, win_length=n_fft, window=hann_window,
                      center=center, pad_mode='reflect', normalized=False, onesided=True, return_complex=True)
    spec = torch.view_as_real(spec)
    spec = torch.sqrt(spec.pow(2).sum(-1) + (1e-9))
    # convert to mels and pass to log
    mel_spec = torch.matmul(mel_filter_bank, spec)
    mel_spec = torch.log(torch.clamp(mel_spec, min=min_clipping))
    # transform to numpy array
    mel_spec = mel_spec.squeeze().numpy()
    return mel_spec
def rescale_wav_to_float32(x):
    ''' Rescale audio array between -1.f and 1.f based on the current format

    :param x: NumPy array of samples in int16, int32, uint8, float32 or float64
    :return: float32 NumPy array with samples nominally in [-1., 1.]
    :raises TypeError: when the input dtype is not supported
    '''
    dtype = x.dtype
    if dtype == 'float32' or dtype == 'float64':
        # already in float range, nothing to rescale
        y = x
    elif dtype == 'int16':
        y = x / 32768.0
    elif dtype == 'int32':
        y = x / 2147483648.0
    elif dtype == 'uint8':
        # uint8 audio is unsigned: shift to zero-centered before scaling
        y = 2 * ((x / 255.0) - 0.5)
    else:
        raise TypeError(f"could not normalize wav, unsupported sample type {x.dtype}")
    y = y.astype('float32')
    # librosa occasionally returns samples slightly outside [-1., 1.];
    # the amplitude check is therefore intentionally non-fatal
    if np.max(np.abs(y)) > 1.0:
        pass
    return y
def _extract_features(files, features_dir, hparams, log_queue):
    ''' Extract mel-spectrogram and markers with int duration

    Also extracts and saves frame-level and symbol-level energy and pitch
    features for one (markers_file, wav_file) pair. Designed to run in a
    worker process: logging goes through the multiprocessing queue.

    :param files: tuple (markers_file, wav_file) to process
    :param features_dir: output directory where all feature files are written
    :param hparams: hyper-params namespace used for feature extraction
    :param log_queue: multiprocessing logging queue shared with the main process
    '''
    # create logger from logging queue
    qh = logging.handlers.QueueHandler(log_queue)
    root = logging.getLogger()
    if not root.hasHandlers():
        root.setLevel(logging.INFO)
        root.addHandler(qh)
    logger = logging.getLogger(f"worker{str(uuid.uuid4())}")
    # check files exist
    markers_file, wav_file = files
    assert(os.path.isfile(markers_file)), logger.error(f'There is no such file: {markers_file}')
    assert(os.path.isfile(wav_file)), logger.error(f'There is no such file: {wav_file}')
    # read markers lines
    with open(markers_file, 'r', encoding='utf-8') as f:
        lines = f.readlines()
    # check min phone duration is coherent
    # min phone duration must be >= filter_length // 2
    # in order to have at least one mel-spec frame attributed to the phone
    min_phone_dur = get_min_phone_duration(lines)
    fft_length = hparams.filter_length / hparams.sampling_rate
    assert(min_phone_dur > fft_length / 2), \
        logger.error(f'Min phone duration = {min_phone_dur} -- filter_length / 2 = {fft_length / 2}')
    # extract sentence duration
    # leading and tailing silences have been removed in markers.py script
    sent_begin = float(lines[0].strip().split(sep='\t')[0])
    sent_end = float(lines[-1].strip().split(sep='\t')[1])
    sent_dur = sent_end - sent_begin
    # ignore audio if length is inferior to min wav duration
    if sent_dur >= hparams.minimum_wav_duration / 1000:
        # read wav file to range [-1, 1] in np.float32
        wav, fs = librosa.load(wav_file, sr=hparams.sampling_rate)
        wav = rescale_wav_to_float32(wav)
        # remove leading and tailing silences
        wav = wav[int(sent_begin * fs): int(sent_end * fs)]
        # extract mel-spectrogram
        mel_spec = mel_spectrogram_HiFi(wav, hparams)
        # get number of mel-spec frames
        nb_mel_spec_frames = mel_spec.shape[1]
        # convert phoneme durations to integer frame durations
        # timings are shifted so the trimmed audio starts at 0
        float_durations = [[float(x[0]) - sent_begin, float(x[1]) - sent_begin]
                           for x in [line.strip().split(sep='\t') for line in lines]]
        int_durations = duration_to_integer(float_durations, hparams, nb_samples=len(wav))
        # sanity checks: one duration per marker line, frames fully covered, no empty phones
        assert(len(int_durations) == len(lines)), logger.error(f'{markers_file} -- ({len(int_durations)}, {len(lines)})')
        assert(sum(int_durations) == nb_mel_spec_frames), logger.error(f'{markers_file} -- ({sum(int_durations)}, {nb_mel_spec_frames})')
        assert(0 not in int_durations), logger.error(f'{markers_file} -- {int_durations}')
        # update markers:
        # change timings to start from 0
        # add punctuation or whitespace at word boundaries
        # add EOS token at end of sentence
        # add int durations
        markers_dir = os.path.dirname(markers_file)
        file_name = os.path.basename(markers_file).replace('.markers', '')
        sentence_file = os.path.join(markers_dir, f'{file_name}.lab')
        assert(os.path.isfile(sentence_file)), logger.error(f'There is no such file: {sentence_file}')
        with open(sentence_file, 'r', encoding='utf-8') as f:
            sentence = f.readline()
        markers = update_markers(file_name, lines, sentence, sent_begin, int_durations, hparams, logger)
        # markers is None when .lab words could not be matched (already logged)
        if markers is not None:
            # save mel-spectrogram -- (n_mel_channels, T)
            np.save(os.path.join(features_dir, f'{file_name}.npy'), mel_spec)
            # save markers
            # each line has the format: [begin, end, int_dur, symbol, word, word_idx]
            markers_file = os.path.join(features_dir, f'{file_name}.markers')
            with open(markers_file, 'w', encoding='utf-8') as f:
                f.writelines(['\t'.join(x) + '\n' for x in markers])
            # extract energy for each mel-spec frame
            mel_spec = np.exp(mel_spec)  # remove log
            frames_energy = extract_energy(mel_spec)
            # save frames energy values
            energy_file = os.path.join(features_dir, f'{file_name}.frames_nrg')
            with open(energy_file, 'w', encoding='utf-8') as f:
                for val in frames_energy:
                    f.write(f'{val:.3f}\n')
            # extract energy on the symbol level
            # we use average energy value per symbol
            symbols_energy = get_symbols_energy(frames_energy, markers)
            # save symbols energy
            energy_file = os.path.join(features_dir, f'{file_name}.symbols_nrg')
            with open(energy_file, 'w', encoding='utf-8') as f:
                f.writelines(symbols_energy)
            # extract log pitch for each mel-spec frame
            frames_pitch = extract_pitch(wav, fs, hparams)
            assert(len(frames_pitch) == nb_mel_spec_frames), logger.error(f'{markers_file} -- ({len(frames_pitch)}, {nb_mel_spec_frames})')
            # save frames pitch values
            pitch_file = os.path.join(features_dir, f'{file_name}.frames_f0')
            with open(pitch_file, 'w', encoding='utf-8') as f:
                for val in frames_pitch:
                    f.write(f'{val:.3f}\n')
            # extract pitch on the symbol level
            # we use average pitch value per symbol
            symbols_pitch = get_symbols_pitch(frames_pitch, markers)
            # save symbols pitch values
            pitch_file = os.path.join(features_dir, f'{file_name}.symbols_f0')
            with open(pitch_file, 'w', encoding='utf-8') as f:
                f.writelines(symbols_pitch)
    else:
        logger.warning(f'Ignoring {wav_file} -- audio has length inferior to {hparams.minimum_wav_duration / 1000}s after trimming')
def get_files_for_features_extraction(line, markers_dir, log_queue):
    """Return the file name from a metadata line if its .markers file exists.

    `line` is a metadata entry of the form 'file_name|text'.  `markers_dir`
    is the directory that holds the alignment marker files.  `log_queue` is
    unused here but kept so the signature matches the multi-process worker
    convention.  Returns the bare file name, or None when no markers file
    is found for it.
    """
    file_name = line.strip().split(sep='|')[0].strip()
    markers_path = os.path.join(markers_dir, f'{file_name}.markers')
    return file_name if os.path.isfile(markers_path) else None
def extract_features(dataset_dir, features_dir, hparams, n_jobs):
    ''' Extract features for training

    For each speaker listed in hparams.speakers, reads the speaker's
    metadata.csv, keeps only the entries that have a .markers alignment
    file, skips entries already processed (detected by an existing
    .symbols_f0 output), and runs _extract_features over the remaining
    files with n_jobs parallel workers.  The hyper-parameter config used
    is saved next to the extracted features.
    '''
    # iterate over speakers
    _logger.info('--' * 30)
    _logger.info('Extracting Features'.upper())
    _logger.info('--' * 30)
    for speaker in hparams.speakers:
        _logger.info(f'Speaker: "{speaker}"')
        # check wavs and markers dir exist
        wavs_dir = os.path.join(dataset_dir, speaker, 'wavs')
        markers_dir = os.path.join(dataset_dir, speaker, 'align')
        # NOTE(review): assert statements are stripped under `python -O`;
        # these existence checks would then be skipped silently.
        assert(os.path.isdir(wavs_dir)), _logger.error(f'There is no such directory: {wavs_dir}')
        assert(os.path.isdir(markers_dir)), _logger.error(f'There is no such directory: {markers_dir}')
        # check metadata file exist
        spk_features_dir = os.path.join(features_dir, speaker)
        metadata = os.path.join(spk_features_dir, 'metadata.csv')
        assert(os.path.isfile(metadata)), _logger.error(f'There is no such file: {metadata}')
        # get all files that can be used for features extraction
        with open(metadata, 'r', encoding='utf-8') as f:
            lines = f.readlines()
        # workers return the file name, or None when no .markers file exists
        file_names = launch_multi_process(iterable=lines, func=get_files_for_features_extraction,
                                          n_jobs=n_jobs, markers_dir=markers_dir, timer_verbose=False)
        file_names = [x for x in file_names if x is not None]
        # check current files that exist in features dir
        # avoid to process files that already have been processed in a previous features extraction
        # (.symbols_f0 is the last artifact written per file, so its presence means "done")
        curr_files = [x.replace('.symbols_f0', '').strip() for x in os.listdir(spk_features_dir) if x.endswith('.symbols_f0')]
        missing_files = [x for x in file_names if x not in curr_files]
        _logger.info(f'{len(curr_files)} files already processed. {len(missing_files)} new files need to be processed')
        # extract features -- each work item is a (markers_file, wav_file) pair
        files = [(os.path.join(markers_dir, f'{x}.markers'), os.path.join(wavs_dir, f'{x}.wav')) for x in missing_files]
        launch_multi_process(iterable=files, func=_extract_features, n_jobs=n_jobs,
                             features_dir=spk_features_dir, hparams=hparams)
        # save config used to perform features extraction
        hparams.save_hyper_params(os.path.join(spk_features_dir, 'config.json'))
        _logger.info('')
    # remove tmp directory
    rmtree(TMP_DIR, ignore_errors=True)
| 43.599278 | 139 | 0.620187 | 3,283 | 24,154 | 4.379836 | 0.140725 | 0.015578 | 0.013214 | 0.008346 | 0.28479 | 0.229432 | 0.210029 | 0.17915 | 0.139161 | 0.103832 | 0 | 0.012451 | 0.265132 | 24,154 | 553 | 140 | 43.678119 | 0.797634 | 0.230811 | 0 | 0.155689 | 0 | 0.002994 | 0.12365 | 0.022482 | 0 | 0 | 0 | 0 | 0.035928 | 1 | 0.038922 | false | 0.002994 | 0.047904 | 0 | 0.125749 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
7794c9a15b5585aa2606eff1839893cdafad353f | 4,076 | py | Python | python/cuspatial/cuspatial/geometry/geoseries.py | AyodeAwe/cuspatial | 77971ac91a24228bc46cf461c0ac7b6f2ed78e44 | [
"Apache-2.0"
] | 347 | 2019-08-29T12:39:02.000Z | 2022-03-28T14:55:34.000Z | python/cuspatial/cuspatial/geometry/geoseries.py | AyodeAwe/cuspatial | 77971ac91a24228bc46cf461c0ac7b6f2ed78e44 | [
"Apache-2.0"
] | 425 | 2019-08-24T23:27:46.000Z | 2022-03-31T20:07:18.000Z | python/cuspatial/cuspatial/geometry/geoseries.py | AyodeAwe/cuspatial | 77971ac91a24228bc46cf461c0ac7b6f2ed78e44 | [
"Apache-2.0"
] | 109 | 2019-08-14T22:49:56.000Z | 2022-02-24T19:54:42.000Z | # Copyright (c) 2020-2021, NVIDIA CORPORATION
from typing import TypeVar, Union
import geopandas as gpd
import pandas as pd
from geopandas.geoseries import GeoSeries as gpGeoSeries
import cudf
from cuspatial.geometry.geoarrowbuffers import GeoArrowBuffers
from cuspatial.geometry.geocolumn import GeoColumn, GeoMeta
from cuspatial.io.geopandas_adapter import GeoPandasAdapter
# Type variable bound to GeoSeries, for annotating methods that return the
# concrete subclass; not referenced in this part of the module.
T = TypeVar("T", bound="GeoSeries")
class GeoSeries(cudf.Series):
    """
    cuspatial.GeoSeries enables GPU-backed storage and computation of
    shapely-like objects. Our goal is to give feature parity with GeoPandas.
    At this time, only from_geopandas and to_geopandas are directly supported.
    cuspatial GIS, indexing, and trajectory functions depend on the arrays
    stored in the `GeoArrowBuffers` object, accessible with the `points`,
    `multipoints`, `lines`, and `polygons` accessors.
    >>> cuseries.points
    xy:
    0 -1.0
    1 0.0
    dtype: float64
    """

    def __init__(
        self,
        data: Union[gpd.GeoSeries],
        index: Union[cudf.Index, pd.Index] = None,
        dtype=None,
        name=None,
        nan_as_null=True,
    ):
        # Condition index: inherit from the input series when possible,
        # otherwise fall back to a fresh RangeIndex over the data.
        if isinstance(data, (gpGeoSeries, GeoSeries)):
            if index is None:
                index = data.index
        if index is None:
            index = cudf.RangeIndex(0, len(data))
        # Condition data: a plain pandas Series of geometries is promoted
        # to a geopandas GeoSeries so it takes the gpGeoSeries branch below.
        if isinstance(data, pd.Series):
            data = gpGeoSeries(data)
        # Create column -- the isinstance order matters: GeoColumn is used
        # as-is, a GeoSeries shares its existing column, and a geopandas
        # GeoSeries is converted host->device via the adapter.
        if isinstance(data, GeoColumn):
            column = data
        elif isinstance(data, GeoSeries):
            column = data._column
        elif isinstance(data, gpGeoSeries):
            adapter = GeoPandasAdapter(data)
            buffers = GeoArrowBuffers(adapter.get_geoarrow_host_buffers())
            pandas_meta = GeoMeta(adapter.get_geopandas_meta())
            column = GeoColumn(buffers, pandas_meta)
        else:
            raise TypeError(
                f"Incompatible object passed to GeoSeries ctor {type(data)}"
            )
        super().__init__(column, index, dtype, name, nan_as_null)

    @property
    def _geocolumn(self):
        """
        The GeoColumn object keeps a reference to a `GeoArrowBuffers` object,
        which contains all of the geometry coordinates and offsets for thie
        `GeoSeries`.
        """
        return self._column

    @_geocolumn.setter
    def _geocolumn(self, value):
        # Only GeoColumn instances may back a GeoSeries.
        if not isinstance(value, GeoColumn):
            raise TypeError
        self._column = value

    @property
    def points(self):
        """
        Access the `PointsArray` of the underlying `GeoArrowBuffers`.
        """
        return self._geocolumn.points

    @property
    def multipoints(self):
        """
        Access the `MultiPointArray` of the underlying `GeoArrowBuffers`.
        """
        return self._geocolumn.multipoints

    @property
    def lines(self):
        """
        Access the `LineArray` of the underlying `GeoArrowBuffers`.
        """
        return self._geocolumn.lines

    @property
    def polygons(self):
        """
        Access the `PolygonArray` of the underlying `GeoArrowBuffers`.
        """
        return self._geocolumn.polygons

    def __repr__(self):
        # TODO: Implement Iloc with slices so that we can use `Series.__repr__`
        # Falls back to round-tripping through geopandas, which copies to host.
        return self.to_pandas().__repr__()

    def to_geopandas(self, nullable=False):
        """
        Returns a new GeoPandas GeoSeries object from the coordinates in
        the cuspatial GeoSeries.

        Raises ValueError when nullable is True, since <NA> values are not
        supported yet.
        """
        if nullable is True:
            raise ValueError("GeoSeries doesn't support <NA> yet")
        # Copy the device buffers to host, then rebuild shapely objects.
        host_column = self._geocolumn.to_host()
        output = [host_column[i].to_shapely() for i in range(len(host_column))]
        return gpGeoSeries(output, index=self.index.to_pandas())

    def to_pandas(self):
        """
        Treats to_pandas and to_geopandas as the same call, which improves
        compatibility with pandas.
        """
        return self.to_geopandas()
| 31.114504 | 79 | 0.629539 | 449 | 4,076 | 5.587973 | 0.35412 | 0.0279 | 0.020725 | 0.047828 | 0.092467 | 0.078119 | 0.078119 | 0 | 0 | 0 | 0 | 0.00585 | 0.287046 | 4,076 | 130 | 80 | 31.353846 | 0.857536 | 0.300294 | 0 | 0.101449 | 0 | 0 | 0.038861 | 0 | 0 | 0 | 0 | 0.007692 | 0 | 1 | 0.144928 | false | 0.014493 | 0.115942 | 0.014493 | 0.391304 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
7795432cd6d3090d060937e876a441cb4cc29351 | 29,824 | py | Python | src/itol_pfamtree.py | ElofssonLab/TMplot | 6a19cd1cb792733fb5db067e333c4ab3d5e238b7 | [
"MIT"
] | null | null | null | src/itol_pfamtree.py | ElofssonLab/TMplot | 6a19cd1cb792733fb5db067e333c4ab3d5e238b7 | [
"MIT"
] | 5 | 2020-07-10T10:10:06.000Z | 2022-03-11T23:41:28.000Z | src/itol_pfamtree.py | ElofssonLab/TMplot | 6a19cd1cb792733fb5db067e333c4ab3d5e238b7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
from ete3 import Tree
import shutil
import math
import myfunc
from colour import Color
# Endpoints of the blue->red gradient used for subfamily/species coloring.
blue = Color("blue")
red = Color("red")
from itolapi import Itol
from itolapi import ItolExport
# Directory containing this script; used to locate helper scripts.
rundir = os.path.dirname(os.path.realpath(__file__))
# Usage text printed by PrintHelp(); printed at runtime, so kept verbatim.
usage="""
USAGE: itol_pfamtree.py [-datapath DIR] -l pfamidlist [ID [ID ...]]
Visualize phylogenetic tree of Pfam family, highlighting important features
of membrane proteins
OPTIONS:
-m, -method STR Method for visualization, (default: 0)
0:
1:
linear:
sd1: phylogenetic tree showing the number of the TM helices and with orientation
represented by either red or blue, and also color subfamilies differently
in the branches.
sd2: phylogenetic tree with domain architectures
sd3: phylogenetic tree with the first level (kingdom) colored in branches.
and the next three levels are colored in outer circles,(three circles)
-datapath DIR Set datapath, (default: ./)
-treefile FILE Set the tree file directly
-fastafile FILE Set the fasta file providing the annotation
-outpath DIR Set outpath, (default: ./)
-q Quiet mode
-h, --help Print this help message and exit
Created 2012-03-13, updated 2019-08-16, Nanjiang Shu
"""
def PrintHelp():#{{{
    """Write the module usage text to standard output."""
    sys.stdout.write(usage + "\n")
#}}}
def GetFontSize(numLeave):#{{{
    """Return a label font size scaled down with tree size.

    The size is 500/sqrt(numLeave), clamped to the range [30, 200] so very
    small trees do not get huge labels and very large trees stay readable.
    """
    return min(200, max(30, 500 / math.sqrt(numLeave)))
#}}}
def WriteDomainColorDefFile(domain_colordef_file, domain_dict, domain_idlist, seqlen_dict, leaves_name_set):#{{{
    """Write domain color definition file for iTOL given domain file

    Each output line has the format
        seqid,seqlen,SHAPE|begin|end|color|domainid[,...]
    with one shape entry per domain of the sequence; begin/end are converted
    from 0-based to 1-based coordinates.  Only sequences that are leaves of
    the tree (members of leaves_name_set) are written.

    Returns 0 on success, 1 on I/O failure.
    """
    default_shape = "HH"
    # Map each domain id to a color sampled along the blue->red gradient.
    lst_color = list(blue.range_to(red, len(domain_idlist)))
    color_dict = {domainid: c.get_hex_l()
                  for domainid, c in zip(domain_idlist, lst_color)}
    try:
        # Use a context manager so the handle is always closed (the
        # original opened the file without ever closing it).
        with open(domain_colordef_file, "w") as fpout:
            for seqid in domain_dict:
                if seqid not in leaves_name_set:
                    continue
                seqlen = seqlen_dict[seqid]
                fpout.write("%s,%d"%(seqid, seqlen))
                for dm in domain_dict[seqid]:
                    # dm is (begin, end, domainid) with 0-based coordinates
                    domainid = dm[2]
                    fpout.write(",%s|%d|%d|%s|%s"%(default_shape, dm[0]+1,
                                                   dm[1]+1, color_dict[domainid],
                                                   domainid))
                fpout.write("\n")
        return 0
    except IOError:
        print("Failed to write to file %s"%(domain_colordef_file), file=sys.stderr)
        return 1
#}}}
def Itol_Tree_m0(pfamid, datapath, outpath):#{{{
    """Upload the tree of `pfamid` to iTOL with all available annotation
    files and export pdf/jpg/thumbnail images to `outpath`.

    Returns 1 when the tree file is missing; calls sys.exit(1) when the
    iTOL upload fails.
    """
    #Create the Itol class
    itl = Itol()
    #Set the tree file
    tree = datapath + os.sep + pfamid + '.tree'
    (datafile1, datafile2, datafile3, datafile4) = ("", "", "", "")
    if not os.path.exists(tree):
        print("tree file %s does not exist. Ignore" %(tree), file=sys.stderr)
        return 1
    t = Tree(tree)
    leaves = t.get_leaves()
    numLeave = len(leaves)
    fontsize = GetFontSize(numLeave)
    # Annotation data files; datafile2/datafile3 are currently disabled and
    # stay "", so they are filtered out by the existence check below.
    datafile1 = datapath + os.sep + pfamid + '.numTM_and_io.txt'
    # datafile2 = datapath + os.sep + pfamid + '.cmpclass.colordef.txt'
    # datafile3 = datapath + os.sep + pfamid + '.ntermstate.colordef.txt'
    datafile4 = datapath + os.sep + pfamid + '.cluster.colordef.txt'
    colordeffile = datapath + os.sep + pfamid + '.pfam.colordef.txt'
    branchlabelfile = datapath + os.sep + pfamid + '.branchlabel.txt'
    datafileList = [datafile1, datafile2, datafile3, datafile4, colordeffile, branchlabelfile]
    rootname = os.path.basename(os.path.splitext(tree)[0])
    #===================================
    itl.add_file(tree)
    itl.params['treeName'] = rootname
    itl.params['treeFormat'] = 'newick'
    valid_datafileList = []
    for datafile in datafileList:
        if os.path.exists(datafile):
            itl.add_file(datafile)
            valid_datafileList.append(datafile)
    # iTOL datasets are referenced by index -- presumably "0","1",... in
    # upload order; used below for datasets_visible. TODO confirm.
    datasets_list = [str(x) for x in range(len(valid_datafileList))]
    # Check parameters
    # itl.print_variables()
    #Submit the tree
    print('')
    print('Uploading the tree. This may take some time depending on how large the tree is and how much load there is on the itol server')
    good_upload = itl.upload()
    if good_upload == False:
        print('There was an error:'+itl.comm.upload_output)
        sys.exit(1)
    #Read the tree ID
    print('Tree ID: '+str(itl.comm.tree_id))
    #Read the iTOL API return statement
    print('iTOL output: '+str(itl.comm.upload_output))
    #Website to be redirected to iTOL tree
    print('Tree Web Page URL: '+itl.get_webpage())
    # Warnings associated with the upload
    print('Warnings: '+str(itl.comm.warnings))
    #Export to pdf
    print('Exporting to pdf')
    itol_exporter = itl.get_itol_export()
    #itol_exporter = itolexport.ItolExport()
    #itol_exporter.set_export_param_value('tree','18793532031912684633930')
    itol_exporter.set_export_param_value('format', 'pdf')
    itol_exporter.set_export_param_value('display_mode',"2")
    #itol_exporter.set_export_param_value('current_font_size',fontsize)
    itol_exporter.set_export_param_value('align_labels',"1")
    itol_exporter.set_export_param_value('datasets_visible',",".join(datasets_list))
    epsfile = outpath + os.sep + pfamid + '-itol.eps'
    pdffile = outpath + os.sep + pfamid + '-itol.pdf'
    jpgfile = outpath + os.sep + pfamid + '-itol.jpg'
    thumbfile = outpath + os.sep + "thumb." + pfamid + '-itol.jpg'
    itol_exporter.export(pdffile)
    #os.system("epstopdf %s" % epsfile )
    # ImageMagick is used to derive the jpg and the 200px thumbnail.
    os.system("convert %s %s" % (pdffile, jpgfile) )
    os.system("convert -thumbnail 200 %s %s" % (jpgfile, thumbfile))
    print('exported tree to ',pdffile)
#}}}
def Itol_Tree_m1(pfamid, datapath, outpath):#{{{
    """Upload the tree of `pfamid` to iTOL using the legacy add_variable
    API, with TM helices treated as domains, and export eps/pdf/jpg images
    to `outpath`.

    Returns 1 when the tree file is missing; calls sys.exit(1) when the
    iTOL upload fails.
    """
    # TM helices are treated as domains
    #Create the Itol class
    itl = Itol()
    #Set the tree file
    tree = datapath + os.sep + pfamid + '.tree'
    (datafile1, datafile2, datafile3, datafile4) = ("", "", "", "")
    if not os.path.exists(tree):
        print("tree file %s does not exist. Ignore" %(tree), file=sys.stderr)
        return 1
    t = Tree(tree)
    leaves = t.get_leaves()
    numLeave = len(leaves)
    fontsize = GetFontSize(numLeave)
    datafile1 = datapath + os.sep + pfamid + '.numTM_and_io.txt'
    datafile2 = datapath + os.sep + pfamid + '.cmpclass.colordef.txt'
    # datafile3 = datapath + os.sep + pfamid + '.ntermstate.colordef.txt'
    datafile4 = datapath + os.sep + pfamid + '.cluster.colordef.txt'
    colordeffile = datapath + os.sep + pfamid + '.pfam.colordef.txt'
    branchlabelfile = datapath + os.sep + pfamid + '.branchlabel.txt'
    #===================================
    itl.add_variable('treeFile',tree)
    # NOTE(review): the tree name is hard-coded to 'PF00854' rather than
    # derived from pfamid -- looks like a leftover from testing; confirm.
    itl.add_variable('treeName','PF00854')
    itl.add_variable('treeFormat','newick')
    if os.path.exists(colordeffile):
        itl.add_variable('colorDefinitionFile', colordeffile)
    if os.path.exists(branchlabelfile):
        itl.add_variable('branchLabelsFile', branchlabelfile)
    #=================================== dataset 1: TM count multibar
    if os.path.exists(datafile1):
        itl.add_variable('datafile1File',datafile1)
        itl.add_variable('datafile1Label','numTM_and_io')
        itl.add_variable('datafile1Separator','comma')
        itl.add_variable('datafile1Type','multibar')
        itl.add_variable('datafile1PreventOverlap','1')
        itl.add_variable('datafile1Color','#FF0000')
    #=================================== dataset 2: comparison class strip
    if os.path.exists(datafile2):
        itl.add_variable('datafile2File', datafile2)
        itl.add_variable('datafile2Label', 'cmpclass')
        itl.add_variable('datafile2Separator','comma')
        itl.add_variable('datafile2Type','colorstrip')
        itl.add_variable('datafile2StripWidth','200')
        itl.add_variable('datafile2PreventOverlap','1')
        itl.add_variable('datafile2ColoringType','both')
    #=================================== dataset 3: N-terminal state strip
    if os.path.exists(datafile3):
        itl.add_variable('datafile3File', datafile3)
        itl.add_variable('datafile3Label', 'NtermState')
        itl.add_variable('datafile3Separator','comma')
        itl.add_variable('datafile3Type','colorstrip')
        itl.add_variable('datafile3StripWidth','200')
        itl.add_variable('datafile3PreventOverlap','1')
        itl.add_variable('datafile3ColoringType','both')
    #=================================== dataset 4: cluster strip
    if os.path.exists(datafile4):
        itl.add_variable('datafile4File', datafile4)
        itl.add_variable('datafile4Label', 'cluster')
        itl.add_variable('datafile4Separator','comma')
        itl.add_variable('datafile4Type','colorstrip')
        itl.add_variable('datafile4StripWidth','200')
        itl.add_variable('datafile4PreventOverlap','1')
        itl.add_variable('datafile4ColoringType','both')
        itl.add_variable('datafile4BranchColoringType','both')
    #itl.add_variable('datafile1BarSizeMax','1')
    #===================================
    # Check parameters
    # itl.print_variables()
    #Submit the tree
    print('')
    print('Uploading the tree. This may take some time depending on how large the tree is and how much load there is on the itol server')
    good_upload = itl.upload()
    if good_upload == False:
        print('There was an error:'+itl.comm.upload_output)
        sys.exit(1)
    #Read the tree ID
    print('Tree ID: '+str(itl.comm.tree_id))
    #Read the iTOL API return statement
    print('iTOL output: '+str(itl.comm.upload_output))
    #Website to be redirected to iTOL tree
    print('Tree Web Page URL: '+itl.get_webpage())
    # Warnings associated with the upload
    print('Warnings: '+str(itl.comm.warnings))
    #Export to pdf
    print('Exporting to pdf')
    itol_exporter = itl.get_itol_export()
    #itol_exporter = itolexport.ItolExport()
    #itol_exporter.set_export_param_value('tree','18793532031912684633930')
    itol_exporter.set_export_param_value('format', 'eps')
    itol_exporter.set_export_param_value('display_mode',"2")
    itol_exporter.set_export_param_value('current_font_size',fontsize)
    itol_exporter.set_export_param_value('align_labels',"1")
    itol_exporter.set_export_param_value('datafileList','dataset1')
    epsfile = outpath + os.sep + pfamid + '-itol.eps'
    pdffile = outpath + os.sep + pfamid + '-itol.pdf'
    jpgfile = outpath + os.sep + pfamid + '-itol.jpg'
    thumbfile = outpath + os.sep + "thumb." + pfamid + '-itol.jpg'
    itol_exporter.export(epsfile)
    # eps -> pdf -> jpg -> 200px thumbnail via epstopdf/ImageMagick.
    os.system("epstopdf %s" % epsfile )
    os.system("convert %s %s" % (epsfile, jpgfile) )
    os.system("convert -thumbnail 200 %s %s" % (jpgfile, thumbfile))
    print('exported tree to ',pdffile)
#}}}
def Itol_Tree_m_sd1(pfamid, datapath, outpath):#{{{
    """Phylogenetic tree with numTM_io and subfamilies branch coloring

    Builds a subfamily branch-color definition file (blue->red gradient over
    subfamilies), uploads the tree with the TM-count multibar dataset, and
    exports eps/pdf/jpg images named *-itol-sd1.* to `outpath`.

    Returns 1 when the tree file is missing; calls sys.exit(1) when the
    iTOL upload fails.
    """
    tree = datapath + os.sep + pfamid + '.tree'
    t = Tree(tree)
    leaves = t.get_leaves()
    lst_leaves_name = []
    for leaf in leaves:
        lst_leaves_name.append(leaf.name)
    numLeave = len(lst_leaves_name)
    # read subfamily definition
    subfamfile = "%s/%s.subfamilies"%(datapath, pfamid)
    subfam_idlist = []
    subfamDict = myfunc.Read_subfamily(subfamfile, subfam_idlist)
    numSubFam = len(subfam_idlist)
    # create subfamily branch color definition file
    subfam_colordef_file = "%s/%s.subfamilies.colordef.txt"%(outpath, pfamid)
    # one gradient color per subfamily, blue -> red
    lst_color = list(blue.range_to(red,numSubFam))
    color_dict = {}
    for i in range(numSubFam):
        famid = subfam_idlist[i]
        color = lst_color[i].get_hex_l()
        color_dict[famid] = lst_color[i].get_hex_l()
    myfunc.WriteSubFamColorDef(subfam_colordef_file, subfamDict, lst_leaves_name, color_dict)
    #Create the Itol class
    itl = Itol()
    #Set the tree file
    (datafile1, datafile2, datafile3, datafile4) = ("", "", "", "")
    if not os.path.exists(tree):
        print("tree file %s does not exist. Ignore" %(tree), file=sys.stderr)
        return 1
    fontsize = GetFontSize(numLeave)
    datafile1 = datapath + os.sep + pfamid + '.numTM_and_io.txt'
    colordeffile = subfam_colordef_file
    if os.path.exists(colordeffile):
        itl.add_variable('colorDefinitionFile', colordeffile)
        itl.add_variable('colorDefinitionLabel', "Subfamilies")
    #===================================
    itl.add_variable('treeFile',tree)
    itl.add_variable('treeName','SD1')
    itl.add_variable('treeFormat','newick')
    #=================================== dataset 1: TM count multibar
    if os.path.exists(datafile1):
        itl.add_variable('datafile1File',datafile1)
        itl.add_variable('datafile1Label','numTM_and_io')
        itl.add_variable('datafile1Separator','comma')
        itl.add_variable('datafile1Type','multibar')
        itl.add_variable('datafile1PreventOverlap','1')
        itl.add_variable('datafile1Color','#FF0000')
    #===================================
    # Check parameters
    # itl.print_variables()
    #Submit the tree
    print('')
    print('Uploading the tree. This may take some time depending on how large the tree is and how much load there is on the itol server')
    good_upload = itl.upload()
    if good_upload == False:
        print('There was an error:'+itl.comm.upload_output)
        sys.exit(1)
    #Read the tree ID
    print('Tree ID: '+str(itl.comm.tree_id))
    #Read the iTOL API return statement
    print('iTOL output: '+str(itl.comm.upload_output))
    #Website to be redirected to iTOL tree
    print('Tree Web Page URL: '+itl.get_webpage())
    # Warnings associated with the upload
    print('Warnings: '+str(itl.comm.warnings))
    #Export to pdf
    itol_exporter = itl.get_itol_export()
    #itol_exporter = itolexport.ItolExport()
    #itol_exporter.set_export_param_value('tree','18793532031912684633930')
    itol_exporter.set_export_param_value('format', 'eps')
    itol_exporter.set_export_param_value('display_mode',"2")
    itol_exporter.set_export_param_value('current_font_size',fontsize)
    itol_exporter.set_export_param_value('align_labels',"1")
    itol_exporter.set_export_param_value('datafileList','dataset1')
    extname = "-itol-sd1"
    epsfile = outpath + os.sep + pfamid + extname + '.eps'
    pdffile = outpath + os.sep + pfamid + extname + '.pdf'
    jpgfile = outpath + os.sep + pfamid + extname + '.jpg'
    pngfile = outpath + os.sep + pfamid + extname + '.png'
    thumbfile = outpath + os.sep + "thumb." + pfamid + extname + '.jpg'
    itol_exporter.export(epsfile)
    # eps -> pdf -> jpg -> 200px thumbnail via epstopdf/ImageMagick.
    os.system("epstopdf %s" % epsfile )
    os.system("convert %s %s" % (epsfile, jpgfile) )
    os.system("convert -thumbnail 200 %s %s" % (jpgfile, thumbfile))
    print('exported tree to ',pdffile)
#}}}
def Itol_Tree_m_sd2(pfamid, datapath, outpath):#{{{
    """Phylogenetic tree with domain architectures (method 'sd2').

    Builds an iTOL domain color definition file from the .mdp domain file
    and sequence lengths, uploads the tree with that dataset, and exports
    eps/pdf/jpg images named *-itol-sd2.* to `outpath`.

    Returns 1 when the tree file is missing; calls sys.exit(1) when the
    iTOL upload fails.
    """
    tree = datapath + os.sep + pfamid + '.tree'
    t = Tree(tree)
    leaves = t.get_leaves()
    lst_leaves_name = []
    for leaf in leaves:
        lst_leaves_name.append(leaf.name)
    numLeave = len(lst_leaves_name)
    leaves_name_set = set(lst_leaves_name)
    # read seqlen file
    seqlenfile = "%s/%s.seqlen.txt"%(datapath, pfamid)
    seqlen_dict = myfunc.ReadSeqLengthDict(seqlenfile)
    # read subfamily definition
    domain_idlist = []
    domainfile = "%s/%s.mdp"%(datapath, pfamid)
    domain_dict = myfunc.Read_domain_sd(domainfile, domain_idlist)
    domain_colordef_file = "%s/%s.mdp.colordef.txt"%(datapath, pfamid)
    WriteDomainColorDefFile(domain_colordef_file, domain_dict, domain_idlist, seqlen_dict, leaves_name_set)
    #Create the Itol class
    itl = Itol()
    #Set the tree file
    (datafile1, datafile2, datafile3, datafile4) = ("", "", "", "")
    if not os.path.exists(tree):
        print("tree file %s does not exist. Ignore" %(tree), file=sys.stderr)
        return 1
    fontsize = GetFontSize(numLeave)
    datafile1 = domain_colordef_file
    # colordeffile = subfam_colordef_file
    # if os.path.exists(colordeffile):
    #     itl.add_variable('colorDefinitionFile', colordeffile)
    #     itl.add_variable('colorDefinitionLabel', "Subfamilies")
    #===================================
    itl.add_variable('treeFile',tree)
    itl.add_variable('treeName','SD2')
    itl.add_variable('treeFormat','newick')
    #=================================== dataset 1: domain architecture
    if os.path.exists(datafile1):
        itl.add_variable('datafile1File',datafile1)
        itl.add_variable('datafile1Label','Domain architecture')
        itl.add_variable('datafile1Separator','comma')
        itl.add_variable('datafile1Type','domains')
        itl.add_variable('datafile1ProtSizeMax','1000')
        itl.add_variable('datafile1PreventOverlap','1')
        itl.add_variable('datafile1CirclesSpacing','100')
    #===================================
    # Check parameters
    # itl.print_variables()
    #Submit the tree
    print('')
    print('Uploading the tree. This may take some time depending on how large the tree is and how much load there is on the itol server')
    good_upload = itl.upload()
    if good_upload == False:
        print('There was an error:'+itl.comm.upload_output)
        sys.exit(1)
    #Read the tree ID
    print('Tree ID: '+str(itl.comm.tree_id))
    #Read the iTOL API return statement
    print('iTOL output: '+str(itl.comm.upload_output))
    #Website to be redirected to iTOL tree
    print('Tree Web Page URL: '+itl.get_webpage())
    # Warnings associated with the upload
    print('Warnings: '+str(itl.comm.warnings))
    #Export to pdf
    itol_exporter = itl.get_itol_export()
    #itol_exporter = itolexport.ItolExport()
    #itol_exporter.set_export_param_value('tree','18793532031912684633930')
    itol_exporter.set_export_param_value('format', 'eps')
    itol_exporter.set_export_param_value('display_mode',"2")
    itol_exporter.set_export_param_value('current_font_size',fontsize)
    itol_exporter.set_export_param_value('align_labels',"1")
    itol_exporter.set_export_param_value('datafileList','dataset1')
    extname = "-itol-sd2"
    epsfile = outpath + os.sep + pfamid + extname + '.eps'
    pdffile = outpath + os.sep + pfamid + extname + '.pdf'
    jpgfile = outpath + os.sep + pfamid + extname + '.jpg'
    pngfile = outpath + os.sep + pfamid + extname + '.png'
    thumbfile = outpath + os.sep + "thumb." + pfamid + extname + '.jpg'
    itol_exporter.export(epsfile)
    # eps -> pdf -> jpg -> 200px thumbnail via epstopdf/ImageMagick.
    os.system("epstopdf %s" % epsfile )
    os.system("convert %s %s" % (epsfile, jpgfile) )
    os.system("convert -thumbnail 200 %s %s" % (jpgfile, thumbfile))
    print('exported tree to ',pdffile)
#}}}
def Itol_Tree_m_sd3(pfamid, datapath, outpath):#{{{
    """Phylogenetic tree with species definition
    the Kindom use branch colordefinition, and others using color strips

    Builds a kingdom branch-color file plus three color-strip datasets for
    taxonomy levels 1-3 (phylum/class/order), uploads the tree to iTOL and
    exports eps/pdf/jpg images named *-itol-sd3.* to `outpath`.

    Returns 1 when the tree file is missing; calls sys.exit(1) when the
    iTOL upload fails.
    """
    tree = datapath + os.sep + pfamid + '.tree'
    t = Tree(tree)
    leaves = t.get_leaves()
    lst_leaves_name = []
    for leaf in leaves:
        lst_leaves_name.append(leaf.name)
    numLeave = len(lst_leaves_name)
    leaves_name_set = set(lst_leaves_name)
    # read species definition
    speciesfile = "%s/%s.species"%(datapath, pfamid)
    speciesDict = myfunc.Read_species_sd(speciesfile)
    # create branch color definition file for kingdom
    lst_kingdom = ["Archaea","Bacteria", "Eukaryota" ]
    lst_color_kingdom = ["#ff0000", "#0066ff","#cc6600"]
    species_colordef_file = "%s/%s.kingdom.colordef.txt"%(outpath, pfamid)
    color_dict_kingdom = {}
    this_speciesDict = {}
    for seqid in speciesDict:
        # level 0 of the lineage is the kingdom
        speciesname = speciesDict[seqid][0]
        this_speciesDict[seqid] = speciesname
    for i in range(len(lst_kingdom)):
        idd = lst_kingdom[i]
        color_dict_kingdom[idd] = lst_color_kingdom[i]
    myfunc.WriteKingdomColorDefFile(species_colordef_file, this_speciesDict, leaves_name_set, color_dict_kingdom)
    # generate the next three levels of classification
    for level in [1,2,3]:
        outfile = "%s/%s.species.level_%d.txt"%(outpath, pfamid, level)
        this_speciesDict = {}
        speciesIDSet = set([])
        for seqid in speciesDict:
            try:
                speciesname = speciesDict[seqid][level]
                speciesIDSet.add(speciesname)
                this_speciesDict[seqid] = speciesname
            except (IndexError, KeyError):
                # BUGFIX: the original read `except IndexError as KeyError`,
                # which only caught IndexError and rebound the builtin name
                # KeyError.  Sequences whose lineage is shorter than `level`
                # are simply skipped.
                pass
        color_dict = {}
        color_values = list(blue.range_to(red, len(speciesIDSet)))
        lst_speciesID = list(speciesIDSet)
        for i in range(len(lst_speciesID)):
            idd = lst_speciesID[i]
            color_dict[idd] = color_values[i].get_hex_l()
        myfunc.WriteSpeciesColorStripDefFile(outfile, this_speciesDict, leaves_name_set, color_dict)
    #Create the Itol class
    itl = Itol()
    #Set the tree file
    (datafile1, datafile2, datafile3, datafile4) = ("", "", "", "")
    if not os.path.exists(tree):
        print("tree file %s does not exist. Ignore" %(tree), file=sys.stderr)
        return 1
    fontsize = GetFontSize(numLeave)
    datafile1 = "%s/%s.species.level_%d.txt"%(outpath, pfamid, 1)
    datafile2 = "%s/%s.species.level_%d.txt"%(outpath, pfamid, 2)
    datafile3 = "%s/%s.species.level_%d.txt"%(outpath, pfamid, 3)
    colordeffile = species_colordef_file
    if os.path.exists(colordeffile):
        itl.add_variable('colorDefinitionFile', colordeffile)
        itl.add_variable('colorDefinitionLabel', "Kingdom")
    #===================================
    itl.add_variable('treeFile',tree)
    itl.add_variable('treeName','SD3')
    itl.add_variable('treeFormat','newick')
    #=================================== dataset 1: phylum color strip
    if os.path.exists(datafile1):
        itl.add_variable('datafile1File',datafile1)
        itl.add_variable('datafile1Label','Phylum')
        itl.add_variable('datafile1Separator','comma')
        itl.add_variable('datafile1Type','colorstrip')
        itl.add_variable('datafile1StripWidth','100')
        itl.add_variable('datafile1ColoringType','both')
        itl.add_variable('datafile1PreventOverlap','1')
    #=================================== dataset 2: class color strip
    if os.path.exists(datafile2):
        itl.add_variable('datafile2File',datafile2)
        itl.add_variable('datafile2Label','Class')
        itl.add_variable('datafile2Separator','comma')
        itl.add_variable('datafile2Type','colorstrip')
        itl.add_variable('datafile2StripWidth','100')
        itl.add_variable('datafile2ColoringType','both')
        itl.add_variable('datafile2PreventOverlap','1')
    #=================================== dataset 3: order color strip
    if os.path.exists(datafile3):
        itl.add_variable('datafile3File',datafile3)
        itl.add_variable('datafile3Label','Order')
        itl.add_variable('datafile3Separator','comma')
        itl.add_variable('datafile3Type','colorstrip')
        itl.add_variable('datafile3StripWidth','100')
        itl.add_variable('datafile3ColoringType','both')
        itl.add_variable('datafile3PreventOverlap','1')
    #===================================
    #Submit the tree
    print('')
    print('Uploading the tree. This may take some time depending on how large the tree is and how much load there is on the itol server')
    good_upload = itl.upload()
    if good_upload == False:
        print('There was an error:'+itl.comm.upload_output)
        sys.exit(1)
    #Read the tree ID
    print('Tree ID: '+str(itl.comm.tree_id))
    #Read the iTOL API return statement
    print('iTOL output: '+str(itl.comm.upload_output))
    #Website to be redirected to iTOL tree
    print('Tree Web Page URL: '+itl.get_webpage())
    # Warnings associated with the upload
    print('Warnings: '+str(itl.comm.warnings))
    #Export to pdf
    itol_exporter = itl.get_itol_export()
    itol_exporter.set_export_param_value('format', 'eps')
    itol_exporter.set_export_param_value('display_mode',"2")
    itol_exporter.set_export_param_value('current_font_size',fontsize)
    itol_exporter.set_export_param_value('align_labels',"1")
    itol_exporter.set_export_param_value('datafileList','dataset1')
    extname = "-itol-sd3"
    epsfile = outpath + os.sep + pfamid + extname + '.eps'
    pdffile = outpath + os.sep + pfamid + extname + '.pdf'
    jpgfile = outpath + os.sep + pfamid + extname + '.jpg'
    thumbfile = outpath + os.sep + "thumb." + pfamid + extname + '.jpg'
    itol_exporter.export(epsfile)
    # eps -> pdf -> jpg -> 200px thumbnail via epstopdf/ImageMagick.
    # (The unused pngfile variable from the original was dropped.)
    os.system("epstopdf %s" % epsfile )
    os.system("convert %s %s" % (epsfile, jpgfile) )
    os.system("convert -thumbnail 200 %s %s" % (jpgfile, thumbfile))
    print('exported tree to ',pdffile)
#}}}
def Itol_Tree_linear(treefile, fastafile, outpath):# {{{
    """
    Generate itol tree linear

    Uploads `treefile` to iTOL in normal (linear) display mode, attaching a
    color-range annotation generated from `fastafile` (via the helper script
    fasta2colorrange.py) when the fasta file exists, and exports a pdf named
    <rootname>.itol_linear.pdf to `outpath` (or next to the tree when
    `outpath` is empty).  Returns 1 on upload failure.
    """
    # BUGFIX: the original condition
    #     not treefile.endswith(".tree") or treefile.endswith(".tree.txt")
    # was, by operator precedence, true for every *.tree.txt file, copying it
    # to *.tree.txt.tree.  Intended: copy only when the name carries neither
    # recognized tree extension.
    if not treefile.endswith((".tree", ".tree.txt")):
        newfile = treefile + ".tree"
        shutil.copy2(treefile, newfile)
        treefile = newfile
    #Create the Itol class
    itl = Itol()
    #Set the tree file
    tree = treefile
    dirname = os.path.dirname(treefile)
    if dirname == "":
        dirname = "."
    if outpath == "":
        outpath = dirname
    elif not os.path.exists(outpath):
        os.system("mkdir -p %s"%(outpath))
    rootname = os.path.basename(os.path.splitext(treefile)[0])
    rtname_fastafile = os.path.basename(os.path.splitext(fastafile)[0])
    colorragefile = dirname + os.sep + rtname_fastafile + ".colorrage.txt"
    datafileList = [colorragefile]
    if os.path.exists(fastafile):
        # generate the color-range annotation from the fasta headers
        cmd = "python %s/fasta2colorrange.py %s %s > %s"%(rundir, fastafile, treefile,colorragefile)
        os.system(cmd)
    #===================================
    itl.add_file(tree)
    itl.params['treeName'] = rootname
    itl.params['treeFormat'] = 'newick'
    valid_datafileList = []
    for datafile in datafileList:
        if os.path.exists(datafile):
            itl.add_file(datafile)
            valid_datafileList.append(datafile)
    # datasets are referenced by upload-order index in datasets_visible
    datasets_list = [str(x) for x in range(len(valid_datafileList))]
    #===================================
    #Submit the tree
    print('')
    good_upload = itl.upload()
    if good_upload == False:
        print('There was an error:'+itl.comm.upload_output)
        return 1
    #Export to pdf
    itol_exporter = itl.get_itol_export()
    itol_exporter.set_export_param_value('format',"pdf")
    itol_exporter.set_export_param_value('display_mode',"1") #(1=normal, 2=circular, 3=unrooted)
    itol_exporter.set_export_param_value('line_width',"1")
    print(('datasets_visible',",".join(datasets_list)))
    itol_exporter.set_export_param_value('datasets_visible',",".join(datasets_list))
    pdffile = outpath + os.sep + rootname + '.itol_linear.pdf'
    print('Exporting to pdffile %s'%(pdffile))
    itol_exporter.export(pdffile)
    print(("Phylogenetic tree has been output to %s"%(pdffile)))
# }}}
def main(g_params):#{{{
    """Parse command-line options and run the requested iTOL tree jobs.

    Recognized options: -datapath, -m/-method, -treefile, -fastafile,
    -l (ID list file), -outpath; bare arguments and anything after "--"
    are collected as Pfam IDs.  Returns 1 on usage errors.
    """
    argv = sys.argv
    numArgv = len(argv)
    if numArgv < 2:
        PrintHelp()
        return 1

    datapath = "."
    outpath = './'
    idList = []
    idListFile = ''
    treefile = ""
    fastafile = ""
    i = 1
    isNonOptionArg = False
    while i < numArgv:
        if isNonOptionArg:
            # the previous token was "--": take this one literally as an ID
            isNonOptionArg = False
            idList.append(argv[i])
            i += 1
        elif argv[i] == "--":
            isNonOptionArg = True
            i += 1
        elif argv[i][0] == "-":
            if argv[i] in ["-h", "--help"]:
                PrintHelp()
                return 1
            elif argv[i] in ["-datapath", "--datapath"]:
                datapath = argv[i + 1]
                i += 2
            elif argv[i] in ["-m", "--m", "-method", "--method"]:
                g_params['method'], i = myfunc.my_getopt_str(argv, i)
            elif argv[i] in ["-treefile", "--treefile"]:
                treefile = argv[i + 1]
                i += 2
            elif argv[i] in ["-fastafile", "--fastafile"]:
                fastafile = argv[i + 1]
                i += 2
            elif argv[i] in ["-l", "--l"]:
                idListFile = argv[i + 1]
                i += 2
            elif argv[i] in ["-outpath", "--outpath"]:
                outpath = argv[i + 1]
                i += 2
            else:
                print("Error! Wrong argument:%s" % argv[i], file=sys.stderr)
                return 1
        else:
            idList.append(argv[i])
            i += 1

    if idListFile != "":
        idList += myfunc.ReadIDList(idListFile)

    if idList:
        os.system("mkdir -p %s" % outpath)
        # dispatch table replaces the original if/elif chain; unknown
        # methods are skipped silently, matching the original behavior
        method_funcs = {
            "0": Itol_Tree_m0,
            "1": Itol_Tree_m1,
            "sd1": Itol_Tree_m_sd1,
            "sd2": Itol_Tree_m_sd2,
            "sd3": Itol_Tree_m_sd3,
        }
        for cnt, pfamid in enumerate(idList):
            print("================== ", cnt , pfamid, " ====================")
            func = method_funcs.get(g_params['method'])
            if func is not None:
                func(pfamid, datapath, outpath)

    if treefile != "" and g_params['method'] == "linear":
        Itol_Tree_linear(treefile, fastafile, outpath)
#}}}
def InitGlobalParameter():#{{{
    """Return the default global-parameter dictionary (method "0")."""
    return {'method': "0"}
#}}}
if __name__ == '__main__':
    # entry point: build the default parameters, run, and propagate the
    # exit status to the shell
    g_params = InitGlobalParameter()
    sys.exit(main(g_params))
| 37.992357 | 138 | 0.638345 | 3,572 | 29,824 | 5.173012 | 0.112262 | 0.02825 | 0.062886 | 0.039777 | 0.712523 | 0.68633 | 0.663816 | 0.632861 | 0.618682 | 0.602609 | 0 | 0.018607 | 0.208926 | 29,824 | 784 | 139 | 38.040816 | 0.764591 | 0.130063 | 0 | 0.551786 | 0 | 0.008929 | 0.229939 | 0.02526 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019643 | false | 0.001786 | 0.017857 | 0 | 0.060714 | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
779695a6c206665b577fbc40ca7b7a3eb86e2492 | 1,665 | py | Python | scripts/howorka/interpolation_points.py | jhwnkim/nanopores | 98b3dbb5d36464fbdc03f59d224d38e4255324ce | [
"MIT"
] | 8 | 2016-09-07T01:59:31.000Z | 2021-03-06T12:14:31.000Z | scripts/howorka/interpolation_points.py | jhwnkim/nanopores | 98b3dbb5d36464fbdc03f59d224d38e4255324ce | [
"MIT"
] | null | null | null | scripts/howorka/interpolation_points.py | jhwnkim/nanopores | 98b3dbb5d36464fbdc03f59d224d38e4255324ce | [
"MIT"
] | 4 | 2017-12-06T17:43:01.000Z | 2020-05-01T05:41:14.000Z | "create 2D point set for Howorka model where force shall be evaluated."
import numpy as np
from itertools import product
import math
import matplotlib.pyplot as plt
import nanopores
gauss = np.polynomial.legendre.leggauss
nanopores.add_params(
h = 0.5,
hout = 1.,
Ry = 10.,
Rx = 3.,
)
def points(h, hout, r0, r, l0, Rx, Ry):
    """Build the 2D evaluation grid for the Howorka pore.

    Inside the pore: Gauss-Legendre nodes in x (resolution ~h/2) crossed
    with a uniform y grid.  Above and below the pore: Gauss grids in both
    directions with coarser spacing hout.  Returns a list of (x, y) tuples.
    """
    leggauss = np.polynomial.legendre.leggauss
    # effective pore radius seen by the molecule center
    reff = r0 - r

    # --- inside the pore ---
    k = int(math.ceil(2. * reff / h))
    nodes, _ = leggauss(2 * k + 1)
    x_in = reff * nodes[k:]                     # keep the non-negative half
    m = int(math.ceil(2. * l0 / h))
    y_in = np.linspace(-l0 - r + h / 2, l0 + r - h / 2, m)
    pore = list(product(x_in, y_in))

    # --- outside the pore (top and bottom, mirrored) ---
    l = int(math.ceil(Rx / h))
    nodes, _ = leggauss(2 * l)
    x_out = Rx - Rx * nodes[l:]
    span = Ry - l0 - r
    n = int(math.ceil(span / hout))
    nodes, _ = leggauss(2 * n)
    y_top = l0 + r + span + span * nodes[:n]
    top = list(product(x_out, y_top))
    bottom = list(product(x_out, -y_top))

    return top + pore + bottom
# TODO: more points at edge y=l0 in x=0,r0 ?

# load parameters and create points
from nanopores.tools import fields
from nanopores.geometries.H_cyl_geo.params_geo import r0, rMolecule, l0, r1
# x range extends to the outer wall radius r1 plus the molecule radius;
# l0/2 is presumably the half-height of the inner region -- TODO confirm
X = points(h, hout, r0, rMolecule, l0/2, r1 + rMolecule, Ry)
# split the (x, y) tuples into separate coordinate lists for plotting
x, y = [z[0] for z in X], [z[1] for z in X]
plt.scatter(x, y)
plt.show()
#fields.save_entries("xforce", PARAMS, X=X, N=len(X))
#print "Created and stored %d evaluation points." %(len(X),)
#fields.update() | 25.227273 | 75 | 0.596997 | 295 | 1,665 | 3.352542 | 0.383051 | 0.008089 | 0.044489 | 0.026289 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.030919 | 0.261862 | 1,665 | 66 | 76 | 25.227273 | 0.7738 | 0.329129 | 0 | 0 | 0 | 0 | 0.058824 | 0 | 0 | 0 | 0 | 0.015152 | 0 | 1 | 0.025641 | false | 0 | 0.179487 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
77986af2fb4a4ca5a35c228e734e2824ebae00e3 | 248 | py | Python | object_oriented_programming/exercise_online_shopping/review.py | jepster/python_advanced_techniques | f4b0e0dda7b66be55f650f9f902e735d3f5a9f64 | [
"MIT"
] | null | null | null | object_oriented_programming/exercise_online_shopping/review.py | jepster/python_advanced_techniques | f4b0e0dda7b66be55f650f9f902e735d3f5a9f64 | [
"MIT"
] | null | null | null | object_oriented_programming/exercise_online_shopping/review.py | jepster/python_advanced_techniques | f4b0e0dda7b66be55f650f9f902e735d3f5a9f64 | [
"MIT"
] | null | null | null | class Review:
    def __init__(self, content, user, product):
        """Store the review text together with its author and product."""
        self.content = content    # review body text
        self.user = user          # author of the review
        self.product = product    # product being reviewed
def __str__(self):
return f"Review of {self.product} by {self.user}: '{self.content}'" | 31 | 75 | 0.616935 | 31 | 248 | 4.677419 | 0.419355 | 0.227586 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.262097 | 248 | 8 | 75 | 31 | 0.79235 | 0 | 0 | 0 | 0 | 0 | 0.228916 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0 | 0.142857 | 0.571429 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 5 |
77a0fbf28b7a82846992531063e6db757b73b21f | 961 | py | Python | dk/topdanmark/pythonsampleapplication/calculator.py | ManishGandhiDodda/pythontest | 0b803eba8a0f78d63fea92471f0360b9458e258b | [
"MIT"
] | null | null | null | dk/topdanmark/pythonsampleapplication/calculator.py | ManishGandhiDodda/pythontest | 0b803eba8a0f78d63fea92471f0360b9458e258b | [
"MIT"
] | null | null | null | dk/topdanmark/pythonsampleapplication/calculator.py | ManishGandhiDodda/pythontest | 0b803eba8a0f78d63fea92471f0360b9458e258b | [
"MIT"
] | null | null | null | import math
"""
Class Calculator
This class does some simple mathematic operations.
"""
class Calculator:
    """Basic arithmetic helpers exposed as static methods."""

    @staticmethod
    def addition(a, b):
        """Return the sum of a and b."""
        return a + b

    @staticmethod
    def subtraction(a, b):
        """Return a minus b."""
        return a - b

    @staticmethod
    def multiplication(a, b):
        """Return the product of a and b."""
        return a * b

    @staticmethod
    def division(a, b):
        """Return a divided by b (true division)."""
        return a / b

    @staticmethod
    def floor_it(a):
        """Return a rounded down to the nearest integer."""
        return math.floor(a)
| 16.568966 | 66 | 0.5359 | 102 | 961 | 5.029412 | 0.401961 | 0.031189 | 0.116959 | 0.140351 | 0.226121 | 0.226121 | 0.120858 | 0 | 0 | 0 | 0 | 0 | 0.380853 | 961 | 57 | 67 | 16.859649 | 0.862185 | 0.319459 | 0 | 0.294118 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.294118 | false | 0 | 0.058824 | 0 | 0.705882 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
77a214f5777df7a0c6f55cff48bff29b3f5f4ff6 | 1,961 | py | Python | capture/command_scripts/stream_handler.py | CenturyLink/ExpertDHCP | 4dbcd36da7468b9a95a7869df19172fe890cefd2 | [
"MIT"
] | 1 | 2022-03-08T00:38:33.000Z | 2022-03-08T00:38:33.000Z | capture/command_scripts/stream_handler.py | aaronlumen/ExpertDHCP | 4dbcd36da7468b9a95a7869df19172fe890cefd2 | [
"MIT"
] | null | null | null | capture/command_scripts/stream_handler.py | aaronlumen/ExpertDHCP | 4dbcd36da7468b9a95a7869df19172fe890cefd2 | [
"MIT"
] | 2 | 2022-02-11T17:13:48.000Z | 2022-03-08T00:36:03.000Z | import subprocess
import shlex
import re
import json
class StreamHandler(object):
    """
    A singleton class to handle the stream of dump and
    return stdout in generator fashion
    """
    __instance = None

    def __init__(self,
                 dhcp_dump_command="sudo {} -i {}",
                 dump_path='/usr/bin/dhcpdump_json',
                 dump_if='eno1',
                 msg_sep_regex=r'(-{5,})'):
        """Build the dump command line and register the singleton.

        Raises Exception if an instance was already created.
        """
        self.dhcp_dump_command = dhcp_dump_command.format(dump_path, dump_if)
        self.message_seperator_pattern = re.compile(msg_sep_regex)
        self.last_field = None
        if StreamHandler.__instance is not None:
            print("Instance already exists for singleton class StreamHandler")
            raise Exception("This class is a singleton!")
        else:
            print("Created instance of class StreamHandler")
            StreamHandler.__instance = self

    def generate_dump(self):
        """Run dhcpdump as a subprocess and yield one parsed JSON object
        per line of its stdout."""
        try:
            print(shlex.split(self.dhcp_dump_command))
            dhcp_dump_stream_process = subprocess.Popen(shlex.split(self.dhcp_dump_command),
                                                        stdout=subprocess.PIPE,
                                                        stderr=subprocess.PIPE)
            # print("Killable PID dhcpdump",dhcp_dump_stream_process.pid)
            for msg in dhcp_dump_stream_process.stdout:
                print('-' * 100)
                print(msg)
                print('-' * 100)
                yield json.loads(msg)
        except Exception as excp:
            import sys, os
            print("some exception occured in stream handler")
            print(excp)
            exc_type, exc_obj, exc_tb = sys.exc_info()
            # BUG FIX: the original passed the format string and the values
            # as separate print() arguments, so "%s, %s, %s" was printed
            # literally; actually interpolate the values instead.
            print('%s, %s, %s' % (exc_type, exc_obj, exc_tb))
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print('%s, %s, %s' % (str(exc_type), str(fname),
                                  str(exc_tb.tb_lineno)))
| 36.314815 | 92 | 0.562978 | 225 | 1,961 | 4.657778 | 0.422222 | 0.061069 | 0.071565 | 0.072519 | 0.123092 | 0.123092 | 0 | 0 | 0 | 0 | 0 | 0.007015 | 0.345742 | 1,961 | 53 | 93 | 37 | 0.809821 | 0.074452 | 0 | 0.05 | 0 | 0 | 0.128707 | 0.012311 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.125 | 0 | 0.225 | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77a2496f709dc36f20017daf8f44f502922f1145 | 4,389 | py | Python | pypdnsrest/parsers.py | raspi/pypdnsrest | d45f89721108d0718c791de91c2cdf31ab7cee9b | [
"Apache-2.0"
] | 2 | 2017-05-16T14:45:27.000Z | 2018-03-02T08:50:00.000Z | pypdnsrest/parsers.py | raspi/pypdnsrest | d45f89721108d0718c791de91c2cdf31ab7cee9b | [
"Apache-2.0"
] | 4 | 2017-08-19T03:46:25.000Z | 2019-04-04T14:13:51.000Z | pypdnsrest/parsers.py | raspi/pypdnsrest | d45f89721108d0718c791de91c2cdf31ab7cee9b | [
"Apache-2.0"
] | 1 | 2020-02-09T17:17:33.000Z | 2020-02-09T17:17:33.000Z | # -*- coding: utf8 -*-
"""
Convert REST JSON dict to DNSRecordBase classes
"""
import logging
log = logging.getLogger(__name__)
from datetime import timedelta
from .dnsrecords import DNSRecordBase
class RecordParser():
    """
    Base parser class
    """

    def __init__(self, *args, **kwargs):
        # nothing to configure in the base class
        pass

    def parse(self, name: str, data: str, ttl: int) -> DNSRecordBase:
        # every concrete parser must override this
        raise NotImplementedError(u"Parser not implemented.")
class SoaRecordParser(RecordParser):
    def parse(self, name: str, data: str, ttl: int) -> DNSRecordBase:
        """Parse an SOA record string of seven space-separated fields:
        nameserver, email, serial, refresh, retry, expire, ttl."""
        from pypdnsrest.dnsrecords import DNSSoaRecord
        from pypdnsrest.dnsrecords import DNSSoaRecordData

        # exactly six separators means exactly seven fields
        if data.count(u" ") != 6:
            raise ValueError("Invalid value: '{0}'".format(data))

        ns, email, serial, refresh, retry, expire, soa_ttl = data.split(" ")
        record_data = DNSSoaRecordData(
            nameserver=ns,
            email=email,
            serial=int(serial),
            refresh=timedelta(seconds=int(refresh)),
            retry=timedelta(seconds=int(retry)),
            expire=timedelta(seconds=int(expire)),
            ttl=timedelta(seconds=int(soa_ttl)),
        )
        record = DNSSoaRecord(name, timedelta(seconds=ttl))
        record.set_data(record_data)
        return record
class MxRecordParser(RecordParser):
    def parse(self, name: str, data: str, ttl: int) -> DNSRecordBase:
        """Parse an MX record of the form "<priority> <server>"."""
        from pypdnsrest.dnsrecords import DNSMxRecord
        from pypdnsrest.dnsrecords import DNSMxRecordData

        parts = data.split(" ")
        record_data = DNSMxRecordData(priority=int(parts[0]), server=parts[1])
        record = DNSMxRecord(name, timedelta(seconds=ttl))
        record.set_data(record_data)
        return record
class ARecordParser(RecordParser):
    def parse(self, name: str, data: str, ttl: int) -> DNSRecordBase:
        """Parse an A record whose data is a dotted-quad IPv4 address."""
        from ipaddress import IPv4Address
        from pypdnsrest.dnsrecords import DNSARecord

        record = DNSARecord(name, timedelta(seconds=ttl))
        record.set_data(IPv4Address(data))
        return record
class AaaaRecordParser(RecordParser):
    def parse(self, name: str, data: str, ttl: int) -> DNSRecordBase:
        """Parse an AAAA record whose data is an IPv6 address string."""
        from ipaddress import IPv6Address
        from pypdnsrest.dnsrecords import DNSAaaaRecord

        record = DNSAaaaRecord(name, timedelta(seconds=ttl))
        record.set_data(IPv6Address(data))
        return record
class CnameRecordParser(RecordParser):
    def parse(self, name: str, data: str, ttl: int) -> DNSRecordBase:
        """Parse a CNAME record; the target name is stored as-is."""
        from pypdnsrest.dnsrecords import DNSCNameRecord

        record = DNSCNameRecord(name, timedelta(seconds=ttl))
        record.set_data(data)
        return record
class NsRecordParser(RecordParser):
    def parse(self, name: str, data: str, ttl: int) -> DNSRecordBase:
        """Parse an NS record; the nameserver string is stored as-is."""
        from pypdnsrest.dnsrecords import DNSNsRecord

        record = DNSNsRecord(name, timedelta(seconds=ttl))
        record.set_data(data)
        return record
class PtrRecordParser(RecordParser):
    def _prepare_data(self, data: str):
        """Strip the reverse-DNS suffix and punctuation from *data*, then
        reverse the remaining dot-separated labels.
        """
        import re

        cleaned = data.lower()
        cleaned = cleaned.replace(u"in-addr.arpa", '')
        cleaned = cleaned.replace(u"ip6.arpa", '')
        cleaned = cleaned.strip(".")
        # drop anything that is not a hex digit or a dot
        cleaned = re.sub("""[^\da-f\.]*""", '', cleaned, flags=re.IGNORECASE)
        if cleaned.count(".") < 3:
            raise ValueError("Invalid data: {0}".format(cleaned))
        # Reverse 1.0.168.192 -> 192.168.0.1
        return ".".join(reversed(cleaned.split(".")))

    def parse(self, name: str, data: str, ttl: int) -> DNSRecordBase:
        """Parse a PTR record; accepts in-addr.arpa (IPv4) and ip6.arpa
        (IPv6) reverse names."""
        lowered = data.lower()
        if lowered.find(u"in-addr.arpa") > 0:
            from ipaddress import IPv4Address
            addr = IPv4Address(self._prepare_data(data))
        elif lowered.find(u"ip6.arpa") > 0:
            from ipaddress import IPv6Address
            digits = "".join(self._prepare_data(data).split("."))
            # regroup the nibbles into colon-separated 4-digit blocks
            grouped = ":".join(digits[i:i + 4] for i in range(0, len(digits), 4))
            addr = IPv6Address(grouped)
        else:
            raise ValueError(u"Invalid PTR value: '{0}'".format(data))

        from pypdnsrest.dnsrecords import DNSPtrRecord
        record = DNSPtrRecord(name, timedelta(seconds=ttl))
        record.set_data(addr)
        return record
class TxtRecordParser(RecordParser):
    def parse(self, name: str, data: str, ttl: int):
        """Parse a TXT record; the raw string is stored unchanged."""
        from pypdnsrest.dnsrecords import DNSTxtRecord

        record = DNSTxtRecord(name, timedelta(seconds=ttl))
        record.set_data(data)
        return record
| 33 | 105 | 0.621326 | 504 | 4,389 | 5.367063 | 0.22619 | 0.07098 | 0.088725 | 0.110906 | 0.392237 | 0.392237 | 0.392237 | 0.367837 | 0.343438 | 0.343438 | 0 | 0.014364 | 0.2545 | 4,389 | 132 | 106 | 33.25 | 0.812347 | 0.027797 | 0 | 0.326087 | 0 | 0 | 0.033938 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.119565 | false | 0.01087 | 0.195652 | 0 | 0.51087 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
77a4bcb4eb5aca4624577ad893ccf4c83ff54da0 | 1,388 | py | Python | release/stubs.min/System/Security/AccessControl_parts/AuthorizationRuleCollection.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | release/stubs.min/System/Security/AccessControl_parts/AuthorizationRuleCollection.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | release/stubs.min/System/Security/AccessControl_parts/AuthorizationRuleCollection.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | class AuthorizationRuleCollection(ReadOnlyCollectionBase, ICollection, IEnumerable):
"""
Represents a collection of System.Security.AccessControl.AuthorizationRule objects.
AuthorizationRuleCollection()
"""
    def AddRule(self, rule):
        """ AddRule(self: AuthorizationRuleCollection,rule: AuthorizationRule) """
        # auto-generated .NET stub: the implementation lives in the CLR
        pass
    def CopyTo(self, rules, index):
        """
        CopyTo(self: AuthorizationRuleCollection,rules: Array[AuthorizationRule],index: int)
        Copies the contents of the collection to an array.
        rules: An array to which to copy the contents of the collection.
        index: The zero-based index from which to begin copying.
        """
        # auto-generated .NET stub: the implementation lives in the CLR
        pass
    def __getitem__(self, *args):
        """ x.__getitem__(y) <==> x[y] """
        # auto-generated .NET stub: indexing is provided by the CLR collection
        pass
    def __init__(self, *args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        # auto-generated .NET stub constructor
        pass
    def __iter__(self, *args):
        """ __iter__(self: IEnumerable) -> object """
        # auto-generated .NET stub: iteration is provided by the CLR collection
        pass
InnerList = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Gets the list of elements contained in the System.Collections.ReadOnlyCollectionBase instance.
"""
| 30.173913 | 221 | 0.657781 | 146 | 1,388 | 5.869863 | 0.417808 | 0.032672 | 0.056009 | 0.066511 | 0.192532 | 0.131855 | 0.131855 | 0.131855 | 0.131855 | 0.131855 | 0 | 0 | 0.232709 | 1,388 | 45 | 222 | 30.844444 | 0.804695 | 0.520173 | 0 | 0.416667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.416667 | false | 0.416667 | 0 | 0 | 0.583333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 3 |
77a4ff6d26d09b8ed1fd64033110e651fd78bc73 | 16,460 | py | Python | sudoku_solve2.py | alfille/sudoku_count | 871794378c866f024acfc495d20f45478f3a0b99 | [
"MIT"
] | null | null | null | sudoku_solve2.py | alfille/sudoku_count | 871794378c866f024acfc495d20f45478f3a0b99 | [
"MIT"
] | null | null | null | sudoku_solve2.py | alfille/sudoku_count | 871794378c866f024acfc495d20f45478f3a0b99 | [
"MIT"
] | null | null | null | import sys
import tkinter as tk
import tkinter.font as tkfont
import tkinter.filedialog as tkfile
import tkinter.messagebox as tkmessage
import argparse
import ctypes
import platform
import signal
def signal_handler(signal, frame):
    """SIGINT handler: announce the forced shutdown and exit cleanly."""
    print("\nForced end\n")
    sys.exit(0)
class Persist(tk.Frame):
    """Process-wide state that survives rebuilding the main window.

    The GUI destroys and recreates the whole Tk application whenever the
    board size or an option changes; everything that must outlive that
    cycle (options, loaded board data, the compiled solver libraries)
    lives here as class attributes.
    """
    SUBSIZE = 3            # sub-square dimension; board is SUBSIZE^2 x SUBSIZE^2
    X = False              # X (diagonal) sudoku variant
    Window = False         # "window pane" variant
    Debug = False          # pass debug flag to the C solver
    Fsize = 14             # menu/board font size
    GameStatus = "None"    # "None" or "Unique" (show uniqueness in status bar)
    Data = None            # board loaded from file, consumed by Sudoku.SetData
    solve_lib = None       # ctypes handle of the active solver library
    s_lib = {}             # cache: SUBSIZE -> loaded library handle
    Lib = {}               # cache: SUBSIZE -> bool, library already loaded?
    mode = "normal"
    Legal = True           # restrict pop-up choices to legal values

    @classmethod
    def LibSet(cls):
        """Mark every supported board size (2..6) as not-yet-loaded."""
        for i in range(2, 7):
            cls.Lib[i] = False

    @classmethod
    def LibUse(cls):
        """Load (at most once) and select the solver library for the
        current board size."""
        if not cls.Lib[cls.SUBSIZE]:
            # Shared C library, e.g. ./sudoku2_lib9.so for a 9x9 board
            lib_base = "./"  # location
            lib_base += "sudoku2_lib"  # base name
            lib_base += str(cls.SUBSIZE * cls.SUBSIZE)
            # BUG FIX: the original used two independent "if" statements,
            # so on Windows both ".dll" and ".dylib" were appended; use a
            # single if/elif/else chain to pick exactly one suffix.
            system = platform.uname()[0]
            if system == "Windows":
                lib_base += ".dll"
            elif system == "Linux":
                lib_base += ".so"
            else:
                lib_base += ".dylib"
            # load library
            cls.s_lib[cls.SUBSIZE] = ctypes.cdll.LoadLibrary(lib_base)
            cls.Lib[cls.SUBSIZE] = True
        cls.solve_lib = cls.s_lib[cls.SUBSIZE]
class Sudoku(tk.Frame):
color=["dark blue","yellow"]
solution = False
def __init__(self, master=None):
super().__init__(master)
self.SIZE = Persist.SUBSIZE * Persist.SUBSIZE
self.TOTALSIZE = self.SIZE * self.SIZE
self.master = master
self.option_setup()
self.Widget()
self.Menu()
if ( Persist.Data ):
self.SetData()
else:
self.BadData()
self.win.update()
self.set_win_sizes()
def set_win_sizes( self ):
# needed for popup
self.tilex=self.but[0][0].winfo_width()
self.tiley=self.but[0][0].winfo_height()
self.popx = self.tilex*Persist.SUBSIZE
self.popy = self.tiley*Persist.SUBSIZE
self.winx = self.win.winfo_screenwidth()
self.winy = self.win.winfo_screenheight()
def UnRed( self ):
if self.solution:
# need to clear old solution
for i in range(self.SIZE):
for j in range(self.SIZE):
self.but[i][j].configure(fg="black")
self.solution = False
def set_square(self,i,j,n):
self.but[i][j].configure(text=n)
self.popup_done( i , j )
self.UnRed()
def popup_done( self, i, j ):
self.pop.destroy()
self.but[i][j].configure(relief="raised")
self.Status()
def Status( self ):
self.status.configure(text="Edit mode")
if ( Persist.GameStatus == "Unique" ):
self.status.configure( text=["Unsolvable","Unique","Not unique"][self.Unique()] )
def popup_force_done( self, i, j, force ):
self.pop.destroy()
self.but[i][j].configure(relief="raised")
self.Status()
self.Popup(i,j,not force)
def Popup( self,i,j,force):
self.but[i][j].configure(relief="sunken")
self.pop=tk.Toplevel()
show = False
if Persist.Legal:
goodlist = self.Available(i,j)
else:
goodlist = [x+1 for x in range(self.SIZE)]
t = [0 for n in range(self.SIZE+1)]
# figure location
self.win.update()
x = self.but[i][j].winfo_rootx()+self.tilex
y = self.but[i][j].winfo_rooty()
if x+self.popx > self.winx:
x -= self.tilex+self.popx
if x<0:
x=0
if y+self.popy > self.winy:
y = self.winy - self.popy
self.pop.geometry('+%d+%d' % (x,y) )
for si in range(Persist.SUBSIZE):
for sj in range(Persist.SUBSIZE):
n = si*Persist.SUBSIZE+sj+1
t[n]=tk.Button(self.pop,text=str(n),borderwidth=3,height=1,width=1,font=self.font, state='normal' if (n in goodlist) or not force else 'disabled', command=lambda i=i,j=j,n=str(n): self.set_square(i,j,n))
t[n].grid(row=si+1,column=sj)
if 0 in goodlist:
tk.Button(self.pop,text="force" if force else "unforce",borderwidth=3,height=1,font=self.font,command=lambda i=i,j=j: self.popup_force_done(i,j,force)).grid(row=0,columnspan=Persist.SUBSIZE,sticky="EW")
tk.Button(self.pop,text="Clear",borderwidth=3,height=1,font=self.font,command=lambda i=i,j=j,n=" ": self.set_square(i,j,n)).grid(columnspan=Persist.SUBSIZE,sticky="EW")
tk.Button(self.pop,text="Back",borderwidth=3,height=1,font=self.font,command=lambda i=i,j=j: self.popup_done(i,j)).grid(columnspan=Persist.SUBSIZE,sticky="EW")
self.pop.grab_set()
def Clear(self):
self.status.configure(text="Clearing...")
for i in range(self.SIZE):
for j in range(self.SIZE):
self.but[i][j].configure(text=" ")
self.Status()
self.UnRed()
def Solve(self):
self.status.configure(text="Solving...")
self.master.update()
arr = (ctypes.c_int * self.TOTALSIZE)(-1)
k = 0
self.solution = True
for i in range(self.SIZE):
for j in range(self.SIZE):
arr[k] = -1 # default blank
t = self.but[i][j].cget('text')
if t != " ":
arr[k] = int(t) # 1-based values for squares
self.but[i][j].configure(fg="red")
else:
arr[k] = 0
k += 1
x = 1 if Persist.X else 0
w = 1 if Persist.Window else 0
d = 1 if Persist.Debug else 0
sol = Persist.solve_lib.Solve(x,w,d,arr)
while True:
if sol == 0:
self.status.configure(text="Not solvable")
for i in range(self.SIZE):
for j in range(self.SIZE):
if self.but[i][j].cget('fg') != 'red':
self.but[i][j].configure(text=" ")
self.master.update()
break
if sol == 1:
self.status.configure(text="Successfully solved")
k = 0
for i in range(self.SIZE):
for j in range(self.SIZE):
if arr[k] > 0 :
self.but[i][j].configure(text=str(arr[k])) # 1-based text values
if self.but[i][j].cget('fg') == 'blue':
self.but[i][j].configure(fg='black')
else:
self.but[i][j].configure(text=" ")
k += 1
self.master.update()
break
if sol < 0:
self.status.configure(text="<"+str(-sol)+"> Still solving...")
k = 0
for i in range(self.SIZE):
for j in range(self.SIZE):
if self.but[i][j].cget('fg') != 'red':
self.but[i][j].configure(fg='blue')
if arr[k] > 0 :
self.but[i][j].configure(text=str(arr[k])) # 1-based text values
else:
self.but[i][j].configure(text=" ")
k += 1
self.master.update()
sol = Persist.solve_lib.Resume()
def Test( self ):
if not self.just_test():
tkmessage.showinfo("Position test","Not valid")
def Available(self,testi,testj):
arr = (ctypes.c_int * self.TOTALSIZE)(-1)
ret = (ctypes.c_int * self.SIZE)(-1)
k = 0
for i in range(self.SIZE):
for j in range(self.SIZE):
t = self.but[i][j].cget('text')
if i==testi and j==testj: # blank tested location
arr[k] = 0
elif t != " ":
arr[k] = int(t) # 1-based values for squares
self.but[i][j].configure(fg="red")
else:
arr[k] = 0
k += 1
x = 1 if Persist.X else 0
w = 1 if Persist.Window else 0
d = 1 if Persist.Debug else 0
Persist.solve_lib.TestAvailable(x,w,d,testi, testj, arr,ret)
return ret
def Unique(self):
arr = (ctypes.c_int * self.TOTALSIZE)(-1)
k = 0
for i in range(self.SIZE):
for j in range(self.SIZE):
t = self.but[i][j].cget('text')
if t != " ":
arr[k] = int(t) # 1-based values for squares
else:
arr[k] = 0
k += 1
x = 1 if Persist.X else 0
w = 1 if Persist.Window else 0
d = 1 if Persist.Debug else 0
return Persist.solve_lib.TestUnique(x,w,d, arr)
def just_test(self):
arr = (ctypes.c_int * self.TOTALSIZE)(-1)
k = 0
for i in range(self.SIZE):
for j in range(self.SIZE):
t = self.but[i][j].cget('text')
if t != " ":
arr[k] = int(t) # 1-based values for squares
self.but[i][j].configure(fg="red")
else:
arr[k] = 0
k += 1
x = 1 if Persist.X else 0
w = 1 if Persist.Window else 0
d = 1 if Persist.Debug else 0
return (Persist.solve_lib.Test(x,w,d,arr)==1)
def Quit(self):
sys.exit()
self.master.quit()
def Widget(self):
self.win = tk.Frame(self.master,borderwidth=2,relief="flat",background="white")
self.win.pack(side="top")
self.buttons=tk.Frame(self.master,borderwidth=2,relief="flat",background="white")
if Persist.SUBSIZE > 2:
tk.Label(self.buttons,text=" {0}x{0} ".format(self.SIZE),relief="sunken",anchor="c",font=self.font).pack(side="left",fill=tk.Y)
tk.Button(self.buttons,text="Solve",command=self.Solve,font=self.font).pack(side="left")
tk.Button(self.buttons,text="Clear",command=self.Clear,font=self.font).pack(side="left")
tk.Button(self.buttons,text="Exit",command=self.Quit,font=self.font).pack(side="left")
self.status = tk.Label(self.buttons,text="Edit mode",relief="sunken",anchor="e")
self.status.pack(side="left",fill="both",expand=1)
self.buttons.pack(side="bottom",fill=tk.X)
self.but = [[0 for i in range(self.SIZE)] for j in range(self.SIZE)]
for si in range(Persist.SUBSIZE):
for sj in range(Persist.SUBSIZE):
f = tk.Frame(self.win,background=self.color[(si+sj)%2],borderwidth=2,relief="flat")
f = tk.Frame(self.win,background=self.color[(si+sj)%2],borderwidth=2,relief="flat")
f.grid(row=si,column=sj)
for ssi in range(Persist.SUBSIZE):
for ssj in range(Persist.SUBSIZE):
i = si*Persist.SUBSIZE+ssi
j = sj*Persist.SUBSIZE+ssj
self.but[i][j] = tk.Button(f,text=" ",borderwidth=3,height=1,width=1,font=self.font,command=lambda i=i,j=j: self.Popup(i,j,True))
self.but[i][j].grid(row=ssi, column=ssj)
if Persist.X and ((i==j) or (i == self.SIZE-j-1)):
self.but[i][j].configure(background="light yellow")
if Persist.Window:
if (i % (Persist.SUBSIZE+1) > 0) and (j % (Persist.SUBSIZE+1) > 0):
self.but[i][j].configure(background="aquamarine")
if Persist.X and ((i==j) or (i == self.SIZE-j-1)):
self.but[i][j].configure(background="pale green")
self.Status()
def BadData( self ):
for i in range(self.SIZE):
for j in range(self.SIZE):
self.but[i][j].configure(text=str(1+(i+j)%self.SIZE))
def SetData( self ):
for i in range(self.SIZE):
for j in range(self.SIZE):
self.but[i][j].configure(fg='black')
s = str(Persist.Data[i][j])
if s == '0':
s = ' '
self.but[i][j].configure(text=s) # 1-based text values
Persist.Data = None
def about(self):
print("Sudoku Solve by Paul Alfille 2020")
def Size(self) :
if ( self.ss_choose != Persist.SUBSIZE ):
Persist.SUBSIZE = self.ss_choose.get()
self.master.destroy()
def Option(self):
if Persist.X != self.X.get():
Persist.X = self.X.get()
self.master.destroy()
if Persist.Window != self.Window.get():
Persist.Window = self.Window.get()
self.master.destroy()
Persist.Debug = self.Debug.get()
Persist.Legal = self.Legal.get()
def option_setup(self):
# match with Option(), set in Menu()
self.font = tkfont.Font(weight="bold",size=Persist.Fsize)
#self.pack()
self.X = tk.BooleanVar()
self.X.set(Persist.X)
self.Window = tk.BooleanVar()
self.Window.set(Persist.Window)
self.Debug = tk.BooleanVar()
self.Debug.set(Persist.Debug)
self.Legal = tk.BooleanVar()
self.Legal.set(Persist.Legal)
def fsize( self, f ):
if ( f != Persist.Fsize ) :
Persist.Fsize = f
self.master.destroy()
def set_status( self, s ):
Persist.GameStatus = s
self.Status()
def Menu(self):
self.menu = tk.Menu(self.master,tearoff=0)
self.filemenu = tk.Menu(self.menu,tearoff=0)
self.menu.add_cascade(label="File",menu=self.filemenu,font=self.font)
self.filemenu.add_command(label="Load",command=self.Load,font=self.font)
self.filemenu.add_command(label="Save",command=self.Save,font=self.font)
self.filemenu.add_command(label="Solve",command=self.Solve,font=self.font)
self.filemenu.add_command(label="Test",command=self.Test,font=self.font)
self.filemenu.add_command(label="Clear",command=self.Clear,font=self.font)
self.filemenu.add_command(label="Exit",command=self.Quit,font=self.font)
self.sizemenu = tk.Menu(self.menu,tearoff=0)
self.menu.add_cascade(label="Size",menu=self.sizemenu,font=self.font)
self.ss_choose = tk.IntVar()
ss_choose = Persist.SUBSIZE
for ss in range(2,7):
self.sizemenu.add_radiobutton(label=("> " if ss == Persist.SUBSIZE else "")+str(ss*ss)+"x"+str(ss*ss), value=ss, variable=self.ss_choose, command=self.Size,font=self.font)
self.optmenu = tk.Menu(self.menu,tearoff=0)
self.menu.add_cascade(label="Options",menu=self.optmenu,font=self.font)
self.optmenu.add_checkbutton(label="X pattern",onvalue=True,offvalue=False,variable=self.X,font=self.font,command=self.Option)
self.optmenu.add_checkbutton(label="Window pane",onvalue=True,offvalue=False,variable=self.Window,font=self.font,command=self.Option)
self.optmenu.add_checkbutton(label="Debugging data",onvalue=True,offvalue=False,variable=self.Debug,font=self.font,command=self.Option)
self.optmenu.add_checkbutton(label="Legal choices",onvalue=True,offvalue=False,variable=self.Legal,font=self.font,command=self.Option)
self.fontmenu = tk.Menu(self.optmenu,tearoff=0)
self.optmenu.add_cascade(label="Font size",menu=self.fontmenu,font=self.font)
for ff in [6,8,10,14,18,22,26]:
self.fontmenu.add_command(label=("> " if ff==Persist.Fsize else "")+str(ff),font=self.font, command=lambda ff=ff: self.fsize(ff))
self.statusmenu = tk.Menu(self.optmenu,tearoff=0)
self.optmenu.add_cascade(label="Game state status",menu=self.statusmenu,font=self.font)
for ss in ["None","Unique"]:
self.statusmenu.add_command(label=("> " if ss==Persist.GameStatus else "")+ss, font=self.font, command=lambda ss=ss: self.set_status(ss))
self.helpmenu = tk.Menu(self.menu,tearoff=0)
self.menu.add_cascade(label="Help",menu=self.helpmenu,font=self.font)
self.helpmenu.add_command(label="About",command=self.about,font=self.font)
self.master.config(menu=self.menu)
def Load( self ):
Lfile = tkfile.askopenfile(mode="r",filetypes=[("Comma-separated-values","*.csv"),("All files","*.*")],title="Load a sudoku",parent=self.master)
if Lfile:
try:
i = 0
Window = False
X = False
for line in Lfile:
if '#' in line:
[line,comment]=line.split('#')
if "=" in comment:
[var,val] = line.split("=")
else:
[var,val] = [comment,"true"]
if var == "window":
Window = (val=="true")
if var == "X":
X = (val=="true")
if ',' in line:
v = line.split(',')
if i == 0 :
#first line, check size
Lsize = len(v)
if Lsize not in [4,9,16,25,36]:
tkmessage.showerror("File error","Not a recognized size")
Persist.Data=None
break
Persist.Data = [[0 for i in range(self.SIZE)] for j in range(self.SIZE)]
else:
if len(v) != Lsize:
tkmessage.showerror("File error","Value lists not the same size")
Persist.Data=None
break
Persist.Data[i] = [int(x) if x.isnumeric() else 0 for x in list(map(lambda s:s.strip(),v)) ]
if max(Persist.Data[i]) > Lsize or min(Persist.Data[i]) < 0 :
tkmessage.showerror("File error","Value out of range")
Persist.Data=None
break
i += 1
if i == Lsize:
#done
Persist.X = X
Persist.Window = Window
Persist.SUBSIZE = [x*x for x in range(7)].index(Lsize)
self.master.destroy()
Lfile.close()
except UnicodeDecodeError:
tkmessage.showerror("Unreadable","File contains unreadable characters")
Persist.Data=None
return
def Save(self):
    """Write the current board, plus the window/X flags, as a CSV file."""
    filename = tkfile.asksaveasfilename(
        filetypes=[("Comma-separated-values", "*.csv"), ("All files", "*.*")],
        title="Save this sudoku board", parent=self.master)
    if not filename:
        return
    with open(filename, 'w') as Sfile:
        # Flag directives first, mirroring what Load expects.
        if Persist.Window:
            Sfile.write("# window\n")
        if Persist.X:
            Sfile.write("# X\n")
        # One comma-joined row per board row, read straight off the buttons.
        rows = []
        for i in range(self.SIZE):
            cells = [self.but[i][j].cget("text") for j in range(self.SIZE)]
            rows.append(",".join(cells))
        Sfile.write("\n".join(rows) + "\n")
def Libs():
    """Load the per-size sudoku solver shared libraries.

    Returns:
        dict mapping the sub-grid size (2..6) to the ctypes handle of the
        matching NxN solver library (sudoku2_lib4 .. sudoku2_lib36).
    """
    s_lib = {}
    for ss in range(2, 7):
        # Shared C library living next to the script.
        lib_base = "./"                 # location
        lib_base += "sudoku2_lib"       # base name
        lib_base += str(ss * ss)
        # BUGFIX: the platform checks must be exclusive.  The old code appended
        # ".dll" on Windows and then *also* fell into the non-Linux else branch,
        # producing "....dll.dylib".
        system = platform.uname()[0]
        if system == "Windows":
            lib_base += ".dll"
        elif system == "Linux":
            lib_base += ".so"
        else:
            lib_base += ".dylib"
        # Load the library (removed the unused "global solve_lib" declaration).
        s_lib[ss] = ctypes.cdll.LoadLibrary(lib_base)
    return s_lib
def main(args):
    """Entry point: install Ctrl-C handling, then run the GUI indefinitely."""
    # Route SIGINT (Ctrl-C) through the app's handler instead of a traceback.
    signal.signal(signal.SIGINT, signal_handler)
    # Prepare the solver-library bookkeeping once up front.
    Persist.LibSet()
    # Each pass builds a fresh root window; actions such as Load destroy the
    # window, drop us out of mainloop, and the loop rebuilds the board.
    while True:
        Persist.LibUse()
        root = tk.Tk()
        Sudoku(master=root).mainloop()
if __name__ == "__main__":
    # execute only if run as a script
    # main() never returns normally (infinite GUI loop); sys.exit propagates
    # whatever it would return if it ever did.
    sys.exit(main(sys.argv))
| 30.881801 | 207 | 0.646355 | 2,634 | 16,460 | 3.998481 | 0.129841 | 0.010444 | 0.025826 | 0.029054 | 0.50883 | 0.43496 | 0.396316 | 0.352165 | 0.321401 | 0.312856 | 0 | 0.011716 | 0.18068 | 16,460 | 532 | 208 | 30.93985 | 0.769242 | 0.036817 | 0 | 0.372685 | 0 | 0 | 0.064223 | 0.002781 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.020833 | 0 | 0.143519 | 0.00463 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77a51fcecb6daeb1528498330dad731bde24172c | 1,549 | py | Python | Text_to_speech_GAN/waveFiles.py | scotty110/ANN-code | 05ae6094dfe98c1c9fd0feb87ffb0c0c5206502a | [
"MIT"
] | null | null | null | Text_to_speech_GAN/waveFiles.py | scotty110/ANN-code | 05ae6094dfe98c1c9fd0feb87ffb0c0c5206502a | [
"MIT"
] | null | null | null | Text_to_speech_GAN/waveFiles.py | scotty110/ANN-code | 05ae6094dfe98c1c9fd0feb87ffb0c0c5206502a | [
"MIT"
] | null | null | null | import scipy.io.wavfile as siow
import scipy.signal as ssr
import matplotlib.pyplot as plt
import numpy as np
import math
def groupNumpy(to_group_array, interval, debug=True):
    '''
    Breaks numpy array into an array of arrays. Where each array is <interval> long.
    The final group is zero-padded when the input length is not a multiple of
    <interval>.
    Inputs:
        to_group_array - array we wish to break down into sub intervals
                         (any trailing shape, e.g. (N,) mono or (N, 2) stereo)
        interval - interval length we wish to break down into
        debug - when True, print the input length and group count
    Outputs:
        array of size [ceil(len/interval), interval, *rest-of-shape]
    '''
    n = math.ceil(len(to_group_array) / (interval * 1.0))
    if (debug):
        print("array lenth: ", len(to_group_array))
        print("N: ", n)
    # Generalized: take the channel layout from the input instead of
    # hard-coding 2 channels.
    output_array = np.zeros((n, interval) + np.shape(to_group_array)[1:])
    for i in range(0, n, 1):
        chunk = to_group_array[(i * interval):((i + 1) * interval)]
        # BUGFIX: the last chunk may be shorter than <interval>; assigning it
        # to a full-length row raised ValueError.  Fill only its prefix and
        # leave the rest zero-padded.
        output_array[i, :len(chunk)] = chunk
    return output_array
# Demo driver: read a WAV file and split it into fixed-size frames.
filename = "hello.wav"
audio_tuple = siow.read(filename)  # (sample_rate, samples) per scipy.io.wavfile
audio_array = audio_tuple[1]
print("Sample rate: ", audio_tuple[0])
#Assuming sample rate is 16 khz, want to break into 20 ms chucks
# (16000 samples/s * 0.020 s = 320 samples per group)
grouped_array = groupNumpy(audio_array, 320)
#print("differenc: ", grouped_array[1])
#print("true segment: ", audio_array[320:2*320])
# Sanity check: the second group should equal the second 320-sample slice,
# so the summed difference should print 0.
print("Difference: ", np.sum(grouped_array[1]-audio_array[320:2*320]) )
#2D sound is multi channel sound
'''
#Plots look about the same (probably are), so assuming down sampling works for now
plt.plot( audio_new, color="orange" )
plt.ylabel('sound wave')
plt.show()
'''
'''
Used as a outline for code:
https://medium.com/@ageitgey/machine-learning-is-fun-part-6-how-to-do-speech-recognition-with-deep-learning-28293c162f7a
'''
77a564e8a33b0b8bdb5861341a41f636b8f577fb | 389 | py | Python | test/test_nesting.py | zauberzeug/binding | 8b09b15e3675c68850b81d7253f1eb24245ee76b | [
"MIT"
] | 1 | 2022-01-07T03:22:19.000Z | 2022-01-07T03:22:19.000Z | test/test_nesting.py | zauberzeug/binding | 8b09b15e3675c68850b81d7253f1eb24245ee76b | [
"MIT"
] | null | null | null | test/test_nesting.py | zauberzeug/binding | 8b09b15e3675c68850b81d7253f1eb24245ee76b | [
"MIT"
] | null | null | null | from binding import BindableProperty
class A:
    # Class-level descriptor from the `binding` package: gives instances a
    # bindable `x` attribute.
    x = BindableProperty()

    def __init__(self, x):
        self.x = x

    def bind_x_to(self, target):
        # Forward changes of x into `target`; nesting=1 presumably limits how
        # deep the binding follows attributes -- TODO confirm against binding docs.
        self.x.bind_to(target, nesting=1)
class B:
    # Bindable `y` attribute; used as the binding target in the test below.
    y = BindableProperty()

    def __init__(self, y):
        self.y = y
def test_nesting():
    """Binding a.x to b.y propagates a's value into b immediately."""
    source = A(1)
    sink = B(2)
    source.bind_x_to(sink.y)
    # The bind overwrites the target with the source's current value.
    assert source.x == 1 and sink.y == 1
| 12.966667 | 41 | 0.562982 | 62 | 389 | 3.306452 | 0.33871 | 0.073171 | 0.22439 | 0.263415 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018657 | 0.311054 | 389 | 29 | 42 | 13.413793 | 0.746269 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0625 | 1 | 0.25 | false | 0 | 0.0625 | 0 | 0.5625 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 4 |
77a746913532d4c25ca26f69e46343bc6271ae0e | 790 | py | Python | Tools/Individual Tools/Big Build Calculator.py | TheUncannyScrub/PythonMinecraftTools | 1ff5cf53f195cff41c33dc39a461ee94c6edf510 | [
"MIT"
] | null | null | null | Tools/Individual Tools/Big Build Calculator.py | TheUncannyScrub/PythonMinecraftTools | 1ff5cf53f195cff41c33dc39a461ee94c6edf510 | [
"MIT"
] | null | null | null | Tools/Individual Tools/Big Build Calculator.py | TheUncannyScrub/PythonMinecraftTools | 1ff5cf53f195cff41c33dc39a461ee94c6edf510 | [
"MIT"
] | null | null | null | import os
import sys
import random
import math
from time import *
import decimal
# Interactive calculator: convert an item count into Minecraft stacks/chests.
print('Minecraft Resource Calculator')
print('Enter the individual items and the calculator will')
print('tell you how many chest or stacks it is!')
print('*Only works for items that stack up to 64*')
sleep(1)
while True:
    try:
        print('Enter an amount of individual items')
        numinput = float(input('Here: '))
        # 64 items/stack, 27 stacks/chest (1728 items), 54 stacks/double chest.
        stacks = (numinput) / 64
        chests = (numinput) / 1728
        dubchest = (numinput) / 3456
        print(round(stacks, 2), "Stack(s)")
        print(round(chests, 2), "Chest(s)")
        print(round(dubchest, 2), "Double Chest(s)")
        break
    except ValueError:
        # BUGFIX: the bare "except:" also swallowed KeyboardInterrupt and
        # SystemExit; only a non-numeric entry should trigger the retry.
        print('You must enter a number!')
        sleep(2)
input('Press ENTER to exit')
77abe08317bc71eea3c52df6cb5e00aa0db46e67 | 8,183 | py | Python | venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2_grpc.py | choonho/plugin-prometheus-mon-webhook | afa7d65d12715fd0480fb4f92a9c62da2d6128e0 | [
"Apache-2.0"
] | null | null | null | venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2_grpc.py | choonho/plugin-prometheus-mon-webhook | afa7d65d12715fd0480fb4f92a9c62da2d6128e0 | [
"Apache-2.0"
] | null | null | null | venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2_grpc.py | choonho/plugin-prometheus-mon-webhook | afa7d65d12715fd0480fb4f92a9c62da2d6128e0 | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from spaceone.api.identity.v1 import domain_owner_pb2 as spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2
# NOTE: auto-generated by the gRPC Python protocol compiler; do not hand-edit.
class DomainOwnerStub(object):
    """Missing associated documentation comment in .proto file."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One callable per RPC of the DomainOwner service (create/update/delete/get).
        self.create = channel.unary_unary(
                '/spaceone.api.identity.v1.DomainOwner/create',
                request_serializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.CreateDomainOwner.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.FromString,
                )
        self.update = channel.unary_unary(
                '/spaceone.api.identity.v1.DomainOwner/update',
                request_serializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.UpdateDomainOwner.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.FromString,
                )
        self.delete = channel.unary_unary(
                '/spaceone.api.identity.v1.DomainOwner/delete',
                request_serializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.get = channel.unary_unary(
                '/spaceone.api.identity.v1.DomainOwner/get',
                request_serializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.GetDomainOwnerRequest.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.FromString,
                )
# NOTE: auto-generated server-side base class; subclass and override the methods.
class DomainOwnerServicer(object):
    """Missing associated documentation comment in .proto file."""

    def create(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def update(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def delete(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def get(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
# NOTE: auto-generated registration helper; wires a servicer into a grpc.Server.
def add_DomainOwnerServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'create': grpc.unary_unary_rpc_method_handler(
                    servicer.create,
                    request_deserializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.CreateDomainOwner.FromString,
                    response_serializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.SerializeToString,
            ),
            'update': grpc.unary_unary_rpc_method_handler(
                    servicer.update,
                    request_deserializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.UpdateDomainOwner.FromString,
                    response_serializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.SerializeToString,
            ),
            'delete': grpc.unary_unary_rpc_method_handler(
                    servicer.delete,
                    request_deserializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'get': grpc.unary_unary_rpc_method_handler(
                    servicer.get,
                    request_deserializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.GetDomainOwnerRequest.FromString,
                    response_serializer=spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'spaceone.api.identity.v1.DomainOwner', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
 # This class is part of an EXPERIMENTAL API.
# NOTE: auto-generated convenience wrappers (one static method per RPC) around
# grpc.experimental.unary_unary; do not hand-edit.
class DomainOwner(object):
    """Missing associated documentation comment in .proto file."""

    @staticmethod
    def create(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.identity.v1.DomainOwner/create',
            spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.CreateDomainOwner.SerializeToString,
            spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def update(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.identity.v1.DomainOwner/update',
            spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.UpdateDomainOwner.SerializeToString,
            spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.identity.v1.DomainOwner/delete',
            spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def get(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/spaceone.api.identity.v1.DomainOwner/get',
            spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.GetDomainOwnerRequest.SerializeToString,
            spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2.DomainOwnerInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 49 | 135 | 0.70121 | 857 | 8,183 | 6.240373 | 0.12252 | 0.047307 | 0.060209 | 0.069933 | 0.867801 | 0.861818 | 0.848729 | 0.814136 | 0.753179 | 0.731862 | 0 | 0.009519 | 0.229745 | 8,183 | 166 | 136 | 49.295181 | 0.838966 | 0.076867 | 0 | 0.548872 | 1 | 0 | 0.078455 | 0.051056 | 0 | 0 | 0 | 0 | 0 | 1 | 0.075188 | false | 0 | 0.022556 | 0.030075 | 0.150376 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
77ac836929fce76c6241d61d0546b8b5bdacebd5 | 10,283 | py | Python | data/GoogleTrans/preprocess.py | wasiahmad/GATE | 1e48504a3641f00265a271a19eb6b6449fdc33bd | [
"MIT"
] | 24 | 2020-12-07T10:22:40.000Z | 2022-03-31T09:24:13.000Z | data/GoogleTrans/preprocess.py | wasiahmad/GATE | 1e48504a3641f00265a271a19eb6b6449fdc33bd | [
"MIT"
] | 15 | 2021-03-22T04:52:57.000Z | 2022-01-01T18:32:31.000Z | data/GoogleTrans/preprocess.py | wasiahmad/GATE | 1e48504a3641f00265a271a19eb6b6449fdc33bd | [
"MIT"
] | 8 | 2021-03-04T05:09:42.000Z | 2022-01-25T12:59:19.000Z | import json
import os
from udpipe import Model
from conllu import parse
from collections import OrderedDict
# Language code -> pretrained UDPipe 2.5 model file used for tokenizing,
# tagging and parsing the translated sentences.
model_map = {
    'en': 'udpipe/english-ewt-ud-2.5-191206.udpipe',
    'zh': 'udpipe/chinese-gsd-ud-2.5-191206.udpipe',
    'ar': 'udpipe/arabic-padt-ud-2.5-191206.udpipe'
}
def find_span(offsets, begin_offset, end_offset):
    """Match token offsets with the char begin/end offsets of the answer.

    Returns a (start_token, end_token) index pair, or False when either
    boundary cannot be located in exactly one token.
    """
    def _locate(target, side):
        # Prefer an exact boundary match; fall back to the token that
        # strictly contains the character position.
        hits = [i for i, tok in enumerate(offsets) if tok[side] == target]
        if not hits:
            hits = [i for i, tok in enumerate(offsets) if tok[0] < target < tok[1]]
        return hits

    start = _locate(begin_offset, 0)
    end = _locate(end_offset, 1)
    assert (len(start) <= 1)
    assert (len(end) <= 1)
    if len(start) == 1 and len(end) == 1:
        return start[0], end[0]
    return False
def load_conllu(conllu_text):
    """Parse CoNLL-U text into one OrderedDict per sentence with parallel
    token / POS / head / deprel / character-offset lists."""
    conllu_data = []
    sentences = parse(conllu_text)
    for idx, sentence in enumerate(sentences):
        tokens, upos, head, deprel, offset = [], [], [], [], []
        reserved_offsets = []
        for widx, word in enumerate(sentence):
            if isinstance(word['id'], tuple):
                # multi-word token, e.g., word['id'] = (4, '-', 5)
                assert len(word['id']) == 3
                # Remember the surface span: the syntactic words that follow
                # carry no TokenRange of their own.
                indices = word['misc']['TokenRange'].split(':')
                reserved_offsets.append([int(indices[0]), int(indices[1])])
            else:
                tokens.append(word['form'])
                upos.append(word['upostag'])
                head.append(word['head'])
                deprel.append(word['deprel'])
                if word['misc'] is not None:
                    # single-word token
                    indices = word['misc']['TokenRange'].split(':')
                    offset.append([int(indices[0]), int(indices[1])])
                elif len(reserved_offsets) > 0:
                    # Word belongs to a multi-word token: reuse its stored span.
                    offset.append(reserved_offsets.pop())
                else:
                    # No character range available for this word.
                    offset.append([-1, -1])
        assert len(tokens) == len(offset)
        sent_obj = OrderedDict([
            ('token', tokens),
            ('stanford_pos', upos),
            ('stanford_head', head),
            ('stanford_deprel', deprel),
            ('offset', offset)
        ])
        conllu_data.append(sent_obj)
    return conllu_data
def preprocess(srcfile):
    """Load translated examples and turn the <b>subject</b>/<i>object</i> tag
    markup into character-offset spans.

    Examples with duplicated, inverted or empty tag pairs are counted as
    "confusing" and dropped.  Each surviving example's 'parallel' field is
    replaced by a dict with the tag-free sentence and the char spans.
    """
    with open(srcfile) as f:
        data = json.load(f)
    confusing = 0
    returned_data = []
    for ex in data:
        # Drop examples where any marker occurs more than once: the span
        # would be ambiguous.
        if ex['parallel'].count('<b>') > 1:
            confusing += 1
            continue
        elif ex['parallel'].count('</b>') > 1:
            confusing += 1
            continue
        elif ex['parallel'].count('<i>') > 1:
            confusing += 1
            continue
        elif ex['parallel'].count('</i>') > 1:
            confusing += 1
            continue
        parallel_sent = ex['parallel']
        subj_start = parallel_sent.find('<b>')
        subj_end = parallel_sent.find('</b>')
        obj_start = parallel_sent.find('<i>')
        obj_end = parallel_sent.find('</i>')
        # Drop inverted tag pairs (close before open).
        if subj_start > subj_end:
            confusing += 1
            continue
        elif obj_start > obj_end:
            confusing += 1
            continue
        # Work out the left-to-right order of the four markers so they can be
        # stripped in order while recording the post-strip character offsets.
        if subj_end < obj_start:
            # subj is in the left of obj
            position = ['subj_start', 'subj_end', 'obj_start', 'obj_end']
        elif obj_end < subj_start:
            # obj is in the left of subj
            position = ['obj_start', 'obj_end', 'subj_start', 'subj_end']
        elif subj_start < obj_start < subj_end:
            # obj opens inside the subj span (overlapping entities)
            position = ['subj_start', 'obj_start']
            if obj_end < subj_end:
                position += ['obj_end', 'subj_end']
            else:
                position += ['subj_end', 'obj_end']
        elif obj_start < subj_start < obj_end:
            # subj opens inside the obj span
            position = ['obj_start', 'subj_start']
            if subj_end < obj_end:
                position += ['subj_end', 'obj_end']
            else:
                position += ['obj_end', 'subj_end']
        else:
            raise ValueError()
        # Strip markers left-to-right; each find() sees a sentence with all
        # earlier markers already removed, so the offsets refer to clean text.
        for item in position:
            if item == 'subj_start':
                real_ss = parallel_sent.find('<b>')
                parallel_sent = parallel_sent.replace('<b>', '')
            elif item == 'subj_end':
                real_se = parallel_sent.find('</b>')
                parallel_sent = parallel_sent.replace('</b>', '')
            elif item == 'obj_start':
                real_os = parallel_sent.find('<i>')
                parallel_sent = parallel_sent.replace('<i>', '')
            elif item == 'obj_end':
                real_oe = parallel_sent.find('</i>')
                parallel_sent = parallel_sent.replace('</i>', '')
        # Drop empty spans.
        if real_ss == real_se:
            confusing += 1
            continue
        elif real_os == real_oe:
            confusing += 1
            continue
        ex['parallel'] = {
            'sentence': parallel_sent,
            'subj_pos': [real_ss, real_se],
            'obj_pos': [real_os, real_oe],
            'source': ex['parallel']
        }
        returned_data.append(ex)
    print('Out of %d examples, %d are dropped!' % (len(data), confusing))
    return returned_data
def get_conllu_text(text, model):
    """Tokenize, tag, and dependency-parse *text* with a UDPipe model.

    Args:
        text: raw sentence text (pre-segmented: one sentence per line).
        model: a udpipe Model wrapper providing tokenize/tag/parse/write.

    Returns:
        The analysis serialized in CoNLL-U format (with TokenRange offsets,
        requested via the 'ranges' tokenizer option).
    """
    sentences = model.tokenize(text, 'ranges;presegmented')
    # Removed the unused `total_words` accumulator from the original.
    for s in sentences:
        model.tag(s)
        model.parse(s)
    conllu = model.write(sentences, "conllu")
    return conllu
def convert_char_to_word_indices(parallel_data, tgtfile, lang):
    """Re-tokenize translated sentences and map the char-level subj/obj spans
    to word indices, then dump the reshaped examples to *tgtfile* as JSON."""
    model = Model(model_map[lang])
    skipped = 0
    for ex in parallel_data:
        trans_sent = ex['parallel']['sentence']
        conllu_text = get_conllu_text(trans_sent, model)
        conll_ex = load_conllu(conllu_text)
        # Input is pre-segmented, so exactly one parsed sentence is expected.
        assert len(conll_ex) == 1
        conll_ex = conll_ex[0]
        subj_start_char, subj_end_char = ex['parallel']['subj_pos']
        obj_start_char, obj_end_char = ex['parallel']['obj_pos']
        subj_start_end = find_span(conll_ex['offset'], subj_start_char, subj_end_char)
        obj_start_end = find_span(conll_ex['offset'], obj_start_char, obj_end_char)
        # When a span cannot be aligned to token boundaries, log the details
        # and leave the example unconverted.  NOTE(review): skipped examples
        # are still written to tgtfile in their original shape.
        if not subj_start_end:
            print(conll_ex['token'])
            print(conll_ex['offset'])
            print(trans_sent[subj_start_char:subj_end_char])
            print(subj_start_char, subj_end_char)
            skipped += 1
            continue
        if not obj_start_end:
            print(conll_ex['token'])
            print(conll_ex['offset'])
            print(trans_sent[obj_start_char:obj_end_char])
            print(obj_start_char, obj_end_char)
            skipped += 1
            continue
        # Reshape the example in place into a flat per-example schema.
        ex['source'] = ex['sentence']
        ex['parallel'].pop('subj_pos')
        ex['parallel'].pop('obj_pos')
        ex.pop('sentence')
        ex.pop('subj')
        ex.pop('obj')
        ex.pop('token')
        ex['translation'] = ex['parallel']['source']
        ex.pop('parallel')
        ex['token'] = conll_ex['token']
        ex['stanford_pos'] = conll_ex['stanford_pos']
        ex['stanford_head'] = conll_ex['stanford_head']
        ex['stanford_deprel'] = conll_ex['stanford_deprel']
        # No NER is run on the translation; fill with the "outside" tag.
        ex['stanford_ner'] = ["O"] * len(ex['token'])
        ex['subj_start'] = subj_start_end[0]
        ex['subj_end'] = subj_start_end[1]
        ex['obj_start'] = obj_start_end[0]
        ex['obj_end'] = obj_start_end[1]
        # These three are carried over unchanged from the source example.
        ex['subj_type'] = ex['subj_type']
        ex['obj_type'] = ex['obj_type']
        ex['relation'] = ex['relation']
    if skipped > 0:
        print('%d examples are skipped since we cannot resolve their character indices.' % skipped)
    with open(tgtfile, 'w') as fw:
        json.dump(parallel_data, fw, sort_keys=True, indent=4)
def filter_source_examples(selected_ids, src_file, tgt_file):
    """Copy the examples whose ids appear in *selected_ids* from the JSON
    file *src_file* into *tgt_file*, preserving the id order given."""
    with open(src_file) as f:
        by_id = {ex['id']: ex for ex in json.load(f)}
    selected_data = [by_id[idx] for idx in selected_ids]
    assert len(selected_data) == len(selected_ids), \
        '{} != {}'.format(len(selected_data), len(selected_ids))
    with open(tgt_file, 'w') as fw:
        json.dump(selected_data, fw, sort_keys=True, indent=4)
if __name__ == '__main__':
    # For each task (event/relation) and target language (Chinese/Arabic):
    # 1) convert tag markup in the translated test file to char spans,
    # 2) re-tokenize and write the word-indexed parallel data,
    # 3) copy the matching English source examples for the surviving ids.
    new_data = preprocess('ace_event/en_test_zh.json')
    convert_char_to_word_indices(new_data,
                                 'ace_event/Chinese/test_parallel.json',
                                 'zh')
    selected_ids = [ex['id'] for ex in new_data]
    filter_source_examples(selected_ids,
                           '../data/ace_event/English/test.json',
                           'ace_event/Chinese/test_source.json')

    new_data = preprocess('ace_relation/en_test_zh.json')
    convert_char_to_word_indices(new_data,
                                 'ace_relation/Chinese/test_parallel.json',
                                 'zh')
    selected_ids = [ex['id'] for ex in new_data]
    filter_source_examples(selected_ids,
                           '../data/ace_relation/English/test.json',
                           'ace_relation/Chinese/test_source.json')

    new_data = preprocess('ace_event/en_test_ar.json')
    convert_char_to_word_indices(new_data,
                                 'ace_event/Arabic/test_parallel.json',
                                 'ar')
    selected_ids = [ex['id'] for ex in new_data]
    filter_source_examples(selected_ids,
                           '../data/ace_event/English/test.json',
                           'ace_event/Arabic/test_source.json')

    new_data = preprocess('ace_relation/en_test_ar.json')
    convert_char_to_word_indices(new_data,
                                 'ace_relation/Arabic/test_parallel.json',
                                 'ar')
    selected_ids = [ex['id'] for ex in new_data]
    filter_source_examples(selected_ids,
                           '../data/ace_relation/English/test.json',
                           'ace_relation/Arabic/test_source.json')
| 36.207746 | 99 | 0.550812 | 1,254 | 10,283 | 4.263955 | 0.135566 | 0.044885 | 0.026931 | 0.020572 | 0.43127 | 0.372732 | 0.312138 | 0.275856 | 0.266879 | 0.266879 | 0 | 0.010541 | 0.31732 | 10,283 | 283 | 100 | 36.335689 | 0.75114 | 0.030341 | 0 | 0.242553 | 0 | 0 | 0.166014 | 0.065984 | 0 | 0 | 0 | 0 | 0.025532 | 1 | 0.025532 | false | 0 | 0.021277 | 0 | 0.068085 | 0.042553 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77ae2c52823c3ed666b8da7d385a9528b00e5196 | 4,034 | py | Python | bricklayer/util/parallel_fetch.py | loganwang007/bricklayer | 531dd4acaf20574a9d2f7f0adf68789888288157 | [
"Apache-2.0"
] | null | null | null | bricklayer/util/parallel_fetch.py | loganwang007/bricklayer | 531dd4acaf20574a9d2f7f0adf68789888288157 | [
"Apache-2.0"
] | null | null | null | bricklayer/util/parallel_fetch.py | loganwang007/bricklayer | 531dd4acaf20574a9d2f7f0adf68789888288157 | [
"Apache-2.0"
] | null | null | null | """
Module to distribute the S3 download over a spark cluster
Useful when the data is highly partitioned and unable to be loaded by standard methods
Results end up in a table
Usage:
```
from parallel_fetch import DbricksParallelFetch
# define the aws_bucket and output_dir for the s3_fetch to start
aws_bucket = "service-trips"
output_dir = "/tmp/"
# define the target dataframe that contains the column of S3 paths to fetch
df = spark.createDataFrame(...)
# export the fetched contents dataframe
output_df = DbricksParallelFetch.download_file(df, aws_bucket, output_dir, path_column)
```
"""
from concurrent.futures import ThreadPoolExecutor, as_completed
from functools import partial
from pyspark.sql.functions import pandas_udf, PandasUDFType
from pyspark.sql import DataFrame
import logging
import os
import boto3
import csv
class DbricksParallelFetch:
    # NOTE(review): requires pyspark and boto3 on the cluster; S3 credentials
    # come from the default boto3 session.

    @staticmethod
    def download_file(df: DataFrame, aws_bucket: str, output_dir: str, path_column: str, max_workers: int = 32):
        """encapsulate the pandas udf function as a static method

        Args:
            df (DataFrame): target dataframe
            aws_bucket (str): aws bucket stored all the small files
            output_dir (str): temporary output dir
            path_column (str): path column in the target dataframe
            max_workers (int): number of processors

        Returns:
            [DataFrame]: [output dataframe with downloaded content]
        """
        @pandas_udf('string', PandasUDFType.SCALAR)
        def s3_fetch(paths):
            def download_one_file(bucket: str, output: str, client: boto3.client, s3_file: str):
                """
                Download a single file from S3
                Args:
                    bucket (str): S3 bucket where images are hosted
                    output (str): Dir to store the images
                    client (boto3.client): S3 client
                    s3_file (str): S3 object name
                """
                # '/' in the key is flattened to '_' so every object lands in
                # a single local directory.
                client.download_file(
                    Bucket=bucket, Key=s3_file,
                    Filename=os.path.join(output, s3_file.replace('/', '_'))
                )

            files_to_download = paths
            # Creating only one session and one client
            session = boto3.Session()
            client = session.client("s3")
            # The client is shared between threads
            func = partial(download_one_file, aws_bucket, output_dir, client)
            # List for storing possible failed downloads to retry later
            failed_downloads = []
            with ThreadPoolExecutor(max_workers) as executor:
                # Using a dict for preserving the downloaded file for each future
                # to store it as a failure if we need that
                futures = {
                    executor.submit(func, file_to_download):
                        file_to_download for file_to_download in files_to_download
                }
                for future in as_completed(futures):
                    if future.exception():
                        failed_downloads.append(futures[future])
            if len(failed_downloads) > 0:
                # Record failed keys so they can be retried out of band.
                with open(
                    os.path.join(output_dir, "failed_downloads.csv"), "w", newline=""
                ) as csvfile:
                    writer = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
                    writer.writerow(failed_downloads)

            def read_file_and_return_contents(path):
                # Read each downloaded file back and surface its text in the
                # result column; failed downloads become null.
                try:
                    with open(output_dir + path.replace('/', '_'), 'r') as file:
                        logging.info(f"Read {file} and return its value")
                        return file.read()
                except FileNotFoundError:
                    logging.warning("Messages is failed to download from s3")
                    return None

            return paths.apply(read_file_and_return_contents)

        return df.withColumn('downloaded_content', s3_fetch(path_column))
| 42.020833 | 112 | 0.595439 | 458 | 4,034 | 5.09607 | 0.368996 | 0.034704 | 0.01671 | 0.021851 | 0.021422 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007845 | 0.336391 | 4,034 | 95 | 113 | 42.463158 | 0.864027 | 0.341101 | 0 | 0 | 0 | 0 | 0.050982 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.085106 | false | 0 | 0.170213 | 0 | 0.361702 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77af594f90cdfff28c32483019a29f1ca17a171f | 621 | py | Python | algorithms/mergesort.py | MatheusRV/Analysis-of-Algorithms-Course | 4c1f4ccb8349b894653128be7e38d3045b0f5c13 | [
"MIT"
] | null | null | null | algorithms/mergesort.py | MatheusRV/Analysis-of-Algorithms-Course | 4c1f4ccb8349b894653128be7e38d3045b0f5c13 | [
"MIT"
] | null | null | null | algorithms/mergesort.py | MatheusRV/Analysis-of-Algorithms-Course | 4c1f4ccb8349b894653128be7e38d3045b0f5c13 | [
"MIT"
] | null | null | null | def merge(left, right):
"""Merge sort merging function."""
merged_array=[]
while left or right:
if not left:
merged_array.append(right.pop())
elif (not right) or left[-1] > right[-1]:
merged_array.append(left.pop())
else:
merged_array.append(right.pop())
merged_array.reverse()
return merged_array
def merge_sort(array, size):
"""Merge sort algorithm implementation."""
if size < 2: # base case
return array
else:
# divide array in half and merge sort recursively
half = size // 2
left = merge_sort(array[:half], half)
right = merge_sort(array[half:], half)
return merge(left, right)
| 25.875 | 51 | 0.690821 | 91 | 621 | 4.615385 | 0.340659 | 0.128571 | 0.121429 | 0.104762 | 0.22381 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007797 | 0.173913 | 621 | 23 | 52 | 27 | 0.810916 | 0.199678 | 0 | 0.210526 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0 | 0 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77af69141958979d91a8946a1f5ae3a3eb8f9bf0 | 2,874 | py | Python | SyracuseNews.py | islubee/Discord-Bots | c504adf6af09e68809038e7c4f0720cb8022f982 | [
"MIT"
] | null | null | null | SyracuseNews.py | islubee/Discord-Bots | c504adf6af09e68809038e7c4f0720cb8022f982 | [
"MIT"
] | null | null | null | SyracuseNews.py | islubee/Discord-Bots | c504adf6af09e68809038e7c4f0720cb8022f982 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
import urllib.request as urllib2
import re
import pickle
import logging
import ssl
import urllib.parse
import base64
from datetime import datetime,timedelta
from time import sleep
import time
import os
import discord
from dotenv import load_dotenv
import requests
#import MySQLdb
from discord.ext.tasks import loop
# Initialize the Discord client; the send_articles loop below attaches to it.
client = discord.Client()
# load_dotenv() reads .env from the current directory by default; pass a path
# (e.g. load_dotenv("directory_of_.env")) to load it from somewhere else.
load_dotenv()
# Bot token taken from the environment (.env); None if unset.
TOKEN = os.getenv('DISCORD_TOKEN')
#To strip HTML tags
def _remove_html_tags(html):
p = re.compile(r'<[^<]*?/?>')
return p.sub('', html)
# Disable HTTPS certificate verification globally when the interpreter
# supports it.  NOTE(review): this weakens TLS security for every request.
if hasattr(ssl, '_create_unverified_context'):
    ssl._create_default_https_context = ssl._create_unverified_context
# Create request and open URL to read
_opener = urllib2.build_opener()
_opener.addheaders = [('User-agent', 'Mozilla/5.0')]  # present a browser UA
# Module-level state; _events_calendar/_events_list_file appear unused in the
# visible code -- presumably leftovers from an earlier events feature.
_events_calendar = {}
_events_list_file = 'events.data'
_url = 'https://www.syracuse.com/'
logging.basicConfig(
    filename='log_histopy.txt', level=logging.DEBUG,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
html = _opener.open(_url).read()
# Find articles and obtain necessary information such as title, link, publish date.
articles = []
uls = BeautifulSoup(html, "html.parser").html.body.findAll('article')
for x in uls:
    # data-publishedon is a Unix timestamp; the -5h shift presumably converts
    # UTC to US/Eastern (ignores DST) -- TODO confirm.
    time1 = int(x.attrs['data-publishedon'])
    dt_object = (datetime.fromtimestamp(time1) - timedelta(hours=5)).strftime('%Y-%m-%d %H:%M:%S')
    atetime_object = datetime.strptime(dt_object, '%Y-%m-%d %H:%M:%S')
    # Keep only articles published in the current (shifted) hour of today.
    if ((atetime_object.hour == (datetime.today()- timedelta(hours=5)).hour) and (atetime_object.day == (datetime.today()- timedelta(hours=5)).day)) :
        articles.append({'header':x.attrs['data-social-hed'],'link':x.attrs['data-source'],'thumb':x.attrs['data-image-thumb'],'publishedOnRaw':int(x.attrs['data-publishedon']), 'id': x.attrs['id']})
# Send articles to discord (only when something new was scraped).
if (len(articles) >= 1):
    # BUGFIX: the hard-coded channel id was redacted to "#############",
    # which Python parses as a comment inside the call and breaks the file's
    # syntax.  Read the id from the environment instead.
    CHANNEL_ID = int(os.getenv('DISCORD_CHANNEL_ID', '0'))

    @loop(count=1)
    async def send_articles():
        """Post one embed per scraped article, then the loop ends."""
        channel = client.get_channel(CHANNEL_ID)
        for article in articles:
            publishedOnRaw = article['publishedOnRaw']
            publishedOn = (datetime.fromtimestamp(publishedOnRaw) - timedelta(hours=5))
            embed = discord.Embed(title=article['header'], url=article['link'], description=str(publishedOn))
            embed.set_thumbnail(url=article['thumb'])
            await channel.send(embed=embed)

    @send_articles.before_loop
    async def before_send_articles():
        await client.wait_until_ready()  # Wait until bot is ready.

    @send_articles.after_loop
    async def after_send_articles():
        await client.logout()  # Make the bot log out.

    send_articles.start()
    client.run(TOKEN)
| 33.418605 | 199 | 0.707376 | 387 | 2,874 | 5.124031 | 0.428941 | 0.04236 | 0.025214 | 0.026223 | 0.058497 | 0.006051 | 0 | 0 | 0 | 0 | 0 | 0.006547 | 0.149617 | 2,874 | 85 | 200 | 33.811765 | 0.804828 | 0 | 0 | 0 | 0 | 0 | 0.146505 | 0.010883 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.266667 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
77b07935b55d5c70675838e025530126172cd7e9 | 114 | py | Python | aula01/aula01.py | fabiolealsc/estudandoPython | be0e9211ba3d596a15a9427b612c537a102b858e | [
"MIT"
] | 3 | 2021-09-05T16:50:06.000Z | 2021-11-08T08:56:51.000Z | aula01/aula01.py | fabiolealsc/estudandoPython | be0e9211ba3d596a15a9427b612c537a102b858e | [
"MIT"
] | null | null | null | aula01/aula01.py | fabiolealsc/estudandoPython | be0e9211ba3d596a15a9427b612c537a102b858e | [
"MIT"
] | null | null | null | print("Hello, world")
def func():
    # Placeholder from the lesson; intentionally does nothing.
    pass

while True:
    # NOTE(review): eval() on raw user input executes arbitrary Python --
    # acceptable only as a local learning exercise, never in production.
    txt = input('Digite um valor: ')
    print(eval(txt))
77b13a9d54ea4e9754f97111a15aac0fd3b8b54f | 1,467 | py | Python | accounts/views.py | samaras/bytestore | 8e2f8dfc40af9cf2b387ef17820927e439bbbe33 | [
"MIT"
] | null | null | null | accounts/views.py | samaras/bytestore | 8e2f8dfc40af9cf2b387ef17820927e439bbbe33 | [
"MIT"
] | null | null | null | accounts/views.py | samaras/bytestore | 8e2f8dfc40af9cf2b387ef17820927e439bbbe33 | [
"MIT"
] | null | null | null | from django.shortcuts import render_to_response
from .forms import *
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout
from django.views.decorators.csrf import csrf_protect
from django.http import HttpResponseRedirect
from django.template import RequestContext
@csrf_protect
def register(request):
    """Handle user sign-up.

    GET renders an empty registration form; a valid POST creates the
    ``User`` and its ``Profile`` (both names come from ``from .forms
    import *``) and redirects to the success page. An invalid POST falls
    through and re-renders the bound form with its errors.
    """
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            username = form.cleaned_data['username']
            password = form.cleaned_data['password1']
            email = form.cleaned_data['email']
            first_name = form.cleaned_data['first_name']
            last_name = form.cleaned_data['last_name']
            # Create user record; create_user saves once, then the password
            # is hashed and persisted in a second save below.
            user = User.objects.create_user(username=username, email=email, first_name=first_name, last_name=last_name)
            user.set_password(password)
            user.save()
            # Create Profile record & add user to group.
            is_customer = form.cleaned_data['is_customer']
            profile = Profile(is_customer=is_customer, user=user)
            profile.save()
            # TODO(review): both branches are empty -- the customer/non-customer
            # group assignment was never implemented.
            if is_customer:
                pass
            else:
                pass
            # Auto login user
            # NOTE(review): despite the comment above, no login() call is made;
            # the user must sign in manually after registering.
            return HttpResponseRedirect('/register/success/')
    else:
        form = RegistrationForm()
    variables = RequestContext(request, {'form': form})
    return render_to_response('accounts/register.html', variables,)
def logout_page(request):
    """Terminate the current session, then send the visitor to the site root."""
    logout(request)
    redirect_home = HttpResponseRedirect('/')
    return redirect_home
@login_required
def profile(request):
    """Render the signed-in user's profile page (login required)."""
    template_name = 'accounts/profile.html'
    return render_to_response(template_name)
77b28772f0ad4a9c4563ad6fa4f14122fc956e8e | 618 | py | Python | py_tasc.py | hcpss-banderson/py-tasc | 7a7b4b3743a1375707647574a3a40c374b18c73a | [
"MIT"
] | 1 | 2017-07-19T12:37:31.000Z | 2017-07-19T12:37:31.000Z | py_tasc.py | HCPSS/py-tasc | 7a7b4b3743a1375707647574a3a40c374b18c73a | [
"MIT"
] | 1 | 2016-06-09T18:40:05.000Z | 2016-06-09T18:40:05.000Z | py_tasc.py | hcpss-banderson/py-tasc | 7a7b4b3743a1375707647574a3a40c374b18c73a | [
"MIT"
] | 1 | 2021-05-25T12:24:33.000Z | 2021-05-25T12:24:33.000Z | #!/usr/bin/env python
import os
from assembler import Assembler
from optionresolver import OptionResolver
from patchers import PatchManager, PatchPatcher
# Get the user supplied options.
options = OptionResolver()

# Assemble the source code from the manifest's project list.
assembler = Assembler(options.manifest()["projects"], options.destination(), options.extra_parameters())
assembler.assemble()

# Apply any patches listed in the manifest; patch files are resolved
# relative to the manifest's own location.
patchmanager = PatchManager(options.manifest()["patches"], options.destination())
patch_base = os.path.dirname(options.manifest_location())
patchmanager.add_patcher("patch_file", PatchPatcher(patch_base))
patchmanager.patch()
77b37d9171a1365ad576494257206d89ba25adf1 | 111 | py | Python | session_05/dictionaries.py | dravate/spark_python_course | 519389fdb21d78cd6d19e1ad2f7c782bc1449a83 | [
"MIT"
] | null | null | null | session_05/dictionaries.py | dravate/spark_python_course | 519389fdb21d78cd6d19e1ad2f7c782bc1449a83 | [
"MIT"
] | null | null | null | session_05/dictionaries.py | dravate/spark_python_course | 519389fdb21d78cd6d19e1ad2f7c782bc1449a83 | [
"MIT"
] | 1 | 2021-07-27T14:16:39.000Z | 2021-07-27T14:16:39.000Z | d1 = {}
d2 = {'one': 1, 'two': 2 }
d3 = dict(one=1, two=2)
d4 = dict((1, 2), (3, 4))
d5 = dict({1:2, 3:4})
| 11.1 | 26 | 0.414414 | 24 | 111 | 1.916667 | 0.5 | 0.173913 | 0.304348 | 0.347826 | 0.347826 | 0 | 0 | 0 | 0 | 0 | 0 | 0.204819 | 0.252252 | 111 | 9 | 27 | 12.333333 | 0.349398 | 0 | 0 | 0 | 0 | 0 | 0.054054 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
77b62d71a2e2a81d7bb14aa77dd472363e132b1c | 593 | py | Python | deploy/env_var.py | john850512/TranslatedPudding | 64e5f2b5fe389c598daa15ff78dbc5ede3e0799e | [
"MIT"
] | 3 | 2019-05-20T06:36:14.000Z | 2020-05-15T03:58:16.000Z | deploy/env_var.py | john850512/TranslatedPudding | 64e5f2b5fe389c598daa15ff78dbc5ede3e0799e | [
"MIT"
] | null | null | null | deploy/env_var.py | john850512/TranslatedPudding | 64e5f2b5fe389c598daa15ff78dbc5ede3e0799e | [
"MIT"
] | null | null | null | CHANNEL_ACCESS_TOKEN = r'YOUR CHANNEL ACCESS TOKEN'
CHANNEL_SECRET = r'YOUR CHANNEL SECRET'
STR_CURRENT_STATUS_RESPOND = ("็ฎๅ็ๆ
๏ผใ{current_status}ใ\n" +
"1.่ผธๅ
ฅใๅไธ็ฟป่ญฏๅธไธใ้ๅๅ่ฝ\n" +
"2.่ผธๅ
ฅใๅธไธๆถๅๅฎไบใ้้ๅ่ฝ"
)
STR_CURRENT_STATUS = r'็ฟป่ญฏๅธไธ'
STR_ACTIVATE_BOT = r'ๅไธ็ฟป่ญฏๅธไธ'
STR_DEACTIVATE_BOT = r'ๅธไธๆถๅๅฎไบ'
STR_ACTIVATE_BOT_RESPOND = r'ๅไธ็ฟป่ญฏๅธไธไนๅพ๏ผ็ผ็พๅ
จ่บซไผผไนๅ
ๆปฟ็ฅๅฅ็ๅ้๏ผ'
STR_DUPLICATE_ACTIVATE_BOT_RESPOND = r'ๅทฒ็ถๅ้ไธๆฌกไบ๏ผไธๅฏไปฅๅคช่ฒชๅฟ๏ผ'
STR_DEACTIVATE_BOT_RESPOND = r'็ฟป่ญฏๅธไธ่ขซๆถๅ็ๅทฎไธๅคไบ๏ผ็ฅๅฅ็ๅ้ๆผธๆผธ้ๅป'
STR_DUPLICATE_DEACTIVATE_BOT_RESPOND = r'ๆฒๆๅธไธๅฏไปฅๆถๅ๏ผ๏ผฑ๏ผฑ' | 45.615385 | 59 | 0.686341 | 80 | 593 | 4.8 | 0.425 | 0.104167 | 0.114583 | 0.098958 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00431 | 0.217538 | 593 | 13 | 60 | 45.615385 | 0.814655 | 0 | 0 | 0 | 0 | 0 | 0.319865 | 0.117845 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77b773bb110aacc218f4ca773acae2c751ddf7a7 | 691 | py | Python | src/duet/sv_calling.py | yekaizhou/duet | ab49323992d3a5f6c21a10bd7114525a31fb0b8b | [
"BSD-3-Clause"
] | 7 | 2021-08-12T08:26:18.000Z | 2022-02-23T18:13:53.000Z | src/duet/sv_calling.py | yekaizhou/duet | ab49323992d3a5f6c21a10bd7114525a31fb0b8b | [
"BSD-3-Clause"
] | null | null | null | src/duet/sv_calling.py | yekaizhou/duet | ab49323992d3a5f6c21a10bd7114525a31fb0b8b | [
"BSD-3-Clause"
] | null | null | null | # coding=utf-8
import logging
import os
import time
def sv_calling(home, ref_path, aln_path, cls_thres, svlen_thres):
    """Run SVIM structural-variant calling on an alignment.

    Args:
        home: working directory; results are written to ``<home>/sv_calling/``.
        ref_path: path to the reference FASTA.
        aln_path: path to the alignment file passed to ``svim alignment``.
        cls_thres: value for SVIM's ``--cluster_max_distance``.
        svlen_thres: minimum SV length, passed as ``--min_sv_size``.
    """
    banner = '*' * 25
    logging.info('%s SV CALLING STARTED %s', banner, banner)
    starttime = time.time()
    sv_calling_home = home + '/sv_calling/'
    # Create the output directory directly instead of shelling out to mkdir;
    # exist_ok avoids a spurious shell error message on re-runs.
    os.makedirs(sv_calling_home, exist_ok=True)
    # NOTE(review): paths are interpolated unquoted into a shell command;
    # consider subprocess.run([...]) if paths may contain spaces.
    os.system(f'svim alignment {sv_calling_home} {aln_path} {ref_path}'
              f' --min_sv_size {svlen_thres} --read_names --minimum_depth 0'
              f' --minimum_score 0 --cluster_max_distance {cls_thres}')
    logging.info('%s SV CALLING COMPLETED IN %ss %s',
                 banner, round(time.time() - starttime, 3), banner)
logging.info(lines + ' SV CALLING COMPLETED IN ' + str(round(time.time() - starttime, 3)) + 's ' + lines) | 43.1875 | 112 | 0.615051 | 90 | 691 | 4.466667 | 0.466667 | 0.156716 | 0.129353 | 0.089552 | 0.124378 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007353 | 0.212735 | 691 | 16 | 113 | 43.1875 | 0.731618 | 0.017366 | 0 | 0 | 0 | 0 | 0.287611 | 0.069322 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.230769 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77b834ef3b47095a3ef4e2276e9ea11ba4bc2b31 | 622 | py | Python | python/08/a4988.py | matsujirushi/raspi_parts_kouryaku | 35cd6f34d21c5e3160636671175fa8d5aff2d4dc | [
"Apache-2.0"
] | 6 | 2022-03-05T02:36:57.000Z | 2022-03-12T12:31:27.000Z | python/08/a4988.py | matsujirushi/raspi_parts_kouryaku | 35cd6f34d21c5e3160636671175fa8d5aff2d4dc | [
"Apache-2.0"
] | null | null | null | python/08/a4988.py | matsujirushi/raspi_parts_kouryaku | 35cd6f34d21c5e3160636671175fa8d5aff2d4dc | [
"Apache-2.0"
] | null | null | null | import pigpio
import time
# Motion parameters.
ROTATION = 2             # full shaft revolutions to perform
STEP_PER_ROTATE = 200    # full steps per revolution
MICROSTEP = 16           # microstep factor selected via MS1..MS3 below

# BCM GPIO pin assignments for the A4988 driver board.
ENABLE = 10
MS1 = 9
MS2 = 11
MS3 = 5
RESET = 6
SLEEP = 13
STEP = 19
DIR = 26

WAIT = 0.005             # seconds per STEP pulse (half high, half low)

pi = pigpio.pi()
for pin in [ENABLE, MS1, MS2, MS3, RESET, SLEEP, STEP, DIR]:
    pi.set_mode(pin, pigpio.OUTPUT)

# Hold the driver in reset while configuring it.
pi.write(RESET, 0)
pi.write(SLEEP, 1)
# MS1=MS2=MS3 high: sixteenth-step mode per the A4988 datasheet,
# matching MICROSTEP = 16 above.
pi.write(MS1, 1)
pi.write(MS2, 1)
pi.write(MS3, 1)
pi.write(DIR, 0)         # fixed rotation direction for this demo
pi.write(STEP, 0)
pi.write(ENABLE, 0)      # ENABLE is active-low: 0 enables the outputs
time.sleep(0.001)
pi.write(RESET, 1)       # release reset; driver is ready to step

# Pulse STEP once per microstep for the requested number of rotations.
for i in range(STEP_PER_ROTATE * MICROSTEP * ROTATION):
    pi.write(STEP, 1)
    time.sleep(WAIT / 2)
    pi.write(STEP, 0)
    time.sleep(WAIT / 2)
| 14.136364 | 60 | 0.651125 | 116 | 622 | 3.448276 | 0.353448 | 0.1925 | 0.08 | 0.06 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098394 | 0.199357 | 622 | 43 | 61 | 14.465116 | 0.704819 | 0 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0625 | 0 | 0.0625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77b89c8c2551fa8fcc0927345358401aa9bd99ab | 191 | py | Python | Merlin_Motion_Control.py | TomQD-94/merlin-motion-control | 506ef612812be04074a9c01793c86c951578c67c | [
"MIT"
] | 1 | 2022-02-21T11:12:49.000Z | 2022-02-21T11:12:49.000Z | Merlin_Motion_Control.py | TomQD-94/merlin-motion-control | 506ef612812be04074a9c01793c86c951578c67c | [
"MIT"
] | null | null | null | Merlin_Motion_Control.py | TomQD-94/merlin-motion-control | 506ef612812be04074a9c01793c86c951578c67c | [
"MIT"
] | 1 | 2021-04-27T08:35:41.000Z | 2021-04-27T08:35:41.000Z | #!/Users/richard/anaconda3/bin/python3
"""Launch app using Anaconda 3."""
from views.interface import MerlinMotionControlApp
if __name__ == "__main__":
    # Launch the Merlin motion-control app only when run as a script.
    MerlinMotionControlApp().run()
| 19.1 | 50 | 0.748691 | 20 | 191 | 6.75 | 0.95 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017857 | 0.120419 | 191 | 9 | 51 | 21.222222 | 0.785714 | 0.34555 | 0 | 0 | 0 | 0 | 0.067227 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 4 |
77b8deb8a471712792ddcd411f464a17a0cb2bf4 | 1,917 | py | Python | tools/c7n_gcp/c7n_gcp/resources/dataflow.py | vkubyshko/cloud-custodian | e5e3a0f8b5c85adcbec212d780b453047fb6f4d1 | [
"Apache-2.0"
] | 2,415 | 2018-12-04T00:37:58.000Z | 2022-03-31T12:28:56.000Z | tools/c7n_gcp/c7n_gcp/resources/dataflow.py | vkubyshko/cloud-custodian | e5e3a0f8b5c85adcbec212d780b453047fb6f4d1 | [
"Apache-2.0"
] | 3,272 | 2018-12-03T23:58:17.000Z | 2022-03-31T21:15:32.000Z | tools/c7n_gcp/c7n_gcp/resources/dataflow.py | staxio/cloud-custodian | 24ed5d8f09bc37ff76184aae97a1ef577a69a41b | [
"Apache-2.0"
] | 773 | 2018-12-06T09:43:23.000Z | 2022-03-30T20:44:43.000Z | # Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import jmespath
from googleapiclient.errors import HttpError
from c7n_gcp.provider import resources
from c7n_gcp.query import QueryResourceManager, TypeInfo
@resources.register('dataflow-job')
class DataflowJob(QueryResourceManager):
    """GCP resource: https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs
    """

    class resource_type(TypeInfo):
        # Dataflow v1b3 API description consumed by the query machinery.
        service = 'dataflow'
        version = 'v1b3'
        component = 'projects.jobs'
        enum_spec = ('aggregated', 'jobs[]', None)
        scope_key = 'projectId'
        name = id = 'name'
        get_requires_event = True
        default_report_fields = [
            'name', 'currentState', 'createTime', 'location']
        permissions = ('dataflow.jobs.list',)

    @staticmethod
    def get(client, event):
        # Resolve a single job from an audit-log event's project and job ids.
        return client.execute_command(
            'get', {
                'projectId': jmespath.search('resource.labels.project_id', event),
                'jobId': jmespath.search('protoPayload.request.job_id', event)
            }
        )

    def resources(self, query=None):
        # Default to listing ACTIVE jobs; a policy-level `query` entry
        # (first element's 'filter' key) overrides it.
        query_filter = 'ACTIVE'
        if self.data.get('query'):
            query_filter = self.data['query'][0].get('filter', 'ACTIVE')
        return super(DataflowJob, self).resources(query={'filter': query_filter})

    def augment(self, resources):
        # Fetch full job details (JOB_VIEW_ALL); on HttpError fall back to
        # the summary record so one bad job doesn't abort the listing.
        client = self.get_client()
        results = []
        for r in resources:
            ref = {
                'jobId': r['id'],
                'projectId': r['projectId'],
                'view': 'JOB_VIEW_ALL'
            }
            try:
                results.append(
                    client.execute_query(
                        'get', verb_arguments=ref))
            except HttpError:
                results.append(r)
        return results
| 32.491525 | 93 | 0.571205 | 186 | 1,917 | 5.77957 | 0.510753 | 0.04093 | 0.018605 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006818 | 0.311424 | 1,917 | 58 | 94 | 33.051724 | 0.807576 | 0.087115 | 0 | 0 | 0 | 0 | 0.15261 | 0.030407 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065217 | false | 0 | 0.086957 | 0.021739 | 0.26087 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77b987eaf14f75d36f539722f833433781e4d25e | 12,777 | py | Python | evolution/robot.py | gpatsiaouras/Robot-Simulator | 4676bb342ca2a1ebcabfb4834f42da61610bacdf | [
"MIT"
] | null | null | null | evolution/robot.py | gpatsiaouras/Robot-Simulator | 4676bb342ca2a1ebcabfb4834f42da61610bacdf | [
"MIT"
] | 1 | 2022-03-12T00:58:01.000Z | 2022-03-12T00:58:01.000Z | evolution/robot.py | gpatsiaouras/Robot-Simulator | 4676bb342ca2a1ebcabfb4834f42da61610bacdf | [
"MIT"
] | null | null | null | import numpy as np
from math import hypot as hyp
class Robot:
    """Differential-drive robot with 12 perimeter range sensors, ICC-based
    kinematics, wall-sliding collision handling, and a collision counter
    used as a fitness signal by the evolution code."""

    def __init__(self, diameter, initial_theta, initial_position):
        # Robot specifications
        self.diameter = diameter
        self.radius = int(diameter / 2)
        self.position = initial_position
        # Rotation is in rads
        self.theta = initial_theta
        # Wheel velocity in pixels per simulation step.
        self.MAX_SPEED = 10
        self.MIN_SPEED = -10
        self.left_wheel_velocity = 0
        self.right_wheel_velocity = 0
        # Sensors (coords and values are created in init_sensors()).
        self.sensors_values = []
        self.sensors_parameters = np.zeros((12, 2))
        self.init_sensors()
        # Obstacles the sensors/collision checks test against.
        self.obstacles_coords = []
        self.obstacles_parameters = np.zeros((4, 2))
        # Collision data (for the evolution algorithm's fitness).
        self.collisions = 0

    def reset(self, theta, position):
        """Return the robot to a fresh pose, clearing velocities and collisions."""
        self.theta = theta
        self.position = position
        self.update_sensor_values()
        self.collisions = 0
        self.left_wheel_velocity = 0
        self.right_wheel_velocity = 0

    def update_sensor_values(self):
        """Recompute origin/tip coordinates, range reading, and line equation
        (slope/intercept) of each of the 12 sensors for the current pose."""
        count = 0
        for angle in range(0, 360, 30):
            # sensor origin coords (on the robot body)
            self.sensors_coords[count, 0] = self.position[0] + self.radius * np.cos(self.theta + np.radians(angle))
            self.sensors_coords[count, 1] = self.position[1] + self.radius * np.sin(self.theta + np.radians(angle))
            # sensor tip coords (at max sensing range)
            self.sensors_coords[count, 2] = self.position[0] + self.sens_radius * np.cos(self.theta + np.radians(angle))
            self.sensors_coords[count, 3] = self.position[1] + self.sens_radius * np.sin(self.theta + np.radians(angle))
            self.sensors_values[count] = hyp(self.sensors_coords[count, 2] - self.sensors_coords[count, 0],
                                             self.sensors_coords[count, 3] - self.sensors_coords[count, 1])
            # Sensor line-equation parameters: slope a...
            # NOTE(review): divides by zero when a sensor ray is exactly
            # vertical; numpy yields inf with a warning -- confirm intended.
            self.sensors_parameters[count, 0] = (self.sensors_coords[count, 3] - self.sensors_coords[count, 1]) / \
                                                (self.sensors_coords[count, 2] - self.sensors_coords[count, 0])
            # ...and intercept b.
            self.sensors_parameters[count, 1] = self.sensors_coords[count, 1] - (
                    self.sensors_parameters[count, 0] * self.sensors_coords[count, 0])
            count = count + 1

    def move(self):
        """Advance one simulation step.

        Uses the ICC (instantaneous centre of curvature) update when the
        wheel speeds differ, a straight-line update otherwise. If the new
        pose collides, the move is undone and replaced by wall sliding."""
        # Store old position before applying kinematics.
        old_position = [self.position[0], self.position[1]]
        if self.left_wheel_velocity != self.right_wheel_velocity:
            # Angular velocity from the wheel-speed difference.
            angular_velocity = (self.left_wheel_velocity - self.right_wheel_velocity) / self.diameter
            # Keep theta from exploding.
            self.theta %= 2 * np.pi
            # Signed distance from the robot centre to the ICC.
            R = (self.diameter / 2) * (self.left_wheel_velocity + self.right_wheel_velocity) / (
                    self.left_wheel_velocity - self.right_wheel_velocity)
            ICCx = self.position[0] - R * np.sin(self.theta)
            ICCy = self.position[1] + R * np.cos(self.theta)
            # Rotate the pose about the ICC by angular_velocity.
            matrix_a = np.matrix([[np.cos(angular_velocity), -np.sin(angular_velocity), 0],
                                  [np.sin(angular_velocity), np.cos(angular_velocity), 0], [0, 0, 1]])
            vector_a = np.array([self.position[0] - ICCx, self.position[1] - ICCy, self.theta])
            vector_b = np.array([ICCx, ICCy, angular_velocity])
            new_pos_rot = matrix_a.dot(vector_a) + vector_b
            self.position = [new_pos_rot.item((0, 0)), new_pos_rot.item((0, 1))]
            self.theta = new_pos_rot.item((0, 2))
        elif self.right_wheel_velocity != 0:
            # Equal wheel speeds: drive straight along the current heading.
            self.position[0] = self.position[0] + (self.right_wheel_velocity * np.cos(self.theta))
            self.position[1] = self.position[1] + (self.right_wheel_velocity * np.sin(self.theta))
        # Check if the new move caused a collision.
        if self.check_collision():
            # Undo the move.
            self.position = old_position
            # Move according to the collision-handling (wall sliding) algorithm.
            self.move_with_wall()
            # Increment collisions counter.
            self.collisions += 1
        # Update sensors for the new pose.
        self.update_sensor_values()

    def move_with_wall(self):
        """Slide along an obstacle instead of stopping dead: zero the velocity
        component perpendicular to a nearby axis-aligned wall and apply only
        the parallel component."""
        cap_hor = 1
        cap_ver = 1
        velocity_hor = 0
        velocity_ver = 0
        for obstacle_id in range(len(self.obstacles_parameters)):
            # Vertical obstacle (infinite slope).
            if np.isinf(self.obstacles_parameters[obstacle_id][0]):
                distance = np.abs(self.position[0] - self.obstacles_coords[obstacle_id][0])
                is_inside_the_limits_of_the_line = [
                    self.obstacles_coords[obstacle_id][3] < self.position[1] < self.obstacles_coords[obstacle_id][1]]
            # Horizontal obstacle (zero slope).
            elif self.obstacles_parameters[obstacle_id][0] == 0:
                distance = np.abs(self.position[1] - self.obstacles_coords[obstacle_id][1])
                is_inside_the_limits_of_the_line = [
                    self.obstacles_coords[obstacle_id][2] > self.position[0] > self.obstacles_coords[obstacle_id][0]]
            else:
                # Slanted obstacle: point-to-line distance; sliding is not
                # handled for this case (flag stays False).
                distance = np.abs(-self.obstacles_parameters[obstacle_id][0] * self.position[0] + self.position[1] -
                                  self.obstacles_parameters[obstacle_id][1]) / \
                           np.sqrt((-self.obstacles_parameters[obstacle_id][0]) ** 2 + 1)
                is_inside_the_limits_of_the_line = False
            # NOTE(review): for the axis-aligned cases the flag is a
            # one-element LIST, which is always truthy regardless of the
            # comparison inside it -- looks unintended; confirm behaviour.
            if is_inside_the_limits_of_the_line and distance <= self.radius + 10:
                velocity_hor = np.cos(self.theta) * (self.right_wheel_velocity + self.left_wheel_velocity) / 2
                velocity_ver = np.sin(self.theta) * (self.right_wheel_velocity + self.left_wheel_velocity) / 2
                if self.obstacles_parameters[obstacle_id][0] == 0:
                    # Horizontal wall: suppress vertical motion.
                    cap_ver = 0
                if np.isinf(self.obstacles_parameters[obstacle_id][0]):
                    # Vertical wall: suppress horizontal motion.
                    cap_hor = 0
        velocity_hor = velocity_hor * cap_hor
        velocity_ver = velocity_ver * cap_ver
        self.position[0] = self.position[0] + velocity_hor
        self.position[1] = self.position[1] + velocity_ver

    def increment_left_wheel(self):
        # Capped at MAX_SPEED.
        if self.left_wheel_velocity + 1 <= self.MAX_SPEED:
            self.left_wheel_velocity += 1

    def decrement_left_wheel(self):
        # Capped at MIN_SPEED.
        if self.left_wheel_velocity - 1 >= self.MIN_SPEED:
            self.left_wheel_velocity -= 1

    def increment_right_wheel(self):
        # Capped at MAX_SPEED.
        if self.right_wheel_velocity + 1 <= self.MAX_SPEED:
            self.right_wheel_velocity += 1

    def decrement_right_wheel(self):
        # Capped at MIN_SPEED.
        if self.right_wheel_velocity - 1 >= self.MIN_SPEED:
            self.right_wheel_velocity -= 1

    def increment_both_wheels(self):
        # Speed up while keeping the current heading.
        self.increment_left_wheel()
        self.increment_right_wheel()

    def decrement_both_wheels(self):
        # Slow down while keeping the current heading.
        self.decrement_left_wheel()
        self.decrement_right_wheel()

    def stop_motors(self):
        # Zero both wheels immediately.
        self.right_wheel_velocity = 0
        self.left_wheel_velocity = 0

    def init_sensors(self):
        # 12 sensors perimetrically, 30 degrees between them;
        # sensor 0 is the one in front of the robot.
        # Values go from 0 to the max range, max meaning "nothing in reach".
        self.sensors_values = [0 for i in range(12)]
        self.sensors_coords = np.zeros((12, 4))
        # Sensing range: 100 px beyond the body radius.
        self.sens_radius = 100 + self.radius
        self.update_sensor_values()

    def set_obstacles(self, obstacles_coords, obstacles_params):
        """Install the wall segments (endpoint coords + slope/intercept)."""
        self.obstacles_coords = obstacles_coords
        self.obstacles_parameters = obstacles_params

    def check_sensors(self):
        """Intersect every sensor ray with every obstacle and record ranges.

        NOTE(review): the inner loop overwrites sensors_values with the LAST
        intersecting obstacle, not the nearest one -- confirm intended."""
        for sensor_id in range(len(self.sensors_coords)):
            for obstacle_id in range(len(self.obstacles_coords)):
                intersection_point = self.getIntersectingPoint(self.sensors_coords[sensor_id],
                                                               self.obstacles_coords[obstacle_id])
                if intersection_point:
                    self.sensors_values[sensor_id] = np.sqrt(
                        (intersection_point[0] - self.sensors_coords[sensor_id, 0]) ** 2 + (
                                intersection_point[1] - self.sensors_coords[sensor_id, 1]) ** 2)
                    if self.sensors_values[sensor_id] < 100:
                        # Pull the drawn sensor tip in to the hit point.
                        self.sensors_coords[sensor_id, 2] = intersection_point[0]
                        self.sensors_coords[sensor_id, 3] = intersection_point[1]

    def check_collision(self):
        """Return True when the robot body is within `radius` of any obstacle
        line (vertical, horizontal, or slanted)."""
        for obstacle_id in range(len(self.obstacles_parameters)):
            if np.isinf(self.obstacles_parameters[obstacle_id][0]):
                # Vertical wall: horizontal distance, limited to the y-extent.
                distance = np.abs(self.position[0] - self.obstacles_coords[obstacle_id][0])
                if self.position[1] < min(self.obstacles_coords[obstacle_id][1], self.obstacles_coords[obstacle_id][3]):
                    is_not_in_range = False
                elif self.position[1] > max(self.obstacles_coords[obstacle_id][1], self.obstacles_coords[obstacle_id][3]):
                    is_not_in_range = False
                else:
                    is_not_in_range = True
            elif self.obstacles_parameters[obstacle_id][0] == 0:
                # Horizontal wall: vertical distance, limited to the x-extent.
                distance = np.abs(self.position[1] - self.obstacles_coords[obstacle_id][1])
                if self.position[0] < min(self.obstacles_coords[obstacle_id][0], self.obstacles_coords[obstacle_id][2]):
                    is_not_in_range = False
                elif self.position[0] > max(self.obstacles_coords[obstacle_id][0], self.obstacles_coords[obstacle_id][2]):
                    is_not_in_range = False
                else:
                    is_not_in_range = True
            else:
                # Slanted wall: point-to-line distance (no extent check).
                distance = np.abs(-self.obstacles_parameters[obstacle_id][0] * self.position[0] + self.position[1] -
                                  self.obstacles_parameters[obstacle_id][1]) / \
                           np.sqrt((-self.obstacles_parameters[obstacle_id][0]) ** 2 + 1)
                is_not_in_range = True
            if is_not_in_range and distance <= self.radius:
                return True
        return False

    def getIntersectingPoint(self, line1, line2):
        """ If the given lines are intersecting, return the position of this intersection, otherwise False.

        line1/line2 are (x1, y1, x2, y2) segments; the intersection is found
        via 2x2 determinants and then clamped to both segments' extents."""
        line1_p1 = [line1[0], line1[1]]
        line1_p2 = [line1[2], line1[3]]
        line2_p1 = [line2[0], line2[1]]
        line2_p2 = [line2[2], line2[3]]
        # Quick reject: bounding intervals don't overlap on x or y.
        if ((line1_p1[0] > line2_p1[0] and line1_p1[0] > line2_p2[0] and line1_p2[0] > line2_p1[0] and line1_p2[0] >
                line2_p2[0]) or
                (line1_p1[0] < line2_p1[0] and line1_p1[0] < line2_p2[0] and line1_p2[0] < line2_p1[0] and line1_p2[0] <
                 line2_p2[0]) or
                (line1_p1[1] > line2_p1[1] and line1_p1[1] > line2_p2[1] and line1_p2[1] > line2_p1[1] and line1_p2[1] >
                 line2_p2[1]) or
                (line1_p1[1] < line2_p1[1] and line1_p1[1] < line2_p2[1] and line1_p2[1] < line2_p1[1] and line1_p2[1] <
                 line2_p2[1])):
            return False
        # Get axis differences.
        diffX = (line1_p1[0] - line1_p2[0], line2_p1[0] - line2_p2[0])
        diffY = (line1_p1[1] - line1_p2[1], line2_p1[1] - line2_p2[1])
        # d == 0 means the lines are parallel: no intersection.
        d = np.linalg.det([diffX, diffY])
        if d == 0:
            return False
        det = (np.linalg.det([line1_p1, line1_p2]), np.linalg.det([line2_p1, line2_p2]))
        x = np.linalg.det([det, diffX]) / d
        y = np.linalg.det([det, diffY]) / d
        # Check the intersection lies within both segments (with tolerance).
        margin = 0.0001
        if (x < min(line1_p1[0], line1_p2[0]) - margin or
                x > max(line1_p1[0], line1_p2[0]) + margin or
                y < min(line1_p1[1], line1_p2[1]) - margin or
                y > max(line1_p1[1], line1_p2[1]) + margin or
                x < min(line2_p1[0], line2_p2[0]) - margin or
                x > max(line2_p1[0], line2_p2[0]) + margin or
                y < min(line2_p1[1], line2_p2[1]) - margin or
                y > max(line2_p1[1], line2_p2[1]) + margin):
            return False
        return x, y
| 46.126354 | 122 | 0.591688 | 1,652 | 12,777 | 4.340194 | 0.110169 | 0.068898 | 0.055649 | 0.052162 | 0.590516 | 0.518271 | 0.461506 | 0.422873 | 0.36848 | 0.295676 | 0 | 0.04381 | 0.301479 | 12,777 | 276 | 123 | 46.293478 | 0.759552 | 0.089614 | 0 | 0.243523 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088083 | false | 0 | 0.010363 | 0 | 0.134715 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77b9b6901e4cb4106030a4e8cd51ce978af0e6bd | 2,613 | py | Python | backblaze/tests/blocking/test_file.py | WardPearce/aiob2 | 3dcff9c3aa7612ce7b43375fca379c1358121a4a | [
"MIT"
] | null | null | null | backblaze/tests/blocking/test_file.py | WardPearce/aiob2 | 3dcff9c3aa7612ce7b43375fca379c1358121a4a | [
"MIT"
] | null | null | null | backblaze/tests/blocking/test_file.py | WardPearce/aiob2 | 3dcff9c3aa7612ce7b43375fca379c1358121a4a | [
"MIT"
] | 1 | 2019-07-16T03:38:49.000Z | 2019-07-16T03:38:49.000Z | import unittest
from uuid import uuid4
from os import path
from .client import CLIENT
from ...settings import (
BucketSettings,
UploadSettings,
PartSettings,
CopyFileSettings
)
from ...models.file import FileModel, PartModel
from ...bucket.blocking import BlockingFile
class TestBlockingFile(unittest.TestCase):
    """End-to-end exercise of the blocking file API against a live B2 account.

    Covers simple upload/download, server-side copy, manual multi-part
    upload, part upload from a local file, and the upload_file wrapper.
    Requires network access and valid CLIENT credentials.
    """

    def test_file(self):
        # Fresh bucket per run; uuid4 avoids name collisions between runs.
        _, bucket = CLIENT.create_bucket(BucketSettings(
            "file test {}".format(uuid4())
        ))
        local_path = path.join(
            path.dirname(path.realpath(__file__)),
            "../test_file.png"
        )
        with open(local_path, "rb") as f:
            data = f.read()
        # Simple upload returns both the metadata model and a file handle.
        file_data, file = bucket.upload(
            UploadSettings(
                name="ใฆใฃใผใ.png"
            ),
            data=data
        )
        self.assertIsInstance(file_data, FileModel)
        self.assertIsInstance(file, BlockingFile)
        self.assertTrue(type(file.download()) == bytes)
        # Server-side copy of the uploaded file.
        copy_data, copy_file = file.copy(CopyFileSettings(
            "copied file.png"
        ))
        self.assertIsInstance(copy_data, FileModel)
        self.assertIsInstance(copy_file, BlockingFile)
        copy_file.delete()
        file.delete(
            file_data.file_name
        )
        local_path = path.join(
            path.dirname(path.realpath(__file__)),
            "../parts_test"
        )
        # Multi-part upload, feeding 5 MB chunks manually.
        details, file = bucket.create_part(PartSettings(
            "test part.png"
        ))
        parts = file.parts()
        data = b""
        with open(local_path, "rb") as f:
            data = f.read()
        chunk_size = 5000000
        for chunk in range(0, len(data), chunk_size):
            parts.data(data[chunk:chunk + chunk_size])
        for part, _ in file.parts().list():
            self.assertIsInstance(part, PartModel)
        parts.finish()
        file.delete(details.file_name)
        # Multi-part upload straight from a file path.
        details, file = bucket.create_part(PartSettings(
            "test part upload.png"
        ))
        parts = file.parts()
        parts.file(local_path)
        parts.finish()
        file.delete(details.file_name)
        # upload_file convenience wrapper (binary fixture).
        data, file = bucket.upload_file(
            UploadSettings("test part.bin"),
            local_path
        )
        file.delete(data.file_name)
        local_path = path.join(
            path.dirname(path.realpath(__file__)),
            "../test_file.png"
        )
        data, file = bucket.upload_file(
            UploadSettings("test file upload.png"),
            local_path
        )
        file.delete(data.file_name)
        # Clean up the temporary bucket.
        bucket.delete()
| 22.921053 | 58 | 0.565251 | 268 | 2,613 | 5.335821 | 0.246269 | 0.05035 | 0.027273 | 0.035664 | 0.377622 | 0.377622 | 0.377622 | 0.233566 | 0.167832 | 0.167832 | 0 | 0.005708 | 0.329506 | 2,613 | 113 | 59 | 23.123894 | 0.810502 | 0 | 0 | 0.37037 | 0 | 0 | 0.057405 | 0 | 0 | 0 | 0 | 0 | 0.074074 | 1 | 0.012346 | false | 0 | 0.08642 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77bb0202fbcb60692c326fef16e93e7f15657559 | 616 | py | Python | tests/test_dataset.py | kilsenp/triplet-reid-pytorch | 0cd2c2ac638d22745483b3d12ccb5c160f0bf3c3 | [
"MIT"
] | 41 | 2018-06-21T09:36:23.000Z | 2021-05-07T22:06:25.000Z | tests/test_dataset.py | kilsen512/triplet-reid-pytorch | 0cd2c2ac638d22745483b3d12ccb5c160f0bf3c3 | [
"MIT"
] | 3 | 2018-06-20T10:35:32.000Z | 2020-11-17T20:16:03.000Z | tests/test_dataset.py | kilsen512/triplet-reid-pytorch | 0cd2c2ac638d22745483b3d12ccb5c160f0bf3c3 | [
"MIT"
] | 10 | 2018-05-18T07:34:56.000Z | 2021-01-16T00:02:12.000Z | import sys
from os import path
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
import unittest
from csv_dataset import *
class TestMarket(unittest.TestCase):
    """Smoke test for csv_dataset.make_dataset on the Market-1501 CSV."""

    def test_make_dataset(self):
        # NOTE(review): hard-coded user paths -- this test only runs on a
        # machine with these datasets checked out at these locations.
        csv_file = "~/Projects/cupsizes/data/market1501_train.csv"
        data_dir = "~/Projects/triplet-reid-pytorch/datasets/Market-1501"
        limit = 200
        data = make_dataset(csv_file, data_dir, limit)
        # make_dataset must honour the requested entry limit exactly.
        self.assertEqual(len(data), limit)


if __name__ == "__main__":
    unittest.main()
| 30.8 | 97 | 0.600649 | 68 | 616 | 5.132353 | 0.558824 | 0.063037 | 0.08596 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025522 | 0.300325 | 616 | 19 | 98 | 32.421053 | 0.784223 | 0 | 0 | 0 | 0 | 0 | 0.170455 | 0.157468 | 0 | 0 | 0 | 0 | 0.071429 | 1 | 0.071429 | false | 0 | 0.285714 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77bb5640e0e164a57f7b3d1cb8fe5afab8675900 | 3,695 | py | Python | help/forms.py | pincoin/rakmai | d9daa399aff50712a86b2dec9d94e622237b25b0 | [
"MIT"
] | 11 | 2018-04-02T16:36:19.000Z | 2019-07-10T05:54:58.000Z | help/forms.py | pincoin/rakmai | d9daa399aff50712a86b2dec9d94e622237b25b0 | [
"MIT"
] | 22 | 2019-01-01T20:40:21.000Z | 2022-02-10T08:06:39.000Z | help/forms.py | pincoin/rakmai | d9daa399aff50712a86b2dec9d94e622237b25b0 | [
"MIT"
] | 4 | 2019-03-12T14:24:37.000Z | 2022-01-07T16:20:22.000Z | from crispy_forms.helper import (
FormHelper, Layout
)
from crispy_forms.layout import (
HTML, Fieldset, Submit
)
from django import forms
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from shop import models
class FaqMessageAdminForm(forms.ModelForm):
    """Admin form for FAQ entries: category/title/content plus the owning
    store and display position."""

    class Meta:
        model = models.FaqMessage
        fields = ('category', 'title', 'content', 'store', 'position')
class NoticeMessageAdminForm(forms.ModelForm):
    """Admin form for store notices: category/title/content plus the
    owning store."""

    class Meta:
        model = models.NoticeMessage
        fields = ('category', 'title', 'content', 'store')
class CustomerQuestionForm(forms.ModelForm):
    """Customer-facing question form rendered with a crispy-forms layout."""

    def __init__(self, *args, **kwargs):
        # Pull routing context out of kwargs before ModelForm sees them.
        self.store_code = kwargs.pop('store_code', 'default')
        self.page = kwargs.pop('page', 1)
        super().__init__(*args, **kwargs)

        list_url = reverse('help:question-list', args=(self.store_code,))
        footer = HTML('''
            <button type="submit" class="btn btn-block btn-lg btn-primary my-2">
                <i class="fas fa-pencil-alt"></i> {}
            </button>
            <hr>
            <a href="{}?page={}" class="btn btn-block btn-lg btn-outline-secondary my-2">
                <i class="fas fa-list"></i> {}
            </a>
            '''.format(_('Write'), list_url, self.page, _('List')))

        self.helper = FormHelper()
        self.helper.include_media = False
        self.helper.form_class = 'form'
        self.helper.layout = Layout(
            Fieldset(
                '',  # empty legend keeps the fieldset heading hidden
                'category',
                'title',
                'content',
            ),
            footer,
        )

    class Meta:
        model = models.CustomerQuestion
        # 'owner' and 'store' are assigned by the view, not the form.
        fields = (
            'category', 'title', 'content',
        )
class TestimonialsForm(forms.ModelForm):
    """Customer-facing testimonial form rendered with a crispy-forms layout."""

    def __init__(self, *args, **kwargs):
        # Pull routing context out of kwargs before ModelForm sees them.
        self.store_code = kwargs.pop('store_code', 'default')
        self.page = kwargs.pop('page', 1)
        super().__init__(*args, **kwargs)

        # Hide the model's default help text under the title field.
        self.fields['title'].help_text = False

        list_url = reverse('help:testimonials-list', args=(self.store_code,))
        footer = HTML('''
            <button type="submit" class="btn btn-block btn-lg btn-primary my-2">
                <i class="fas fa-pencil-alt"></i> {}
            </button>
            <hr>
            <a href="{}?page={}" class="btn btn-block btn-lg btn-outline-secondary my-2">
                <i class="fas fa-list"></i> {}
            </a>
            '''.format(_('Write'), list_url, self.page, _('List')))

        self.helper = FormHelper()
        self.helper.include_media = False
        self.helper.form_class = 'form'
        self.helper.layout = Layout(
            Fieldset(
                '',  # empty legend keeps the fieldset heading hidden
                'title',
                'content',
            ),
            footer,
        )

    class Meta:
        model = models.Testimonials
        # 'owner' and 'store' are assigned by the view, not the form.
        fields = (
            'title', 'content',
        )
class TestimonialsAnswerForm(forms.ModelForm):
    """Form for posting an answer to a specific testimonial."""

    def __init__(self, *args, **kwargs):
        # Routing context used to build the POST target URL.
        self.store_code = kwargs.pop('store_code', 'default')
        self.testimonial = kwargs.pop('testimonial', 0)
        super().__init__(*args, **kwargs)

        helper = FormHelper()
        helper.form_action = reverse(
            'help:testimonials-answer', args=(self.store_code, self.testimonial)
        )
        helper.add_input(
            Submit('submit', _('Post Answer'), css_class='btn btn-lg btn-block btn-primary')
        )
        helper.form_method = 'POST'
        self.helper = helper

    class Meta:
        model = models.TestimonialsAnswer
        fields = ['content']
| 31.853448 | 118 | 0.568065 | 394 | 3,695 | 5.19797 | 0.238579 | 0.058594 | 0.041016 | 0.048828 | 0.638672 | 0.561035 | 0.527832 | 0.527832 | 0.527832 | 0.495605 | 0 | 0.002654 | 0.286062 | 3,695 | 115 | 119 | 32.130435 | 0.773692 | 0.030041 | 0 | 0.555556 | 0 | 0.044444 | 0.286952 | 0.024588 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033333 | false | 0 | 0.066667 | 0 | 0.211111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77bc14e7c032c836a4310a414a8a832122f26169 | 642 | py | Python | json2csv.py | bdunnette/derby_name_generator | 51690025d7c4e1a0dba8da71fbb85570baaeeeca | [
"MIT"
] | null | null | null | json2csv.py | bdunnette/derby_name_generator | 51690025d7c4e1a0dba8da71fbb85570baaeeeca | [
"MIT"
] | 4 | 2020-01-28T22:57:49.000Z | 2020-11-13T18:22:23.000Z | json2csv.py | bdunnette/derby_name_generator | 51690025d7c4e1a0dba8da71fbb85570baaeeeca | [
"MIT"
] | null | null | null | import csv
import json

# Convert each JSON list of names under data/ into a CSV with a
# `registered` flag (1 only for the registered_names file).
json_files = ['generated_names', 'registered_names', 'used_names']
for jf in json_files:
    in_path = 'data/{}.json'.format(jf)
    out_path = 'data/{}.csv'.format(jf)
    # Print the paths themselves (the old code printed the file objects).
    print('{} > {}'.format(in_path, out_path))
    # `with` guarantees both files are closed/flushed even on error; the
    # original never closed them. newline='' is required for csv writers.
    with open(in_path, 'r') as infile, open(out_path, 'w', newline='') as outfile:
        writer = csv.DictWriter(outfile, fieldnames=['name', 'registered'])
        writer.writeheader()
        registered = 1 if jf == 'registered_names' else 0
        for row in json.load(infile):
            row_dict = {'name': row, 'registered': registered}
            print(row, row_dict)
            writer.writerow(row_dict)
| 32.1 | 72 | 0.582555 | 76 | 642 | 4.776316 | 0.434211 | 0.096419 | 0.093664 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004167 | 0.252336 | 642 | 19 | 73 | 33.789474 | 0.752083 | 0 | 0 | 0 | 1 | 0 | 0.203852 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.117647 | 0 | 0.117647 | 0.117647 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
77bdb223eed0186fb8d9de25b3c3b87e0f4d5217 | 4,013 | py | Python | file_manager/migrations/0059_auto_20220208_1641.py | xiaofengxie128/Proteomic-Data-Manager | 79756c7021b1d5e4cc4cdb26d741f6ea18846a02 | [
"Apache-2.0"
] | null | null | null | file_manager/migrations/0059_auto_20220208_1641.py | xiaofengxie128/Proteomic-Data-Manager | 79756c7021b1d5e4cc4cdb26d741f6ea18846a02 | [
"Apache-2.0"
] | null | null | null | file_manager/migrations/0059_auto_20220208_1641.py | xiaofengxie128/Proteomic-Data-Manager | 79756c7021b1d5e4cc4cdb26d741f6ea18846a02 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 3.2.7 on 2022-02-08 23:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 3.2.7, 2022-02-08).

    Adds two serial-number text fields to RawFile and refreshes the
    date-stamped ``upload_to`` paths on the analysis-queue file fields.
    Do not hand-edit the operations; regenerate with makemigrations.
    """

    dependencies = [
        ('file_manager', '0058_auto_20220118_1418'),
    ]

    operations = [
        # New serial-number fields on the raw-file record.
        migrations.AddField(
            model_name='rawfile',
            name='column_sn',
            field=models.TextField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='rawfile',
            name='spe_sn',
            field=models.TextField(blank=True, max_length=100, null=True),
        ),
        # Remaining AlterField operations only refresh the generated,
        # date-stamped upload_to paths (2022/2/8).
        migrations.AlterField(
            model_name='maxquantqueue',
            name='evidence_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/maxquant/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='other_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/maxquant/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='peptide_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/maxquant/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='protein_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/maxquant/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='setting_xml',
            field=models.FileField(blank=True, null=True, upload_to='maxquant_xml/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='msfraggerqueue',
            name='ion_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/msfragger/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='msfraggerqueue',
            name='peptide_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/msfragger/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='msfraggerqueue',
            name='protein_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/msfragger/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='msfraggerqueue',
            name='psm_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/msfragger/2022/2/8'),
        ),
        # NOTE(review): the stray space in 'notefiles/2022/ 2/8' is in the
        # generated code as-is — presumably unintended; confirm upstream.
        migrations.AlterField(
            model_name='notefile',
            name='notefile',
            field=models.FileField(blank=True, null=True, upload_to='notefiles/2022/ 2/8'),
        ),
        migrations.AlterField(
            model_name='pdqueue',
            name='consensus_method',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/proteindiscoverer/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='pdqueue',
            name='export_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/proteindiscoverer/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='pdqueue',
            name='processing_method',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/proteindiscoverer/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='pdqueue',
            name='result_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/proteindiscoverer/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='rawfile',
            name='note_file',
            field=models.ManyToManyField(blank=True, to='file_manager.NoteFile'),
        ),
        migrations.AlterField(
            model_name='spectrominequeue',
            name='result_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/spectromine/2022/2/8'),
        ),
    ]
| 38.586538 | 108 | 0.592574 | 411 | 4,013 | 5.644769 | 0.177616 | 0.069828 | 0.172414 | 0.2 | 0.830172 | 0.830172 | 0.786207 | 0.786207 | 0.786207 | 0.704741 | 0 | 0.04396 | 0.28009 | 4,013 | 103 | 109 | 38.961165 | 0.759086 | 0.011214 | 0 | 0.742268 | 1 | 0 | 0.223903 | 0.115734 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.010309 | 0 | 0.041237 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
77be2f55131b3349b56faffc31d3bd578f487785 | 9,334 | py | Python | tests/tests_user_demo_orders_add.py | Bitsgap/Bitsgap-api-py | 4102e578c7958dfabb945eb17fdff4ef6cab8fdd | [
"MIT"
] | 2 | 2021-05-12T01:14:24.000Z | 2021-12-08T14:36:00.000Z | tests/tests_user_demo_orders_add.py | Bitsgap/Bitsgap-rest-api-py | 4102e578c7958dfabb945eb17fdff4ef6cab8fdd | [
"MIT"
] | null | null | null | tests/tests_user_demo_orders_add.py | Bitsgap/Bitsgap-rest-api-py | 4102e578c7958dfabb945eb17fdff4ef6cab8fdd | [
"MIT"
] | null | null | null | import asyncio
import logging
from unittest import TestCase
from rest_api_py_lib import BitsgapClient
from tests.keys import public_key,private_key
class TestRestUserDemoOrdersAdd(TestCase):
    """Place order on demo market."""

    def _demo_orders_add(self, market, pair, price, amount, side, ord_type):
        """Submit one demo order and return the raw API response.

        Runs in a fresh event loop and pauses one second afterwards so
        consecutive tests do not hammer the API (same pacing as before).
        """
        async def run_test():
            lib = BitsgapClient(public_key, private_key)
            result = lib.demo_orders_add(market, pair, amount, price, side, ord_type)
            logging.debug(result)
            await asyncio.sleep(1)
            return result

        # asyncio.coroutine() was removed in Python 3.11; asyncio.run()
        # creates, runs and closes a fresh event loop exactly like the old
        # new_event_loop/run_until_complete/close boilerplate did.
        return asyncio.run(run_test())

    def _assert_error(self, result, check_message=True):
        """Common assertions for a rejected order."""
        self.assertIn('time', result)
        self.assertIn('status', result)
        self.assertTrue(result['status'] == 'error')
        if check_message:
            self.assertIn('message', result)

    def _assert_opened(self, result):
        """Common assertions for an accepted (opened) order."""
        self.assertIn('status', result)
        self.assertTrue(result['status'] == 'ok')
        self.assertIn('time', result)
        self.assertIn('data', result)
        data = result['data']
        self.assertIsNotNone(data)
        # check fields
        for field in ('id', 'price', 'amount', 'pair', 'type', 'side', 'uts', 'state'):
            self.assertIn(field, data)
        # Kept from the original: substring membership check on 'opened'.
        self.assertIn(data['state'], 'opened')

    def test_user_demo_orders_add_valid_data(self):
        """Valid order data opens an order."""
        result = self._demo_orders_add('okex', 'ETH_BTC', '0.015', '1', 'buy', 'limit')
        self._assert_opened(result)

    def test_user_demo_orders_add_invalid_market(self):
        """Invalid market."""
        result = self._demo_orders_add('no_market', 'ETH_BTC', '0.015', '0.1', 'buy', 'limit')
        self._assert_error(result)

    def test_user_demo_orders_add_invalid_pair(self):
        """Invalid pair."""
        result = self._demo_orders_add('okex', 'no_pair', '0.015', '0.1', 'buy', 'limit')
        self._assert_error(result)

    def test_user_demo_orders_add_invalid_side(self):
        """Invalid side."""
        result = self._demo_orders_add('okex', 'ETH_BTC', '0.015', '0.1', 'invalid', 'limit')
        self._assert_error(result)

    def test_user_demo_orders_add_invalid_type(self):
        """Invalid order type."""
        result = self._demo_orders_add('okex', 'ETH_BTC', '0.015', '0.1', 'buy', 'invalid')
        self._assert_error(result)

    def test_user_demo_orders_add_invalid_price_format(self):
        """Invalid price format."""
        result = self._demo_orders_add('okex', 'ETH_BTC', '0*250', '0.1', 'buy', 'limit')
        self._assert_error(result)

    def test_user_demo_orders_add_invalid_price_zero(self):
        """Zero price is accepted by the demo API and opens an order."""
        result = self._demo_orders_add('okex', 'ETH_BTC', '0', '0.1', 'buy', 'limit')
        self._assert_opened(result)

    def test_user_demo_orders_add_invalid_amount_format(self):
        """Invalid amount format."""
        result = self._demo_orders_add('okex', 'ETH_BTC', '0.015', '100*', 'buy', 'limit')
        self._assert_error(result)

    def test_user_demo_orders_add_invalid_amount_zero(self):
        """Invalid zero amount — API reports an error without a message."""
        result = self._demo_orders_add('okex', 'ETH_BTC', '0.015', '0', 'buy', 'limit')
        self._assert_error(result, check_message=False)
| 29.352201 | 85 | 0.561174 | 1,027 | 9,334 | 4.87147 | 0.079844 | 0.097142 | 0.046772 | 0.037977 | 0.946032 | 0.946032 | 0.941035 | 0.909055 | 0.870877 | 0.870877 | 0 | 0.009399 | 0.327512 | 9,334 | 317 | 86 | 29.444795 | 0.787637 | 0.005785 | 0 | 0.890909 | 0 | 0 | 0.065624 | 0 | 0 | 0 | 0 | 0 | 0.259091 | 1 | 0.040909 | false | 0 | 0.022727 | 0 | 0.068182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
77be68f59af465d19126dafee090ea62f5ef4125 | 2,176 | py | Python | runway/hooks/cleanup_s3.py | troyready/runway | 4fd299961a4b73df39e14f4f19a7236f7be17dd8 | [
"Apache-2.0"
] | null | null | null | runway/hooks/cleanup_s3.py | troyready/runway | 4fd299961a4b73df39e14f4f19a7236f7be17dd8 | [
"Apache-2.0"
] | null | null | null | runway/hooks/cleanup_s3.py | troyready/runway | 4fd299961a4b73df39e14f4f19a7236f7be17dd8 | [
"Apache-2.0"
] | null | null | null | """CFNgin hook for cleaning up resources prior to CFN stack deletion."""
# TODO move to runway.cfngin.hooks on next major release
import logging
from botocore.exceptions import ClientError
from ..cfngin.lookups.handlers.output import OutputLookup
from ..cfngin.lookups.handlers.rxref import RxrefLookup
from ..cfngin.lookups.handlers.xref import XrefLookup
LOGGER = logging.getLogger(__name__)


def purge_bucket(context, provider, **kwargs):
    """Delete objects in bucket."""
    session = context.get_session()

    bucket_name = kwargs.get("bucket_name")
    if not bucket_name:
        # No explicit name supplied: resolve it via one of the lookups,
        # checked in the same priority order as before.
        lookups = (
            ("bucket_output_lookup", OutputLookup.handle),
            ("bucket_rxref_lookup", RxrefLookup.handle),
            ("bucket_xref_lookup", XrefLookup.handle),
        )
        for kwarg_key, handler in lookups:
            value = kwargs.get(kwarg_key)
            if value:
                break
        else:
            LOGGER.error("bucket_name required but not defined")
            return False

        stack_name = context.get_fqn(value.split("::")[0])
        try:
            # Exit early if the bucket's stack is already deleted.
            session.client("cloudformation").describe_stacks(StackName=stack_name)
        except ClientError as exc:
            if "does not exist" in exc.response["Error"]["Message"]:
                LOGGER.info(
                    'stack "%s" does not exist; unable to resolve bucket name',
                    stack_name,
                )
                return True
            raise
        bucket_name = handler(value, provider=provider, context=context)

    s3_resource = session.resource("s3")
    try:
        s3_resource.meta.client.head_bucket(Bucket=bucket_name)
    except ClientError as exc:
        if exc.response["Error"]["Code"] == "404":
            LOGGER.info(
                'bucket "%s" does not exist; unable to complete purge', bucket_name
            )
            return True
        raise

    s3_resource.Bucket(bucket_name).object_versions.delete()
    return True
| 35.096774 | 83 | 0.634191 | 249 | 2,176 | 5.39759 | 0.393574 | 0.066964 | 0.044643 | 0.055804 | 0.147321 | 0.072917 | 0 | 0 | 0 | 0 | 0 | 0.005041 | 0.27068 | 2,176 | 61 | 84 | 35.672131 | 0.84184 | 0.091912 | 0 | 0.270833 | 0 | 0 | 0.171079 | 0 | 0 | 0 | 0 | 0.016393 | 0 | 1 | 0.020833 | false | 0 | 0.104167 | 0 | 0.208333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
77bf2ecef9748586b6a2dac8896799069e53843d | 5,800 | py | Python | mediapub_extensions/ApiWrappers/Snowflake.py | seattletimes/mediapub_extensions | 5b3dd3862f2ef59d494efe57bee679f0787952f4 | [
"Apache-2.0"
] | null | null | null | mediapub_extensions/ApiWrappers/Snowflake.py | seattletimes/mediapub_extensions | 5b3dd3862f2ef59d494efe57bee679f0787952f4 | [
"Apache-2.0"
] | 1 | 2019-08-21T21:53:30.000Z | 2019-08-21T21:53:30.000Z | mediapub_extensions/ApiWrappers/Snowflake.py | seattletimes/mediapub_extensions | 5b3dd3862f2ef59d494efe57bee679f0787952f4 | [
"Apache-2.0"
] | null | null | null | import snowflake.connector as snowcon
import json
import getpass
import sys
class Snowflake():
    """
    Handles connections and queries to Snowflake Computing.

    This class contains methods to connect to the Snowflake service and run
    queries on it.

    Requires:
        snowflake-connector-python: pip install snowflake-connector-python

    Attributes:
        user (str): The Snowflake username
        password (str): The Snowflake password
        account (str): The Snowflake account ID
        ctx (snowflake.connector): The connection to Snowflake
    """
    user = None
    password = None
    account = None
    ctx = None

    def __init__(self, username=None, password=None, account=None, role='STAGE_R', db='ST_WEB', warehouse='ST_ANALYTICSAPI', schema='WEB_STAGE_META', verbose=False):
        """
        Create a Snowflake connection for this instance.

        If username, password and account are not all supplied, the user is
        prompted for them on the command line.

        Args:
            username (str): The Snowflake username
            password (str): The Snowflake password
            account (str): The Snowflake account ID
            role (str): Role to USE for the session
            db (str): Database to USE for the session
            warehouse (str): Warehouse to USE for the session
            schema (str): Schema to USE for the session
            verbose (bool): Print progress messages when True
        """
        self.verbose = verbose
        # First try passed-in credentials, otherwise prompt the user.
        if username is not None and password is not None and account is not None:
            self.set_creds_by_param(username, password, account)
        else:
            self.set_creds()
        self.set_environment_settings(role, db, warehouse, schema)
        if self.verbose:
            print("connecting to Snowflake...")
        self.ctx = snowcon.connect(user=self.user, password=self.password, account=self.account)

    #####################################################
    # Connection Methods
    #####################################################
    def set_creds(self):
        """Prompt interactively for the account, username and password."""
        self.account = input("Snowflake Account = ")
        self.user = input("Snowflake UserName = ")
        self.password = getpass.getpass()

    def set_creds_by_param(self, username, passwd, account):
        """Set the username, password and account from the args."""
        self.account = account
        self.user = username
        self.password = passwd

    def set_environment_settings(self, role='STAGE_R', db='ST_WEB', warehouse='ST_ANALYTICSAPI', schema='WEB_STAGE_META'):
        """
        Store the USE statements that select the Snowflake environment.

        Args:
            role (str): Role name for the USE ROLE statement.
            db (str): Database name for the USE statement.
            warehouse (str): Warehouse name for the USE WAREHOUSE statement.
            schema (str): Schema name for the USE SCHEMA statement.
        """
        self.ROLE = "USE ROLE {};".format(role)
        self.DB = "USE {};".format(db)
        self.WAREHOUSE = "USE WAREHOUSE {};".format(warehouse)
        self.SCHEMA = "USE SCHEMA {};".format(schema)

    #####################################################
    # Query Methods
    #####################################################
    def run_query(self, SQL_CMD="SELECT current_version()", ignore_results=False):
        """
        Run a supplied query on Snowflake.

        NOTE: no checking is done on the supplied SQL; limits are enforced
        by Snowflake roles.

        Args:
            SQL_CMD (str): The SQL command to be run.
            ignore_results (bool): When True, return True without fetching.

        Returns:
            list: Row tuples from the query, or True when ignore_results.
        """
        cs = self.ctx.cursor()
        try:
            cs.execute(self.ROLE)
            cs.execute(self.DB)
            cs.execute(self.SCHEMA)
            cs.execute(self.WAREHOUSE)
            cs.execute(SQL_CMD)
            if ignore_results:
                return True
            return cs.fetchall()
        finally:
            cs.close()

    def push_files(self, PATH, stage):
        """Push a local file to Snowflake stage S_<stage> (auto-compressed)."""
        # (Unreachable `pass` after the return removed.)
        SQL_PUT = "put file://{} @S_{} auto_compress=true;".format(PATH, stage)
        return self.run_query(SQL_PUT, ignore_results=True)

    def process_files(self, table, stage, format, on_error="SKIP_FILE", purge=True):
        """
        COPY staged files from S_<stage> into `table`.

        Bug fix: the boolean `purge` was previously concatenated directly
        into the SQL string, raising TypeError for the default purge=True;
        it is now rendered via str.format (Snowflake keywords are
        case-insensitive, so "True"/"False" parse fine).
        """
        SQL_COPY = ("copy into {} "
                    "from @S_{} "
                    "file_format = (format_name = {}) "
                    "ON_ERROR = {} "
                    "PURGE = {};").format(table, stage, format, on_error, purge)
        return self.run_query(SQL_COPY, ignore_results=True)

    ############################################################
    # Queries
    ############################################################
    def get_snowflake_version(self):
        """Return the Snowflake version number."""
        return self.run_query()[0][0]
if __name__ == '__main__':
    # Running the module directly performs a quick connection smoke test.
    print("Don't call directly. Install package and import as a class.")
    demo = Snowflake()
    demo.set_environment_settings("ETL_PULL", "ST_WEB", "ST_WEB_WH_PROD_ETL", "WEB_PROD")
    print(demo.get_snowflake_version())
| 34.52381 | 165 | 0.58 | 666 | 5,800 | 4.936937 | 0.273273 | 0.036496 | 0.027372 | 0.016727 | 0.131995 | 0.085766 | 0.068127 | 0.068127 | 0.068127 | 0.03528 | 0 | 0.000477 | 0.276552 | 5,800 | 167 | 166 | 34.730539 | 0.783127 | 0.365345 | 0 | 0 | 0 | 0 | 0.151331 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.126984 | false | 0.15873 | 0.079365 | 0 | 0.349206 | 0.047619 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
77c1415a212d5d1d022140adadcb11fdbf2724c0 | 653 | py | Python | UVa Online Judge/v129/12916.py | mjenrungrot/algorithm | e0e8174eb133ba20931c2c7f5c67732e4cb2b703 | [
"MIT"
] | null | null | null | UVa Online Judge/v129/12916.py | mjenrungrot/algorithm | e0e8174eb133ba20931c2c7f5c67732e4cb2b703 | [
"MIT"
] | null | null | null | UVa Online Judge/v129/12916.py | mjenrungrot/algorithm | e0e8174eb133ba20931c2c7f5c67732e4cb2b703 | [
"MIT"
] | null | null | null | # =============================================================================
# Author: Teerapat Jenrungrot - https://github.com/mjenrungrot/
# FileName: 12916.py
# Description: UVa Online Judge - 12916
# =============================================================================
def smallest_period(s):
    """Return the length of the shortest prefix whose repetition forms s.

    If s is not a repetition of any proper prefix, return len(s).
    """
    half = len(s) // 2
    for k in range(1, half + 1):
        # Only prefix lengths that divide len(s) can tile the string.
        if len(s) % k != 0:
            continue
        if s[:k] * (len(s) // k) == s:
            return k
    return len(s)


def main():
    """Read the case count, then answer one string per line."""
    n = int(input())
    for _ in range(n):
        print(smallest_period(input()))


if __name__ == "__main__":
    main()
77c2a84ba94b9959e2b284142d43e8fadfa6f26f | 268 | py | Python | test.py | adamrehn/docker-script | 440b2a119ff304cff3c698a5b9d1e679154ba5a5 | [
"MIT"
] | 5 | 2018-08-28T15:09:13.000Z | 2020-08-05T13:20:26.000Z | test.py | adamrehn/docker-script | 440b2a119ff304cff3c698a5b9d1e679154ba5a5 | [
"MIT"
] | null | null | null | test.py | adamrehn/docker-script | 440b2a119ff304cff3c698a5b9d1e679154ba5a5 | [
"MIT"
] | 3 | 2019-06-17T18:14:49.000Z | 2019-11-07T10:23:33.000Z | #!/usr/bin/env docker-script
#!python:latest python
import os, sys

# Report guest OS details plus host/guest working directories for the
# docker-script demo. HOST_CWD is injected into the environment.
sysname, _, release, _, machine = os.uname()
print('Uname: ', sysname, release, machine)
print('Guest CWD:', os.getcwd())
print('Host CWD: ', os.environ['HOST_CWD'])
print('argv: ', sys.argv)
| 24.363636 | 70 | 0.679104 | 38 | 268 | 4.763158 | 0.578947 | 0.055249 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126866 | 268 | 10 | 71 | 26.8 | 0.773504 | 0.179104 | 0 | 0 | 0 | 0 | 0.220183 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0.666667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
77c4906ed898f5765c5f91376752902605a14837 | 9,340 | py | Python | mathscup v2.py | bosscoder88/maths-cup | c3d1d4755286c6fa14a877579cdbdc11402f5c72 | [
"CC0-1.0"
] | 1 | 2020-12-13T19:28:32.000Z | 2020-12-13T19:28:32.000Z | mathscup v2.py | bosscoder88/maths-cup | c3d1d4755286c6fa14a877579cdbdc11402f5c72 | [
"CC0-1.0"
] | null | null | null | mathscup v2.py | bosscoder88/maths-cup | c3d1d4755286c6fa14a877579cdbdc11402f5c72 | [
"CC0-1.0"
] | null | null | null | #### Startup ##
import random
from time import sleep as s

names = []
thirdPlace = []

# Gather all 32 contestants.
for entry_no in range(1, 33):
    print("Name", entry_no)
    names.append(input("Please enter a name: "))
# print(names)  # debug aid: uncomment to verify every entered name

print("Welcome to Maths Cup!")
print("Today, the rounds will be as follows:\n")
print("""Round 1: Multiplication
Round 2: Division
Quarter Final: Bidmas
Semi Final: HCF and LCM
Third place/Final: Mean\n\n\n""")
s(5)
######################################
######################## Round 32 ####
######################################
print("Round 32")
for x in range(1, 17):
random.shuffle(names)
print("""==========
Question""", x, "\n==========")
no1 = random.randint(15, 50)
no2 = random.randint(10,30)
p1 = names[0]
players = int(len(names))
player = int(players - 1)
p2 = names[player]
print(p1, "is player 1, and", p2, "is player 2!")
input("Please press enter when you are ready to recieve your question.\n")
print("##########################\n")
print("Your question is:", no1, "x", no2, "\n")
print("##########################\n")
won = int(input("Please type either 1 or 2 depending on who won the round. "))
print("\nThe correct answer was indeed", no1*no2)
if won == 1:
del names[player]
print("\nCongratulations,", p1, "!")
print("Better luck next time,", p2, "!\n\n")
if won == 2:
del names[0]
print("Congratulations,", p2, "!")
print("Better luck next time,", p1, "!\n\n")
print("Who's made it through to the next round?")
for x in names:
print(x)
s(5)
######################################
######################## Round 16 ####
######################################
print("\n\n\nRound 16!")
print("For this round, please round your answers to 2 d.p. where necessary.")
for x in range(1, 9):
random.shuffle(names)
print("""==========
Question""", x, "\n==========")
no1 = random.randint(15, 50)
no2 = random.randint(4,12)
p1 = names[0]
players = int(len(names))
player = int(players - 1)
p2 = names[player]
print(p1, "is player 1, and", p2, "is player 2!")
input("Please press enter when you are ready to recieve your question.\n")
print("##########################\n")
print("Your question is:", no1, "รท", no2, "\n")
print("##########################\n")
won = int(input("Please type either 1 or 2 depending on who won the round. "))
print("\nThe correct answer was indeed", no1/no2)
if won == 1:
del names[player]
print("\nCongratulations,", p1, "!")
print("Better luck next time,", p2, "!\n\n")
if won == 2:
del names[0]
print("Congratulations,", p2, "!")
print("Better luck next time,", p1, "!\n\n")
print("Who's made it through to the next round?")
for x in names:
print(x)
s(5)
######################################
################### Quarter Final ####
######################################
print("\n\n\nQuarter Final!")
for x in range(1, 5):
random.shuffle(names)
print("""==========
Question""", x, "\n==========")
no1 = random.randint(15, 50)
no2 = random.randint(4, 12)
no3 = random.randint(10, 20)
p1 = names[0]
players = int(len(names))
player = int(players - 1)
p2 = names[player]
print(p1, "is player 1, and", p2, "is player 2!")
input("Please press enter when you are ready to recieve your question.\n")
print("##########################\n")
print("Your question is:", no1, "-", no2, "x", no3, "\n")
print("##########################\n")
won = int(input("Please type either 1 or 2 depending on who won the round. "))
print("\nThe correct answer was indeed", no1-no2*no3)
if won == 1:
del names[player]
print("\nCongratulations,", p1, "!")
print("Better luck next time,", p2, "!\n\n")
if won == 2:
del names[0]
print("Congratulations,", p2, "!")
print("Better luck next time,", p1, "!\n\n")
print("Who's made it through to the next round?")
for x in names:
print(x)
s(5)
######################################
###################### Semi Final ####
######################################
print("\n\n\nSemi Final!")
for x in range(1, 3):
random.shuffle(names)
print("""==========
Question""", x, "\n==========")
numbers1 = ["4", "6", "8", "10", "12", "14", "16", "18", "20", "24"]
numbers2 = ["8", "18", "36", "40", "28", "32", "54"]
no1 = random.choice(numbers1)
no2 = random.choice(numbers2)
p1 = names[0]
players = int(len(names))
player = int(players - 1)
p2 = names[player]
print(p1, "is player 1, and", p2, "is player 2!")
input("Please press enter when you are ready to recieve your question.\n")
print("##########################\n")
print("Please find the HCF and LCM of these numbers:", no1, "and", no2)
print("\n##########################\n")
won = int(input("Please type either 1 or 2 depending on who won the round. "))
###
number1 = int(no1)
number2 = int(no2)
temp1 = number1
temp2 = number2
while temp2 != 0:
t = temp2
temp2 = temp1%temp2
temp1 = t
hcf = temp1
lcm = (number1*number2)/hcf
print("The HCF was", hcf, "and the LCM was", lcm, "\n\n")
if won == 1:
thirdPlace.append(p2)
del names[player]
print("\nCongratulations,", p1, "!")
print("Better luck next time,", p2, "!\n\n")
if won == 2:
thirdPlace.append(p1)
del names[0]
print("Congratulations,", p2, "!")
print("Better luck next time,", p1, "!\n\n")
print("Who's made it through to the next round?")
for x in names:
print(x)
print("\nHowever, this time, these people made it to the 3rd place round!")
for entry in thirdPlace:
print(entry)
s(5)
######################################
############## 3rd place play-off ####
######################################
# 3rd place play-off: a single "find the mean" question between the two
# semi-final losers collected in thirdPlace.
print("\n\n\n3rd Place play-off!")
# BUG FIX: the play-off is contested by the thirdPlace players, so shuffle
# that list -- the original shuffled `names` (the finalists) by mistake.
random.shuffle(thirdPlace)
print("""==========
Question 1
==========""")
# Build a 9-number sequence whose mean the players must compute.
numbers = []
for x in range(1, 10):
    numbers.append(random.randint(2, 14))
# print(numbers)  ## again this is for debugging, uncomment to check the list of numbers.
p1 = thirdPlace[0]
player = len(thirdPlace) - 1
p2 = thirdPlace[player]
print(p1, "is player 1, and", p2, "is player 2!")
input("Please press enter when you are ready to receive your question.\n")
print("##########################\n")
print("Please find the mean of this sequence:")
for entry in numbers:
    print(entry, end=', ')
print("\n\n##########################\n")
won = int(input("Please type either 1 or 2 depending on who won the round. "))
print("\nThe correct answer was indeed", sum(numbers)/len(numbers))
if won == 1:
    print("\nCongratulations,", p1, "! You are third place in today's Maths Cup!")
    print("Better luck next time,", p2, "!\n\n")
if won == 2:
    print("Congratulations,", p2, "! You are third place in today's Maths Cup!")
    print("Better luck next time,", p1, "!\n\n")
print("Let's go to the final and see", names[0], "and", names[1], "battle it out!")
s(5)
######################################
####################### The Final ####
######################################
# The final: best-of-3 "find the mean" questions between the two remaining
# players in `names`.  First to 2 wins takes the cup.
print("\n\n\nWelcome to the final!\n")
print("""In the final, there will be 3 questions.
Whoever gets two of those questions correct will be the winner of today's Maths Cup.\n""")
random.shuffle(names)
p1won = 0
p2won = 0
for question in range(1, 4):
    print("""==========
Question""", question, "\n==========")
    # Fresh 9-number sequence for each question.  (The inner loop variable no
    # longer shadows the question counter, which the original's reuse of `x` did.)
    numbers = []
    for _ in range(1, 10):
        numbers.append(random.randint(2, 14))
    # BUG FIX: the finalists are the survivors left in `names` (as announced
    # after the play-off), not the `thirdPlace` losers the original indexed.
    p1 = names[0]
    p2 = names[len(names) - 1]
    print(p1, "is player 1, and", p2, "is player 2!")
    input("Please press enter when you are ready to receive your question.\n")
    print("##########################\n")
    print("Please find what the mean is in this sequence:")
    for entry in numbers:
        print(entry, end=', ')
    print("\n\n##########################\n")
    won = int(input("Please type either 1 or 2 depending on who won the round. "))
    print("\nThe correct answer was indeed", sum(numbers)/len(numbers))
    if won == 1:
        p1won = p1won + 1
        if p1won < 2:
            print("Congratulations", p1, ", you have now won", p1won, "games!")
            print("You can still catch up", p2, ", you have won", p2won, "games!")
        elif p1won == 2:
            print("Congratulations", p1, "you have just won today's Maths Cup!")
            break  # champion decided -- no need to play the remaining questions
    if won == 2:
        p2won = p2won + 1
        if p2won < 2:
            print("Congratulations", p2, ", you have now won", p2won, "games!")
            print("You can still catch up", p1, ", you have won", p1won, "games!")
        elif p2won == 2:
            print("Congratulations", p2, "you have just won today's Maths Cup!")
            break  # champion decided
| 35.245283 | 109 | 0.509957 | 1,232 | 9,340 | 3.866883 | 0.148539 | 0.009656 | 0.015113 | 0.039882 | 0.678631 | 0.651973 | 0.644836 | 0.626154 | 0.6144 | 0.612511 | 0 | 0.039263 | 0.239186 | 9,340 | 264 | 110 | 35.378788 | 0.631016 | 0.029336 | 0 | 0.641256 | 0 | 0 | 0.419489 | 0.042303 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.008969 | 0 | 0.008969 | 0.390135 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
77c4ac6e0829dc7178a6b601892640733f30e784 | 2,736 | py | Python | hmrc_sdes/tests/test_client.py | uktrade/tamato | 4ba2ffb25eea2887e4e081c81da7634cd7b4f9ca | [
"MIT"
] | 14 | 2020-03-25T11:11:29.000Z | 2022-03-08T20:41:33.000Z | hmrc_sdes/tests/test_client.py | uktrade/tamato | 4ba2ffb25eea2887e4e081c81da7634cd7b4f9ca | [
"MIT"
] | 352 | 2020-03-25T10:42:09.000Z | 2022-03-30T15:32:26.000Z | hmrc_sdes/tests/test_client.py | uktrade/tamato | 4ba2ffb25eea2887e4e081c81da7634cd7b4f9ca | [
"MIT"
] | 3 | 2020-08-06T12:22:41.000Z | 2022-01-16T11:51:12.000Z | import json
import os
from hashlib import md5
from unittest.mock import Mock
import pytest
from common.tests import factories
from hmrc_sdes.api_client import HmrcSdesClient
pytestmark = pytest.mark.django_db
def test_sdes_client(responses):
    """Check that the client fetches an OAuth token and sends a well-formed
    transfer-ready notification."""
    # Stub the token endpoint; the client exchanges credentials on creation.
    responses.add(
        responses.POST,
        url="https://test-api.service.hmrc.gov.uk/oauth/token",
        json={
            "access_token": "access_token",
            "token_type": "bearer",
            "expires_in": 3600,
            "refresh_token": "refresh_token",
            "scope": "write:transfer-complete write:transfer-ready",
        },
    )
    client = HmrcSdesClient()
    assert len(responses.calls) == 1

    # Stub the transfer-ready endpoint and fire a notification for a dummy upload.
    responses.add(
        responses.POST,
        url="https://test-api.service.hmrc.gov.uk/organisations/notification/files/transfer/ready/test-srn",
    )
    upload = factories.UploadFactory.build(
        correlation_id="test-correlation-id",
        file=Mock(size=1),
        checksum="test-checksum",
    )
    client.notify_transfer_ready(upload)

    # The notification must carry the HMRC vendor content type and mirror the
    # upload's metadata in its JSON body.
    notify_request = responses.calls[1].request
    hmrc_json = "application/vnd.hmrc.1.0+json"
    assert notify_request.headers["Accept"] == hmrc_json
    assert notify_request.headers["Content-Type"] == f"{hmrc_json}; charset=UTF-8"
    expected_body = {
        "informationType": "EDM",
        "correlationID": upload.correlation_id,
        "file": {
            "fileName": upload.filename,
            "fileSize": upload.file.size,
            "checksum": upload.checksum,
            "checksumAlgorithm": "MD5",
        },
    }
    assert json.loads(notify_request.body) == expected_body
@pytest.mark.hmrc_live_api
def test_api_call(responses, settings):
    # Opt-in smoke test against the live HMRC sandbox API (enabled only when
    # the `hmrc_live_api` marker is selected).
    responses.add_passthru(settings.HMRC["base_url"])

    # reload settings from env, overriding test settings
    # NOTE(review): `dotenv` is never imported in this module, so this line
    # raises NameError whenever the live test actually runs -- an
    # `import dotenv` is missing at the top of the file.
    dotenv.read_dotenv(os.path.join(settings.BASE_DIR, ".env"))
    settings.HMRC["client_id"] = os.environ.get("HMRC_API_CLIENT_ID")
    settings.HMRC["client_secret"] = os.environ.get("HMRC_API_CLIENT_SECRET")
    settings.HMRC["service_reference_number"] = os.environ.get(
        "HMRC_API_SERVICE_REFERENCE_NUMBER"
    )

    # fetches OAuth2 access token on instantiation
    client = HmrcSdesClient()
    assert client.get_session().token is not None

    # check fraud prevention headers
    result = client.get(
        f"{client.base_url}/test/fraud-prevention-headers/validate",
    ).json()
    assert result.get("errors") is None

    # generate a dummy upload of an empty file with a valid checksum
    upload = factories.UploadFactory()
    upload.file = Mock(size=0)
    upload.checksum = md5("".encode("utf-8")).hexdigest()

    response = client.notify_transfer_ready(upload)
    assert response.status_code == 204  # no data on success
| 30.065934 | 108 | 0.666667 | 328 | 2,736 | 5.42378 | 0.396341 | 0.02923 | 0.025295 | 0.026981 | 0.136594 | 0.091062 | 0.062957 | 0.062957 | 0.062957 | 0.062957 | 0 | 0.009259 | 0.210526 | 2,736 | 90 | 109 | 30.4 | 0.814352 | 0.076023 | 0 | 0.090909 | 0 | 0.015152 | 0.256939 | 0.074148 | 0 | 0 | 0 | 0 | 0.106061 | 1 | 0.030303 | false | 0.015152 | 0.106061 | 0 | 0.136364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77c4b823daa25924e42b9613fee9df7dab7601e4 | 6,301 | py | Python | model/train/AdvancedEAST.py | JinGyeSetBirdsFree/FudanOCR | e6b18b0eefaf832b2eb7198f5df79e00bd4cee36 | [
"MIT"
] | 25 | 2020-02-29T12:14:10.000Z | 2020-04-24T07:56:06.000Z | model/train/AdvancedEAST.py | dun933/FudanOCR | fd79b679044ea23fd9eb30691453ed0805d2e98b | [
"MIT"
] | 33 | 2020-12-10T19:15:39.000Z | 2022-03-12T00:17:30.000Z | model/train/AdvancedEAST.py | dun933/FudanOCR | fd79b679044ea23fd9eb30691453ed0805d2e98b | [
"MIT"
] | 4 | 2020-02-29T12:14:18.000Z | 2020-04-12T12:26:50.000Z | # -*- coding: utf-8 -*-
def train_AEAST(config_file):
    """Train the AdvancedEAST text detector.

    Args:
        config_file: Path to a YACS (yaml) configuration file; its fields
            (task_id, max_epoch, decay_step, decay_rate, lr_rate, patience,
            batch_size_per_gpu, num_workers, gpu_ids, print_step, ...) drive
            the whole run.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; loop/branch nesting below follows the apparent structure.
    """
    import sys
    sys.path.append('./detection_model/AdvancedEAST')
    import os
    import argparse  # NOTE(review): unused in this function
    import time
    import numpy as numpy  # NOTE(review): odd self-alias; also unused here
    import torch
    import torch.nn as nn
    import torch.backends.cudnn as cudnn
    from torch.utils.data import DataLoader
    from torch.optim import Adam
    from torch.optim.lr_scheduler import LambdaLR
    from tqdm import tqdm

    import config as cfg
    from utils.data_utils import custom_dset, collate_fn
    from network.AEast import East
    from network.loss import LossFunc
    from utils.utils import AverageMeter, save_log
    from utils.earlystop import EarlyStopping

    # NOTE(review): CUDA_VISIBLE_DEVICES is set to "0,3" here but overwritten
    # with "1,2" further down, after the model has already been moved to CUDA.
    os.environ["CUDA_VISIBLE_DEVICES"] = "0,3"
    from yacs.config import CfgNode as CN

    def read_config_file(config_file):
        # Load the YACS config node from the given yaml file.
        f = open(config_file)
        opt = CN.load_cfg(f)
        return opt

    opt = read_config_file(config_file)

    class Wrapped:
        # Bundles loaders, model and optimisation state, and runs the
        # train/validate loop with early stopping.
        def __init__(self, train_loader, val_loader, model, criterion, optimizer, scheduler, start_epoch, val_loss_min):
            self.train_loader = train_loader
            self.val_loader = val_loader
            self.model = model
            self.criterion = criterion
            self.optimizer = optimizer
            self.scheduler = scheduler #
            self.start_epoch = start_epoch #
            # Timestamp used to tag log files for this run.
            self.tick = time.strftime("%Y%m%d-%H-%M-%S", time.localtime(time.time()))
            self.earlystopping = EarlyStopping(opt.patience, val_loss_min)

        def __call__(self):
            # Main epoch loop: validate the pretrained weights once, then
            # alternate train/validate until max_epoch or early stop.
            for epoch in tqdm(range(self.start_epoch + 1, opt.max_epoch + 1), desc='Epoch'):
                if epoch == 1:
                    tqdm.write("Validating pretrained model.")
                    self.validate(0)
                if epoch > 1 and epoch % opt.decay_step == 0:
                    tqdm.write("Learning rate - Epoch: [{0}]: {1}".format(epoch - 1,self.optimizer.param_groups[0]['lr']))
                self.train(epoch)
                if self.validate(epoch):  # if earlystop
                    print('Earlystopping activates. Training stopped.')
                    break

        def validate(self, epoch):
            # One full pass over the validation set; returns True if early
            # stopping should trigger.
            # NOTE(review): runs without torch.no_grad(), so gradients are
            # tracked unnecessarily during validation.
            losses = AverageMeter()
            self.model.eval()
            for i, (img, gt) in tqdm(enumerate(self.val_loader), desc='Val', total=len(self.val_loader)):
                img = img.cuda()
                gt = gt.cuda()
                east_detect = self.model(img)
                loss = self.criterion(gt, east_detect)
                losses.update(loss.item(), img.size(0))
            tqdm.write('Validate Loss - Epoch: [{0}] Avg Loss {1}'.format(epoch,losses.avg))
            save_log(losses, epoch, i + 1, len(self.val_loader), self.tick, split='Validation')
            # Checkpoint only when the loss improved and we are not stopping.
            earlystop, save = self.earlystopping(losses.avg)
            if not earlystop and save:
                state = {
                    'epoch': epoch,
                    'state_dict': self.model.module.state_dict(),
                    'optimizer': self.optimizer.state_dict(),
                    'scheduler': self.scheduler.state_dict(),
                    'val_loss_min': losses.avg
                }
                self.earlystopping.save_checkpoint(state, losses.avg)
            return earlystop

        def train(self, epoch):
            # One training epoch.
            losses = AverageMeter()
            self.model.train()
            for i, (img, gt) in tqdm(enumerate(self.train_loader), desc='Train', total=len(self.train_loader)):
                img = img.cuda()
                gt = gt.cuda()
                east_detect = self.model(img)
                loss = self.criterion(gt, east_detect)
                losses.update(loss.item(), img.size(0))
                # backward propagation
                # NOTE(review): scheduler.step() is called before
                # optimizer.step(); modern PyTorch expects the opposite order.
                self.scheduler.step()
                self.optimizer.zero_grad()
                loss.backward()
                self.optimizer.step()
                if (i + 1) % opt.print_step == 0:
                    tqdm.write(
                        'Training loss - Epoch: [{0}][{1}/{2}] Loss {loss.val:.4f} Avg Loss {loss.avg:.4f}'.format(
                            epoch, i + 1, len(self.train_loader), loss=losses))
            # NOTE(review): assumed to log once per epoch, mirroring validate().
            save_log(losses, epoch, i + 1, len(self.train_loader), self.tick, split='Training')

    class LRPolicy:
        # Step-decay schedule: lr multiplier = rate ** (iteration // step).
        def __init__(self, rate, step):
            self.rate = rate
            self.step = step

        def __call__(self, it):
            return self.rate ** (it // self.step)

    print('=== AdvancedEAST ===')
    print('Task id: {0}'.format(opt.task_id))
    print('=== Initialzing DataLoader ===')
    print('Multi-processing on {0} cores'.format(opt.num_process))
    batch_size = opt.batch_size_per_gpu
    trainset = custom_dset(split='train')
    valset = custom_dset(split='val')
    train_loader = DataLoader(trainset, batch_size=batch_size, shuffle=True, collate_fn=collate_fn,
                              num_workers=opt.num_workers, drop_last=False)
    val_loader = DataLoader(valset, batch_size=1, collate_fn=collate_fn, num_workers=opt.num_workers)
    print('=== Building Network ===')
    model = East()
    model = model.cuda()
    os.environ["CUDA_VISIBLE_DEVICES"] = "1,2"
    model = nn.DataParallel(model, device_ids=opt.gpu_ids)  # data parallelism
    params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print('Total parameters: {0}'.format(params))
    cudnn.benchmark = True
    criterion = LossFunc()
    optimizer = Adam(model.parameters(), lr=opt.lr_rate)
    # decay every opt.decay_step epoch / every decay_step iter
    decay_step = len(train_loader) * opt.decay_step
    scheduler = LambdaLR(optimizer, lr_lambda=LRPolicy(rate=opt.decay_rate, step=decay_step))
    print('Batch size: {0}'.format(batch_size))
    print('Initial learning rate: {0}\nDecay step: {1}\nDecay rate: {2}\nPatience: {3}'.format(
        opt.lr_rate, opt.decay_step, opt.decay_rate, opt.patience))
    start_epoch = 0
    val_loss_min = None
    print('=== Training ===')
    wrap = Wrapped(train_loader, val_loader, model, criterion, optimizer, scheduler, start_epoch, val_loss_min)
    wrap()
| 40.651613 | 123 | 0.577845 | 749 | 6,301 | 4.699599 | 0.23498 | 0.03125 | 0.025568 | 0.008523 | 0.213636 | 0.184659 | 0.164205 | 0.153977 | 0.122727 | 0.099432 | 0 | 0.008947 | 0.308205 | 6,301 | 154 | 124 | 40.915584 | 0.798578 | 0.018568 | 0 | 0.096 | 0 | 0.016 | 0.107125 | 0.004983 | 0 | 0 | 0 | 0 | 0 | 1 | 0.064 | false | 0 | 0.152 | 0.008 | 0.256 | 0.088 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77c6181206707ee1bccc346554b686167d34ae86 | 1,282 | py | Python | Python3/Books/Douson/chapter04/hero's_inventory2.py | neon1ks/Study | 5d40171cf3bf5e8d3a95539e91f5afec54d1daf3 | [
"MIT"
] | null | null | null | Python3/Books/Douson/chapter04/hero's_inventory2.py | neon1ks/Study | 5d40171cf3bf5e8d3a95539e91f5afec54d1daf3 | [
"MIT"
] | null | null | null | Python3/Books/Douson/chapter04/hero's_inventory2.py | neon1ks/Study | 5d40171cf3bf5e8d3a95539e91f5afec54d1daf3 | [
"MIT"
] | null | null | null | # Hero's Inventory 2.0
# Demonstrates tuples
# create a tuple with some items and display with a for loop
inventory = ("sword",
"armor",
"shield",
"healing potion")
print("Your items:")
for item in inventory:
print(item)
input("\nPress the enter key to continue.")
# get the length of a tuple
print("You have", len(inventory), "items in your possession.")
input("\nPress the enter key to continue.")
# test for membership with in
if "healing potion" in inventory:
print("You will live to fight another day.")
# display one item through an index
index = int(input("\nEnter the index number for an item in inventory: "))
print("At index", index, "is", inventory[index])
# display a slice
start = int(input("\nEnter the index number to begin a slice: "))
finish = int(input("Enter the index number to end the slice: "))
print("inventory[", start, ":", finish, "] is", end=" ")
print(inventory[start:finish])
input("\nPress the enter key to continue.")
# concatenate two tuples
chest = ("gold", "gems")
print("You find a chest. It contains:")
print(chest)
print("You add the contents of the chest to your inventory.")
inventory += chest
print("Your inventory is now:")
print(inventory)
input("\n\nPress the enter key to exit.")
| 27.869565 | 73 | 0.682527 | 191 | 1,282 | 4.581152 | 0.39267 | 0.041143 | 0.064 | 0.077714 | 0.195429 | 0.173714 | 0.109714 | 0 | 0 | 0 | 0 | 0.001919 | 0.187207 | 1,282 | 45 | 74 | 28.488889 | 0.837812 | 0.176287 | 0 | 0.111111 | 0 | 0 | 0.507163 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.444444 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
77c66ea30fa9ef6d5f9730bf0414d7cdb56d8682 | 3,579 | py | Python | jax_influence/selection.py | google-research/jax-influence | 74bd321156b5445bb35b9594568e4eaaec1a76a3 | [
"Apache-2.0"
] | 8 | 2022-02-17T10:19:27.000Z | 2022-03-28T12:33:57.000Z | jax_influence/selection.py | google-research/jax-influence | 74bd321156b5445bb35b9594568e4eaaec1a76a3 | [
"Apache-2.0"
] | null | null | null | jax_influence/selection.py | google-research/jax-influence | 74bd321156b5445bb35b9594568e4eaaec1a76a3 | [
"Apache-2.0"
] | 1 | 2022-03-02T14:32:56.000Z | 2022-03-02T14:32:56.000Z | # Copyright 2021 The Jax Influence Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for selecting subsets of parameters."""
from typing import Mapping, Union, Tuple
import flax
from flax import traverse_util
import jax
import jax.numpy as jnp
from jax_influence.types import PyTree
from jax_influence.types import SelectionFn
def split_params(params: PyTree,
                 select_path_fn: SelectionFn) -> Tuple[PyTree, PyTree]:
  """Decomposes parameters in two pieces using a selection function.

  Args:
    params: Frozen dict of parameters.
    select_path_fn: Evaluates to True for the parameter paths to be selected.

  Returns:
    A Tuple (`selected', `unselected'), where the first contains the parameters
    taken by the selection function and the second contains the
    remaining parameters.
  """
  flat = traverse_util.flatten_dict(flax.core.unfreeze(params))
  # Single pass: route each (path, leaf) pair into one of two buckets.
  buckets = ({}, {})
  for path, leaf in flat.items():
    buckets[0 if select_path_fn(path) else 1][path] = leaf
  selected_tree = traverse_util.unflatten_dict(buckets[0])
  unselected_tree = traverse_util.unflatten_dict(buckets[1])
  return (flax.core.FrozenDict(selected_tree),
          flax.core.FrozenDict(unselected_tree))
def merge_params(params_left: PyTree, params_right: PyTree) -> PyTree:
  """Merges two dictionaries of parameters.

  Args:
    params_left: First PyTree.
    params_right: Second PyTree.

  Returns:
    The merge of the two parameter PyTrees.

  Raises:
    AssertionError: If the two PyTrees share a parameter path.
  """
  out = traverse_util.flatten_dict(flax.core.unfreeze(params_left))
  params_right = traverse_util.flatten_dict(flax.core.unfreeze(params_right))
  for k, v in params_right.items():
    # BUG FIX: the duplicate check must run against the flattened dict `out`
    # (keyed by path tuples).  The original tested `k not in params_left`,
    # which compares a path tuple against the *nested* dict's top-level string
    # keys, so it never fired and duplicate paths were silently overwritten.
    assert k not in out
    out[k] = v
  out = traverse_util.unflatten_dict(out)
  return flax.core.FrozenDict(out)
def param_size(params: PyTree) -> int:
  """Computes the total size (number of scalar entries) of parameters."""
  # Use jax.tree_util explicitly: the top-level jax.tree_map / jax.tree_leaves
  # aliases are deprecated and removed in recent JAX releases.
  sizes = jax.tree_util.tree_map(jnp.size, params)
  return sum(jax.tree_util.tree_leaves(sizes))
def summarize_split_effect(
    params: PyTree,
    select_fn: SelectionFn) -> Mapping[str, Union[str, int, float]]:
  """Summarizes the effect of splitting parameters."""
  total_size = param_size(params)
  chosen, _ = split_params(params, select_fn)
  selected_size = param_size(chosen)
  # Report raw counts, human-readable scientific notation, and the fraction
  # of parameters captured by the selection function.
  return {
      'total_size': total_size,
      'pretty_total_size': f'{total_size:.3e}',
      'selected_size': selected_size,
      'pretty_selected_size': f'{selected_size:.3e}',
      'selected%': selected_size / total_size,
  }
| 33.448598 | 79 | 0.740151 | 512 | 3,579 | 5.0625 | 0.298828 | 0.046296 | 0.020062 | 0.024691 | 0.393904 | 0.373071 | 0.373071 | 0.373071 | 0.320988 | 0.320988 | 0 | 0.006063 | 0.170439 | 3,579 | 106 | 80 | 33.764151 | 0.866959 | 0.492316 | 0 | 0 | 0 | 0 | 0.059942 | 0 | 0 | 0 | 0 | 0 | 0.023256 | 1 | 0.093023 | false | 0 | 0.162791 | 0 | 0.348837 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77c6869548be7b262a6b24e861485690a9a1e3b0 | 20,327 | py | Python | CureIAM/processors/gcpcloudiam.py | gojek/CureIAM | 83cdf6ef4d61b563ae8ac69fbf008f8338f6361f | [
"Apache-2.0"
] | 17 | 2021-11-10T08:32:31.000Z | 2022-03-03T12:20:38.000Z | CureIAM/processors/gcpcloudiam.py | gojekfarm/CureIAM | 83cdf6ef4d61b563ae8ac69fbf008f8338f6361f | [
"Apache-2.0"
] | null | null | null | CureIAM/processors/gcpcloudiam.py | gojekfarm/CureIAM | 83cdf6ef4d61b563ae8ac69fbf008f8338f6361f | [
"Apache-2.0"
] | 3 | 2021-11-11T17:20:16.000Z | 2021-12-02T20:00:34.000Z | """Plugin to process the data retrieved from `gcpcloud.CureIAM` plugin
"""
import json
import logging
import datetime
from CureIAM.models.iamriskscore import IAMRiskScoreModel
from CureIAM.models.applyrecommendationmodel import IAMApplyRecommendationModel
from CureIAM import util
_log = logging.getLogger(__name__)
class GCPIAMRecommendationProcessor:
"""SimpleProcessor plugin to perform processing on
gcpcloud.CureIAM IAMRecommendation_record."""
    def __init__(self, enable_enforcer=False, enforcer=None):
        """Create an instance of :class:`GCPIAMRecommendationProcessor` plugin.

        Arguments:
            enable_enforcer (bool): When True, _enforce_recommendation will
                actually apply recommendations; otherwise it is a no-op.
            enforcer (dict): Enforcer configuration (allow/block lists,
                minimum safety scores, service-account key file path).
        """
        # Counters: recommendations already applied in past runs vs. this run.
        self._recommendation_applied = 0
        self._recommendation_applied_today = 0
        self._enforcer = enforcer
        self._enable_enforcer = enable_enforcer
        if self._enforcer:
            # Only act on these projects (None disables the allowlist gate).
            self._apply_recommendation_allowlist_projects = enforcer.get('allowlist_projects', None)
            # Don't perform operations on these projects
            self._apply_recommendation_blocklist_projects = enforcer.get('blocklist_projects', None)
            # Don't perform operations on these accounts_ids
            self._apply_recommendation_blocklist_accounts = enforcer.get('blocklist_accounts', None)
            self._apply_recommendation_allowlist_account_types = enforcer.get('allowlist_account_types', ['user', 'group'])
            self._apply_recommendation_blocklist_account_types = enforcer.get('blocklist_account_types', ['serviceAccount'])
            # Min recommendation apply score is 60 to default for user
            self._apply_recommendation_min_score_user = enforcer.get('min_safe_to_apply_score_user', 60)
            # Min recommendation apply score is 60 to default for groups
            self._apply_recommendation_min_score_group = enforcer.get('min_safe_to_apply_score_group', 60)
            # Min recommendation apply score is 60 to default for SA
            self._apply_recommendation_min_score_SA = enforcer.get('min_safe_to_apply_score_SA', 60)
            self._apply_recommendations_svc_acc_key_file = enforcer.get('key_file_path', None)
            # NOTE(review): these API handles are only built when an enforcer
            # config is supplied; _enforce_recommendation assumes they exist.
            self._cloud_resource = util.build_resource(
                service_name='cloudresourcemanager',
                key_file_path=self._apply_recommendations_svc_acc_key_file
            )
            self._recommender_resource = util.build_resource(
                service_name='recommender',
                key_file_path=self._apply_recommendations_svc_acc_key_file
            )
    def eval(self, record):
        """Process one IAM recommendation record.

        Enriches the raw Recommender API payload with a flat 'processor'
        summary, a risk/safety score and an apply-recommendation record, and
        (optionally) enforces the recommendation.

        Arguments:
            record (dict): Record with key 'raw' holding a Recommender API
                recommendation (fields: project, name, description,
                content.operationGroups, recommenderSubtype,
                associatedInsights, stateInfo, etag, optionally insights).

        Yields:
            dict: {'raw': ..., 'processor': ..., 'score': ...,
                   'apply_recommendation': ...}

        NOTE(review): indentation reconstructed from a whitespace-mangled
        source; the whole body is assumed to sit under the None-guard below.
        """
        # Extract the different `CureIAM_record.recommendation_action.value`
        # from the gcpcloud.GCPCloudIAMRecommendations
        iam_raw_record = record.get('raw', {})
        recommendation_dict = dict()
        if iam_raw_record is not None:
            recommendation_dict.update(
                {
                    'project': iam_raw_record['project'],
                    'recommendation_id': iam_raw_record['name'],
                    'recommendation_description': iam_raw_record['description'],
                    'recommendation_actions': iam_raw_record['content']['operationGroups'][0]['operations'],
                    # NOTE(review): 'recommendetion' is a typo preserved for
                    # compatibility -- downstream code reads this exact key.
                    'recommendetion_recommender_subtype': iam_raw_record['recommenderSubtype'],
                    # NOTE(review): associatedInsights entries use the key
                    # 'insight' (singular), so i.get('insights') is always
                    # None here -- likely meant i.get('insight').
                    'recommendation_insights': [i.get('insights') for i in iam_raw_record['associatedInsights']]
                }
            )
            # Identify the account type on which recommendation is fetched
            # If iam_raw_record['recommenderSubtype'] is REPLACE_ROLE, then user
            # info will be present as iam_raw_record['content']['operationGroups']['operations'] list
            _actor = ''
            _actor_total_permissions = 0
            _actor_exercised_permissions = 0
            _actor_exercised_permissions_category = ''
            for op_grp in iam_raw_record['content']['operationGroups']:
                for op in op_grp['operations']:
                    if op['action'] == 'remove':
                        _actor = op['pathFilters']['/iamPolicy/bindings/*/members/*']
                        # After above parsing _actor would contain something like
                        # <account_type>:<account_id>
                        _actor_type, _actor = _actor.split(':')
                        recommendation_dict.update(
                            {
                                'account_type': _actor_type,
                                'account_id': _actor
                            }
                        )
            # Get all the Permissions the current actor have
            # insights is a list, in case of multiple insights
            # all insights will have same `currentTotalPermissionsCount`
            # So we are good to include the results from the first one
            # only.
            insights = iam_raw_record.get('insights', None)
            if insights:
                _content = insights[0].get('content', None)
                if _content:
                    # Used permissions = exercised + ML-inferred permissions.
                    _actor_exercised_permissions = len(_content.get(
                        'exercisedPermissions',
                        []
                    )) + len(
                        _content.get(
                            'inferredPermissions',
                            []
                        )
                    )
                    _actor_total_permissions = _content.get(
                        'currentTotalPermissionsCount',
                        '0'
                    )
                _actor_exercised_permissions_category = insights[0].get(
                    'category',
                    ''
                )
            recommendation_dict.update(
                {
                    'account_total_permissions': int(_actor_total_permissions),
                    'account_used_permissions': _actor_exercised_permissions,
                    'account_permission_insights_category': _actor_exercised_permissions_category
                }
            )
            _res = {
                'raw': iam_raw_record,
                'processor': recommendation_dict,
                'score': IAMRiskScoreModel(recommendation_dict).score(),
                'apply_recommendation': IAMApplyRecommendationModel(recommendation_dict).model()
            }
            _res['apply_recommendation'].update(
                {
                    'safe_to_apply_score': _res['score']['safe_to_apply_recommendation_score']
                }
            )
            # If recommendation was applied in past
            # update the risk score and safe_to_apply_
            # _score to 0
            if _res['raw']['stateInfo']['state'] == 'SUCCEEDED':
                _res['score'].update(
                    {
                        'risk_score': 0,
                        'over_privilege_score': 0
                    }
                )
                self._recommendation_applied += 1
                _log.info('Recommendation %s applied in past, setting score to 0', recommendation_dict['recommendation_id'])
            # enforce the recommendation before saving it in DB.
            # Also dont re-apply the recommendation is it is already applied
            if self._enforcer and _res['raw']['stateInfo']['state'] == 'ACTIVE':
                _log.info('Enforcing recommendation %s ...', recommendation_dict['recommendation_id'])
                _recomemndation_applied = self._enforce_recommendation(_res)
                if _recomemndation_applied:
                    # Mark as applied and zero the risk scores, mirroring the
                    # SUCCEEDED branch above.
                    _res['raw']['stateInfo']['state'] = 'SUCCEEDED'
                    _res['apply_recommendation'].update(
                        {
                            'recommendation_state': 'Applied',
                            'recommendation_applied_time': str(datetime.datetime.utcnow().isoformat())
                        }
                    )
                    _res['score'].update(
                        {
                            'risk_score': 0,
                            'over_privilege_score': 0
                        }
                    )
                    self._recommendation_applied_today += 1
                    _log.info('Applied Recommendation %s', recommendation_dict['recommendation_id'])
                else:
                    _log.warn('Recommendation %s not applied', recommendation_dict['recommendation_id'])
            yield _res
    def _enforce_recommendation(self, record):
        """Method to perform Recommendation enforcement

        IAM recommendation doesn't have API to apply the recommendation
        directly rather we will have to create IAM resource which will
        perform the policy enforcement. This method does the same.

        Arguments:
            record(dict): dict record contaning raw + processor record

        Returns:
            bool: Indicating if the we were able to successfully apply
                recommendation or not.

        NOTE(review): indentation reconstructed from a whitespace-mangled
        source.
        """
        """
        Flow:
            Apply IAM policy from recommender
            - success
                - mark recommendation as succeeded
                - return True
            - no
                - dont change the recommendation status
                - return False
        """
        # Enforcement is opt-in; bail out early (returns None, which is falsy).
        if not self._enable_enforcer:
            return
        cloud_resource = self._cloud_resource
        recommender_resource = self._recommender_resource
        _processor_record = record.get('processor', None)
        _score_record = record.get('score', None)
        if _processor_record and _score_record:
            _project = _processor_record.get('project', None)
            _recommendation_actions = _processor_record.get('recommendation_actions', None)
            _recommendation_id = _processor_record.get('recommendation_id', None)
            _account_id = _processor_record.get('account_id')
            _account_type = _processor_record.get('account_type')
            _safety_score = _score_record.get('safe_to_apply_recommendation_score', None)
            _we_want_to_apply_recommendation = False
            _log.info('Testing recommendation for project %s; account %s; safety_score %d',
                      _project,
                      _account_id,
                      _safety_score)
            # Gate: project must pass allow/block lists; account type and id
            # must not be blocklisted.
            # NOTE(review): when allowlist_projects is set but
            # blocklist_projects (or blocklist_accounts) is None, the
            # `not in None` membership tests below raise TypeError.
            if (
                    (
                        self._apply_recommendation_allowlist_projects is None
                        or (_project not in self._apply_recommendation_blocklist_projects
                            and _project in self._apply_recommendation_allowlist_projects)
                    )
                    and
                    (
                        _account_type not in self._apply_recommendation_blocklist_account_types
                        and _account_type in self._apply_recommendation_allowlist_account_types
                    )
                    and
                    (
                        _account_id not in self._apply_recommendation_blocklist_accounts
                    )
            ):
                # If Recommendation is for SA, apply only for ['REMOVE_ROLE', 'REPLACE_ROLE']
                # NOTE(review): with the default blocklist_account_types
                # (['serviceAccount']) this branch is unreachable -- SAs are
                # filtered out by the gate above.
                if (
                        _account_type == 'serviceAccount'
                        and _processor_record.get('recommendetion_recommender_subtype') in ['REMOVE_ROLE', 'REPLACE_ROLE']
                ):
                    _we_want_to_apply_recommendation = True
                else:
                    if _account_type != 'serviceAccount':
                        # If user is owner of any project dont apply recommendation
                        # <TODO> this is very bad of detecting owners, need to find better way of doing this.
                        if not 'owner' in str(record['raw']['content']['operationGroups']):
                            _we_want_to_apply_recommendation = True
                # NOTE(review): these comparisons test _account_id (an email /
                # member id) against the *type* strings 'user'/'group'/'SA',
                # so the minimum-score gates can never trigger; they almost
                # certainly should test _account_type instead.
                if _account_id == 'user' and _safety_score < self._apply_recommendation_min_score_user:
                    _we_want_to_apply_recommendation = False
                elif _account_id == 'group' and _safety_score < self._apply_recommendation_min_score_group:
                    _we_want_to_apply_recommendation = False
                elif _account_id == 'SA' and _safety_score < self._apply_recommendation_min_score_SA:
                    _we_want_to_apply_recommendation = False
                if _we_want_to_apply_recommendation:
                    _log.info('Applying recommendation for project %s; account %s; account_type %s ; safety_score %d',
                              _project,
                              _account_id,
                              _account_type,
                              _safety_score)
                    # Fetch the project's current IAM policy, mutate it per the
                    # recommendation's add/remove operations, then write it back.
                    _policies = (
                        cloud_resource.projects()
                        .getIamPolicy(
                            resource=_project,
                            body={"options": {"requestedPolicyVersion": "1"}}
                        ).execute()
                    )
                    _updated_policies = _policies
                    for _recommendation_action in _recommendation_actions:
                        if _recommendation_action.get('action') == 'remove':
                            member = (
                                _recommendation_action.get('pathFilters')
                                .get('/iamPolicy/bindings/*/members/*')
                            )
                            role = (
                                _recommendation_action.get('pathFilters')
                                .get('/iamPolicy/bindings/*/role')
                            )
                            _updated_policies = self.modify_policy_remove_member(
                                _updated_policies,
                                role,
                                member
                            )
                        elif _recommendation_action.get('action') == 'add':
                            member = _recommendation_action.get('value')
                            role = (
                                _recommendation_action.get('pathFilters')
                                .get('/iamPolicy/bindings/*/role')
                            )
                            _updated_policies = self.modify_policy_add_member(
                                _updated_policies,
                                role,
                                member
                            )
                    # Apply the policies present in recommendations
                    policy = (
                        cloud_resource.projects()
                        .setIamPolicy(resource=_project, body={'policy': _updated_policies})
                        .execute()
                    )
                    # print(policy)
                    # Update the recommendation status.
                    _status = (
                        recommender_resource
                        .projects()
                        .locations()
                        .recommenders()
                        .recommendations()
                        .markSucceeded(
                            body={
                                'etag': record.get('raw').get('etag'),
                                'stateMetadata': {
                                    'reviewed-by': 'cureiam',
                                    'owned-by': 'security'
                                }
                            },
                            name=_recommendation_id)
                        .execute()
                    )
                    # So we have applied recommendation and we are good.
                    return True
        return False
def modify_policy_remove_member(self, policy, role, member):
    """Remove *member* from the binding for *role*, if both exist.

    The policy dict is modified in place and also returned. When no
    binding for *role* is present (e.g. it was dropped by an earlier
    recommendation action), the policy is returned unchanged.
    """
    for binding in policy["bindings"]:
        if binding["role"] == role:
            # Only the first binding for the role is considered.
            members = binding.get("members", [])
            if member in members:
                members.remove(member)
            break
    return policy
def modify_policy_add_member(self, policy, role, member):
    """Append a new binding granting *role* to *member*.

    A fresh binding is always appended, even if a binding for the same
    role already exists (this mirrors the upstream IAM sample code).
    The policy is mutated in place and returned for convenience.
    """
    policy["bindings"].append({"role": role, "members": [member]})
    return policy
def done(self):
    """Perform cleanup work at the end of a run.

    Contrary to the original (likely copy-pasted) docstring claiming
    this is a no-op mock, the hook does emit a final summary: the
    counters of recommendations applied overall and applied today.
    """
    # NOTE(review): _log, self._recommendation_applied and
    # self._recommendation_applied_today are defined elsewhere in this
    # module/class; this method only reports them, it does not reset them.
    _log.info('Recommendation applied: %s; Recommendations applied today: %s',
              self._recommendation_applied, self._recommendation_applied_today)
77c73cfb980dddd0f00259fb140949459a2b3bcc | 4,994 | py | Python | Stream Ciphers/grain-128.py | PitCoder/Cryptography | 42735dc2375b1794b4c4a5b0d029c3b99b91538c | [
"MIT"
] | null | null | null | Stream Ciphers/grain-128.py | PitCoder/Cryptography | 42735dc2375b1794b4c4a5b0d029c3b99b91538c | [
"MIT"
] | null | null | null | Stream Ciphers/grain-128.py | PitCoder/Cryptography | 42735dc2375b1794b4c4a5b0d029c3b99b91538c | [
"MIT"
] | null | null | null | from bitstring import BitArray, BitStream
# Definition of constants
KEY_SIZE = 128  # Grain-128 key length in bits (also used as keystream length below)
REGISTER_SIZE = 128  # width of both the NFSR and the LFSR, in bits
LAST_INDEX = 127  # highest valid bit index within a 128-bit register
INITIALIZATION_CLOCKS = 256  # key/IV mixing clocks before any keystream is emitted
def cipher_clocking(register_b, register_s, counter):
fx = register_s[counter % REGISTER_SIZE] \
^ register_s[(LAST_INDEX - (counter + 7)) % REGISTER_SIZE] \
^ register_s[(LAST_INDEX - (counter + 38)) % REGISTER_SIZE] \
^ register_s[(LAST_INDEX - (counter + 70)) % REGISTER_SIZE] \
^ register_s[(LAST_INDEX - (counter + 81)) % REGISTER_SIZE] \
^ register_s[(LAST_INDEX - (counter + 93)) % REGISTER_SIZE]
gx = register_s[(LAST_INDEX - counter) % REGISTER_SIZE] ^ register_b[(LAST_INDEX - counter) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 26)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 56)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 91)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 96)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 3)) % REGISTER_SIZE] and register_b[
(LAST_INDEX - (counter + 67)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 11)) % REGISTER_SIZE] and register_b[
(LAST_INDEX - (counter + 13)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 17)) % REGISTER_SIZE] and register_b[
(LAST_INDEX - (counter + 18)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 27)) % REGISTER_SIZE] and register_b[
(LAST_INDEX - (counter + 59)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 40)) % REGISTER_SIZE] and register_b[
(LAST_INDEX - (counter + 48)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 61)) % REGISTER_SIZE] and register_b[
(LAST_INDEX - (counter + 65)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 68)) % REGISTER_SIZE] and register_b[
(LAST_INDEX - (counter + 64)) % REGISTER_SIZE]
hx = register_b[(LAST_INDEX - (counter + 12)) % REGISTER_SIZE] and register_s[
(LAST_INDEX - (counter + 8)) % REGISTER_SIZE] \
^ register_s[(LAST_INDEX - (counter + 13)) % REGISTER_SIZE] and register_s[
(LAST_INDEX - (counter + 20)) % REGISTER_SIZE] \
^ register_b[(LAST_INDEX - (counter + 95)) % REGISTER_SIZE] and register_s[
(LAST_INDEX - (counter + 42)) % REGISTER_SIZE] \
^ register_s[(LAST_INDEX - (counter + 60)) % REGISTER_SIZE] and register_s[
(LAST_INDEX - (counter + 79)) % REGISTER_SIZE] \
^ register_s[(LAST_INDEX - (counter + 12)) % REGISTER_SIZE] and register_b[
(LAST_INDEX - (counter + 95)) % REGISTER_SIZE] and register_s[
(LAST_INDEX - (counter + 95)) % REGISTER_SIZE]
A = [15,36,45,64,73,89]
z = register_b[(LAST_INDEX - (counter + 2)) % REGISTER_SIZE]
print(register_b)
print(register_s)
#for j in A:
#z = z ^ register_b[(LAST_INDEX - (counter + A[j])) % REGISTER_SIZE]
z = z ^ hx ^ register_s[(LAST_INDEX - (counter + 93)) % REGISTER_SIZE]
return fx, gx, z
def initialization(nfsr, lfsr):
    """Run the key/IV setup phase.

    For INITIALIZATION_CLOCKS rounds the cipher is clocked and the
    output bit is XORed into the feedback of *both* shift registers
    instead of being emitted, as required during initialization.
    Returns the mixed (nfsr, lfsr) pair.
    """
    for clock in range(INITIALIZATION_CLOCKS):
        feedback_f, feedback_g, out_bit = cipher_clocking(nfsr, lfsr, clock)
        # Shift each register by one and insert the masked feedback bit.
        nfsr = nfsr << 1
        nfsr[LAST_INDEX] = feedback_f ^ out_bit
        lfsr = lfsr << 1
        lfsr[LAST_INDEX] = feedback_g ^ out_bit
    return nfsr, lfsr
def cipher(nfsr, lfsr):
    """Generate KEY_SIZE keystream bits after initialization.

    NOTE(review): the first clock's results are bound as ``hx_0, gx_0``
    although the first value returned by cipher_clocking is the
    f-feedback bit, and neither feedback bit of clock 0 is fed into
    the registers before the loop starts -- confirm this matches the
    intended Grain clocking order.
    """
    hx_0, gx_0, z_0 = cipher_clocking(nfsr, lfsr, 0)
    # The keystream is accumulated by concatenating output bits.
    output = z_0
    for i in range(1, KEY_SIZE):
        fx_i, gx_i, zi = cipher_clocking(nfsr, lfsr, i)
        # Shift both registers and insert the feedback bits (no output
        # masking in the keystream phase).
        nfsr = nfsr << 1; nfsr[LAST_INDEX] = fx_i;
        lfsr = lfsr << 1; lfsr[LAST_INDEX] = gx_i;
        output = output + zi
    # Debug dump of the accumulated keystream.
    print(output)
    return output
if __name__ == '__main__':
    # Number of rounds (currently unused below).
    NO_ROUNDS = 25
    # 32 one-bits prepended to the IV when loading the LFSR.
    ONES_PADDING = BitArray('0xFFFFFFFF')
    # Known-answer test vectors for Grain-128 (for manual comparison):
    #   IV 000000000000000000000000, key 00000000000000000000000000000000
    #     -> keystream 0fd9deefeb6fad437bf43fce35849cfe (big-endian)
    #     -> keystream db032aff3788498b57cb894fffb6bb96 (little-endian)
    #   IV 0123456789abcdef12345678, key 0123456789abcdef123456789abcdef0
    #     -> keystream f09b7bf7d7f6b5c2de2ffc73ac21397f (big-endian)
    #     -> keystream afb5babfa8de896b4b9c6acaf7c4fbfd (little-endian)
    initialization_vector = BitArray('0x000000000000000000000000')
    key = BitArray('0x00000000000000000000000000000000')
    # Load the registers: NFSR <- key, LFSR <- padding followed by IV.
    nfsr = key[:]
    lfsr = ONES_PADDING + initialization_vector[:]
    nfsr, lfsr = initialization(nfsr, lfsr)
    keystream = cipher(nfsr, lfsr)
    print("IV: ", initialization_vector.hex)
    print("key: ", key.hex)
    print("keystream: ", keystream.hex)
| 46.240741 | 114 | 0.623148 | 571 | 4,994 | 5.164623 | 0.192644 | 0.134283 | 0.211597 | 0.14649 | 0.556121 | 0.550017 | 0.52628 | 0.310275 | 0.113937 | 0.113937 | 0 | 0.109371 | 0.245695 | 4,994 | 107 | 115 | 46.672897 | 0.67348 | 0.154385 | 0 | 0.026316 | 0 | 0 | 0.023283 | 0.014255 | 0 | 0 | 0.016631 | 0 | 0 | 1 | 0.039474 | false | 0 | 0.013158 | 0 | 0.092105 | 0.078947 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
77c7afef5c74a339ce18ec675fedbac6397faeca | 2,111 | py | Python | microanalyst/model/filenames.py | bzaczynski/microanalyst | 915d8b3111b98a5e369dbcfe0b88c84ade9faec8 | [
"MIT"
] | 3 | 2017-06-06T18:54:28.000Z | 2021-06-11T13:06:40.000Z | microanalyst/model/filenames.py | bzaczynski/microanalyst | 915d8b3111b98a5e369dbcfe0b88c84ade9faec8 | [
"MIT"
] | null | null | null | microanalyst/model/filenames.py | bzaczynski/microanalyst | 915d8b3111b98a5e369dbcfe0b88c84ade9faec8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# The MIT License (MIT)
#
# Copyright (c) 2013 Bartosz Zaczynski
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from microanalyst.commons import osutils
from microanalyst.model.commons import flatten
class Filenames(object):
    """Thin wrapper around the spreadsheet filenames in model data."""

    def __init__(self, json_data):
        # Filenames grouped per iteration, in their original order.
        self.filenames = _process(json_data)

    def get(self, with_path, iteration):
        """Return a flat list of filenames in their original order.

        If *iteration* is given, only that iteration's files are
        returned; otherwise all iterations are flattened. When
        *with_path* is falsy, each name is reduced to its basename.
        """
        if iteration is None:
            selected = flatten(self.filenames)
        else:
            selected = list(self.filenames[iteration])
        if with_path:
            return selected
        return [osutils.basename(name) for name in selected]
def _process(json_data):
"""Return a list of filenames grouped by iteration."""
iterations = []
for iteration in json_data[u'iterations']:
filenames = [x[u'filename'] for x in iteration[u'spreadsheets']]
iterations.append(filenames)
return iterations
| 36.396552 | 79 | 0.720512 | 290 | 2,111 | 5.203448 | 0.482759 | 0.058317 | 0.01723 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002392 | 0.207958 | 2,111 | 57 | 80 | 37.035088 | 0.90012 | 0.588347 | 0 | 0 | 0 | 0 | 0.036058 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.1 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
77c83499cfbfd5ad6ff4654b86b77b358a42fede | 7,864 | py | Python | models/abstract_model.py | kkangshen/bayesian-deep-rul | 449038571097cfccee5e128623a16a963a4dca63 | [
"MIT"
] | 6 | 2020-02-28T14:56:46.000Z | 2022-03-24T02:44:56.000Z | models/abstract_model.py | kkangshen/bayesian-deep-rul | 449038571097cfccee5e128623a16a963a4dca63 | [
"MIT"
] | null | null | null | models/abstract_model.py | kkangshen/bayesian-deep-rul | 449038571097cfccee5e128623a16a963a4dca63 | [
"MIT"
] | 4 | 2020-01-02T15:03:02.000Z | 2022-03-10T12:46:35.000Z | # -*- coding: utf-8 -*-
"""Abstract model definition."""
from collections import OrderedDict
import numpy as np
import torch
import torch.nn as nn
class AbstractModel(nn.Module):
    """Base class for (possibly Bayesian) deep RUL models.

    Subclasses are expected to populate ``self.layers`` with an
    iterable of layer modules before ``forward`` is called; a ``None``
    value marks the class as abstract/unimplemented.
    """

    def __init__(self, input_size):
        """
        Parameters
        ----------
        input_size : (int, int, int)
            Input size (channels/height/width-style triple; exact axis
            meaning depends on the subclass -- TODO confirm).
        """
        super(AbstractModel, self).__init__()
        self.input_size = input_size
        # Sum-reduced MSE; loss() scales it by -0.5 to form the
        # (unnormalized Gaussian) log-likelihood term.
        self.criterion = nn.MSELoss(reduction="sum")
        # Must be set by subclasses; guards below raise otherwise.
        self.layers = None

    def forward(self, x):
        """Forward pass.

        Parameters
        ----------
        x : Tensor
            Input sample.

        Returns
        -------
        Tensor
            Output label, flattened to 1-D.
        """
        if not self.layers:
            raise NotImplementedError
        # Accumulated KL divergence over all Bayesian layers for this
        # pass; read back by loss().
        self.kl = 0
        for layer in self.layers:
            out = layer(x)
            if len(out) == 2:  # TODO: improve
                # Bayesian layer: returns (activation, KL term).
                x, _kl = out
                self.kl += _kl
            else:
                # frequentist layer: returns the activation only.
                x = out
        return x.view(-1)

    def loss(self, pred, label, beta=0):
        """Compute loss.

        Parameters
        ----------
        pred : Tensor
            Predicted label.
        label : Tensor
            True label.
        beta : float, optional
            Weight of the KL complexity term (0 disables it).

        Returns
        -------
        Tensor
            Loss (complexity minus log-likelihood).
        """
        if not self.layers:
            raise NotImplementedError
        likelihood = -0.5 * self.criterion(pred, label)
        # NOTE(review): self.kl is only assigned inside forward(), so
        # calling loss() before any forward pass raises AttributeError.
        complexity = beta * self.kl if beta != 0 else 0
        return complexity - likelihood

    def get_weight_statistics(self):
        """Extract weight statistics for later visualization (bias not used).

        Returns
        -------
        ([str], [ndarray], [ndarray])
            List of layer names,
            list of 1D array of `float` representing layer weight means,
            list of 1D array of `float` representing layer weight standard deviations.
        """
        if not self.layers:
            raise NotImplementedError
        names = []
        qmeans = []
        qstds = []
        for idx, layer in enumerate(self.layers):
            if hasattr(layer, "qw_mean") and hasattr(layer, "log_alpha"):
                # Bayesian layer: std derived from the dropout-style
                # parameterization  var = exp(log_alpha) * mean ** 2.
                names.append(str(layer.__class__).split(".")[-1].split("'")[0] + "-" + str(idx + 1))
                qmeans.append(layer.qw_mean.detach().cpu().numpy())
                qstds.append(np.sqrt(np.exp(layer.log_alpha.detach().cpu().numpy()) * (layer.qw_mean.detach().cpu().numpy() ** 2)))
            else:
                # Frequentist layer: the first trainable tensor is taken
                # as the weights; std is reported as zero (point estimate).
                trainable_params = [param.detach().cpu().numpy() for param in layer.parameters() if param.requires_grad]
                if len(trainable_params) > 0:
                    weights = np.asarray(trainable_params[0])
                    names.append(str(layer.__class__).split(".")[-1].split("'")[0] + "-" + str(idx + 1))
                    qmeans.append(weights)
                    qstds.append(np.zeros(weights.shape))
        return names, qmeans, qstds

    # Source code modified from:
    # Title: sksq96/pytorch-summary
    # Author: Shubham Chandel (sksq96)
    # Date: 2018
    # Availability: https://github.com/sksq96/pytorch-summary/tree/b50f213f38544ac337beeeda93b03c7e48e69c78
    def summary(self, log_fn, batch_size=-1, device="cuda"):
        """Log model summary.

        Parameters
        ----------
        log_fn : callable
            Logging function.
        batch_size : int, optional
            Batch size (used only for display in the shape columns).
        device : string, optional
            Device ('cuda' or 'cpu').

        Returns
        -------
        int
            Number of trainable parameters.
        """
        if not self.layers:
            raise NotImplementedError

        def register_hook(module):
            # Forward hook recording input/output shapes and parameter
            # counts of each leaf module into the `summary` dict.
            def hook(module, input, output):
                class_name = str(module.__class__).split(".")[-1].split("'")[0]
                module_idx = len(summary)
                m_key = "%s-%i" % (class_name, module_idx + 1)
                summary[m_key] = OrderedDict()
                summary[m_key]["input_shape"] = list(input[0].size())
                summary[m_key]["input_shape"][0] = batch_size
                if isinstance(output, (list, tuple)):
                    # Multi-output module: only the first output's shape
                    # is recorded.
                    summary[m_key]["output_shape"] = [[-1] + list(o.size())[1 :] for o in output][0]
                else:
                    summary[m_key]["output_shape"] = list(output.size())
                    summary[m_key]["output_shape"][0] = batch_size
                params = 0
                if hasattr(module, "params_count") and callable(module.params_count):
                    # Custom (Bayesian) layers expose their own count.
                    params += module.params_count()
                    summary[m_key]["trainable"] = True
                if hasattr(module, "weight") and hasattr(module.weight, "size"):
                    params += torch.prod(torch.LongTensor(list(module.weight.size())))
                    summary[m_key]["trainable"] = module.weight.requires_grad
                if hasattr(module, "bias") and hasattr(module.bias, "size"):
                    params += torch.prod(torch.LongTensor(list(module.bias.size())))
                summary[m_key]["nb_params"] = params
            if (
                not isinstance(module, nn.Sequential)
                and not isinstance(module, nn.ModuleList)
                and not (module == self)
            ):
                hooks.append(module.register_forward_hook(hook))

        device = device.lower()
        assert device in [
            "cuda",
            "cpu",
        ], "Input device is not valid, please specify 'cuda' or 'cpu'."
        if device == "cuda" and torch.cuda.is_available():
            dtype = torch.cuda.FloatTensor
        else:
            dtype = torch.FloatTensor
        # multiple inputs to the network
        #if isinstance(input_size, tuple):
            #input_size = [input_size]
        # batch_size of 2 for batchnorm
        #x = [torch.rand(2, *in_size).type(dtype) for in_size in input_size]
        x = torch.rand(1, *self.input_size).type(dtype)
        # create properties
        summary = OrderedDict()
        hooks = []
        # register hook
        self.apply(register_hook)
        # make a forward pass
        #self(*x)
        self(x)
        # remove these hooks
        for h in hooks:
            h.remove()
        log_fn("________________________________________________________________")
        line_new = "{:>20} {:>25} {:>15}".format("Layer (type)", "Output Shape", "Param #")
        log_fn(line_new)
        log_fn("================================================================")
        total_params = 0
        trainable_params = 0
        line_count = 0
        for layer in summary:
            # input_shape, output_shape, trainable, nb_params
            line_new = "{:>20} {:>25} {:>15}".format(
                layer,
                str(summary[layer]["output_shape"]),
                "{0:,}".format(summary[layer]["nb_params"]),
            )
            total_params += summary[layer]["nb_params"]
            if "trainable" in summary[layer]:
                if summary[layer]["trainable"] == True:
                    trainable_params += summary[layer]["nb_params"]
            log_fn(line_new)
            line_count += 1
        log_fn("================================================================")
        log_fn("Total params: {0:,}".format(total_params))
        log_fn("Trainable params: {0:,}".format(trainable_params))
        log_fn("Non-trainable params: {0:,}".format(total_params - trainable_params))
        log_fn("________________________________________________________________")
        return trainable_params
| 33.042017 | 131 | 0.517294 | 798 | 7,864 | 4.798246 | 0.245614 | 0.014364 | 0.028728 | 0.01567 | 0.20632 | 0.140507 | 0.086707 | 0.074171 | 0.051188 | 0.028728 | 0 | 0.017071 | 0.344481 | 7,864 | 237 | 132 | 33.181435 | 0.725703 | 0.192014 | 0 | 0.166667 | 0 | 0 | 0.115764 | 0.043582 | 0 | 0 | 0 | 0.004219 | 0.008333 | 1 | 0.058333 | false | 0 | 0.033333 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
77c8d398ac338ba0bf2a0226bf85b373294388f7 | 394 | py | Python | weiboCrawler/migrations/0020_auto_20210501_0731.py | SongYuQiu/Social-Network-Portrait-Analysis-System-BackCode | 392764ec4944f4df2b9a2cdc9901f9cf32918988 | [
"MIT"
] | null | null | null | weiboCrawler/migrations/0020_auto_20210501_0731.py | SongYuQiu/Social-Network-Portrait-Analysis-System-BackCode | 392764ec4944f4df2b9a2cdc9901f9cf32918988 | [
"MIT"
] | null | null | null | weiboCrawler/migrations/0020_auto_20210501_0731.py | SongYuQiu/Social-Network-Portrait-Analysis-System-BackCode | 392764ec4944f4df2b9a2cdc9901f9cf32918988 | [
"MIT"
] | null | null | null | # Generated by Django 2.2 on 2021-05-01 07:31
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: renames the WeiboUser field
    # profile_image_url to profile_url. Do not edit by hand.

    dependencies = [
        ('weiboCrawler', '0019_weibouser_profile_image_url'),
    ]

    operations = [
        migrations.RenameField(
            model_name='weibouser',
            old_name='profile_image_url',
            new_name='profile_url',
        ),
    ]
| 20.736842 | 61 | 0.619289 | 42 | 394 | 5.571429 | 0.690476 | 0.102564 | 0.128205 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.06338 | 0.279188 | 394 | 18 | 62 | 21.888889 | 0.760563 | 0.109137 | 0 | 0 | 1 | 0 | 0.232092 | 0.091691 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
77c9573c06e85833053d03d6dbce3da891ac8dbe | 4,316 | py | Python | ezql.py | Dimwest/MyEzQL | 866c7e853cf605d475e8204a7465f6e596bcf2d8 | [
"MIT"
] | 10 | 2019-01-18T15:54:35.000Z | 2019-02-21T16:18:41.000Z | ezql.py | Dimwest/MyEzQL | 866c7e853cf605d475e8204a7465f6e596bcf2d8 | [
"MIT"
] | 1 | 2020-08-08T20:50:00.000Z | 2020-08-08T20:50:00.000Z | ezql.py | Dimwest/MyEzQL | 866c7e853cf605d475e8204a7465f6e596bcf2d8 | [
"MIT"
] | null | null | null | import fire
from configparser import ConfigParser
from utils.processing import str_to_sql_dict
from utils.validation import *
from utils.logging import *
from parse.worker import Worker
from output.cmd import beautify
from output.mermaid import Mermaid
from output.json import to_json
from typing import Optional, List
from pathlib import Path
class MyEzQl(object):
    # CLI facade exposed through python-fire (see the __main__ guard).

    def parse(self, i: str, ds: Optional[str]=None, dl: Optional[str]=None,
              pmode: Optional[str]=None, chart: Optional[str]=None,
              json: Optional[str]=None, tables: Optional[List[str]]=None,
              procedures: Optional[List[str]]=None,
              fmode: Optional[str]=None, v: Optional[str]=None) -> None:
        """
        Core function parsing input file or directory and pretty-printing results
        in the terminal.
        Provides various parsing and output options to tweak according to needs.
        :param i: path to input .sql file or directory containing .sql files
        :param ds: default schema, can be set in config.py for convenience purpose
        :param dl: delimiter, defaults to ;;
        :param pmode: parsing mode, can be 'procedure' or 'ddl'
        :param chart: path to output .html flowchart, defaults to '', in which case
        no output file is created
        :param json: path to output .json file, defaults to '', in which case
        no output file is created
        :param tables: list of table names to filter on, only the parents
        and children of these table(s) will be kept in the outputs.
        Procedures filtering has precedence over tables filtering.
        :param procedures: list of procedure names to filter on, only the
        statements located inside the selected procedure(s) will be kept
        in outputs. Procedures filtering has precedence over tables filtering.
        :param fmode: filtering mode, can be 'simple' or 'rec'
        :param v: verbosity level, which will ultimately set the DEBUG output level.
        Must be one of ('v', 'vv', 'vvv', 'vvvv'), defaults to None, resulting in
        logging.INFO logger level
        """
        # Read config
        cfg = ConfigParser()
        # config.ini lives next to this script.
        cfg.read(f'{Path(__file__).parent}/config.ini')
        # Set default schema to config value if not provided
        ds = cfg['parser_config']['default_schema'] if not ds else ds
        # Set delimiter to config value if not provided
        dl = cfg['parser_config']['delimiter'] if not dl else dl
        # Set parsing mode to config value if not provided
        pmode = cfg['parser_config']['default_parsing_mode'] if not pmode else pmode
        fmode = cfg['parser_config']['default_filter_mode'] if not fmode else fmode
        v = cfg['parser_config']['default_verbosity'] if not v else v
        # Fail fast on invalid argument combinations, then configure logging.
        validate_args(i, chart, json, tables, procedures, pmode, fmode, v)
        set_verbosity(v)
        logger.warning(f'\nStart parsing with parameters:'
                       f'\n\n default schema --> {ds}'
                       f'\n delimiter --> {dl}'
                       f'\n parsing mode --> {pmode}'
                       f"\n filter mode --> {fmode if tables or procedures else 'off'} "
                       f"\n{' -> on procedure(s) ' + str(procedures) if procedures else ''}"
                       f"\n{' -> on table(s) ' + str(tables) if tables else ''}")
        # Configure and run parser
        worker = Worker(default_schema=ds, delimiter=dl, pmode=pmode, fmode=fmode)
        worker.run(i)
        # If procedure filter defined, apply filtering to results
        # NOTE(review): both filters are applied sequentially when both
        # are given; the docstring's "precedence" claim refers to the
        # procedure filter running first -- confirm.
        if procedures:
            procedures = str_to_sql_dict(procedures)
            worker.procedures_filter(procedures)
        # If tables filter defined, apply filtering to results
        if tables:
            tables = str_to_sql_dict(tables)
            worker.tables_filter(tables)
        # Pretty print results in terminal
        beautify(worker.results)
        # Print errored files if existing
        worker.execution_warnings()
        # If .html flowchart output required, create it
        if chart:
            m = Mermaid(worker.results)
            m.tables_chart(chart)
        # If .json output required, create it
        if json:
            to_json(worker.results, json)
if __name__ == '__main__':
    # Expose the MyEzQl class as a command-line interface via python-fire.
    fire.Fire(MyEzQl)
77ca100659083b3c68df3e832abc6d77f7424c6b | 254 | py | Python | tests/pygetwindow/pygetwindow.py | JakubAndrysek/pyspacenavigator-app | 97e8049b646cd734f37b50fb2a60169a3b0d34f8 | [
"MIT"
] | null | null | null | tests/pygetwindow/pygetwindow.py | JakubAndrysek/pyspacenavigator-app | 97e8049b646cd734f37b50fb2a60169a3b0d34f8 | [
"MIT"
] | null | null | null | tests/pygetwindow/pygetwindow.py | JakubAndrysek/pyspacenavigator-app | 97e8049b646cd734f37b50fb2a60169a3b0d34f8 | [
"MIT"
] | null | null | null |
import time

from win32gui import GetWindowText, GetForegroundWindow

# Poll the foreground window title and print "OK" whenever focus moves
# to a window whose title contains "inkscape" (case-insensitive).

old = ""
while True:
    new = GetWindowText(GetForegroundWindow())
    if old != new:
        # print(new)
        # BUG FIX: the original test was new.lower().find("inkscape") > 0,
        # which misses titles that *start* with "inkscape" (index 0).
        if "inkscape" in new.lower():
            print("OK")
        old = new
    # BUG FIX: the original loop busy-spun at 100% CPU; a short sleep is
    # plenty for focus-change detection.
    time.sleep(0.2)