| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
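Rows with this schema can be inspected directly, for example with pandas. A minimal sketch, assuming the split is stored as Parquet (the file name below is hypothetical; the dump does not say how the rows are stored):

```python
import pandas as pd

# Hypothetical file name -- the dump above does not say where the rows live.
df = pd.read_parquet("train-00000-of-00001.parquet")

# The three max_*_repo_* column groups describe the same blob as seen from the
# most-starred, most-issued and most-forked repository that contains it.
cols = ["hexsha", "size", "lang", "max_stars_repo_name", "max_stars_count"]
print(df[cols].head())

# `content` is the raw source file; the qsc_* columns come in float-score
# (*_quality_signal) and 0/1 flag variants.
print(df.loc[0, "content"][:200])
```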
**Row 1: terrascript/data/linode.py (mjuenema/python-terrascript)**

| field | value |
|---|---|
| hexsha | 57a8347fb5fe5ce96de561337d38d644465b4b88 |
| size | 360 |
| ext | py |
| lang | Python |
| max_stars_repo_path | terrascript/data/linode.py |
| max_stars_repo_name | mjuenema/python-terrascript |
| max_stars_repo_head_hexsha | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d |
| max_stars_repo_licenses | ["BSD-2-Clause"] |
| max_stars_count | 507 |
| max_stars_repo_stars_event_min_datetime | 2017-07-26T02:58:38.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-01-21T12:35:13.000Z |
| max_issues_repo_path | terrascript/data/linode.py |
| max_issues_repo_name | mjuenema/python-terrascript |
| max_issues_repo_head_hexsha | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d |
| max_issues_repo_licenses | ["BSD-2-Clause"] |
| max_issues_count | 135 |
| max_issues_repo_issues_event_min_datetime | 2017-07-20T12:01:59.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-10-04T22:25:40.000Z |
| max_forks_repo_path | terrascript/data/linode.py |
| max_forks_repo_name | mjuenema/python-terrascript |
| max_forks_repo_head_hexsha | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d |
| max_forks_repo_licenses | ["BSD-2-Clause"] |
| max_forks_count | 81 |
| max_forks_repo_forks_event_min_datetime | 2018-02-20T17:55:28.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-01-31T07:08:40.000Z |

content:

```python
# terrascript/data/linode.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:21:10 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.data.linode
#
# instead of
#
# >>> import terrascript.data.linode.linode
#
# This is only available for 'official' and 'partner' providers.
from terrascript.data.linode.linode import *
```

Remaining columns (`avg_line_length` through `hits`, in schema order):

| 24 | 73 | 0.730556 | 49 | 360 | 5.367347 | 0.693878 | 0.228137 | 0.319392 | 0.205323 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.038835 | 0.141667 | 360 | 14 | 74 | 25.714286 | 0.812298 | 0.802778 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
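For this row the generator comment is reflected in the signals: `qsc_code_cate_autogen_quality_signal` is 1 and every dupe-n-gram fraction is 0. As a rough sketch of how a duplication signal such as `qsc_code_frac_chars_dupe_5grams` could be computed (the pipeline's exact tokenization and counting rules are not given in this dump, so the definition below is an assumption):

```python
from collections import Counter

def frac_chars_dupe_ngrams(text: str, n: int) -> float:
    """Fraction of word characters covered by duplicated word n-grams.

    Assumed definition: whitespace tokenization, and a character counts as
    duplicated if it sits inside any n-gram occurring more than once.
    """
    words = text.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    covered = [False] * len(words)
    for i, gram in enumerate(ngrams):
        if counts[gram] > 1:
            for j in range(i, i + n):
                covered[j] = True  # mark positions once, so overlaps are not double-counted
    total = sum(len(w) for w in words)
    return sum(len(w) for w, c in zip(words, covered) if c) / total if total else 0.0
```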
**Row 2: tasks/LR4BD.py (evgeniy97/taskgenerator)**

| field | value |
|---|---|
| hexsha | 57b0ea10cf19cf67f70ba6e2c0726dffa6a21fcb |
| size | 3,786 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tasks/LR4BD.py |
| max_stars_repo_name | evgeniy97/taskgenerator |
| max_stars_repo_head_hexsha | 7680989c2a080761ef574fac148a0a94c722ad16 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2018-07-19T09:56:35.000Z |
| max_stars_repo_stars_event_max_datetime | 2018-07-19T09:56:35.000Z |
| max_issues_repo_path | tasks/LR4BD.py |
| max_issues_repo_name | evgeniy97/taskgenerator |
| max_issues_repo_head_hexsha | 7680989c2a080761ef574fac148a0a94c722ad16 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tasks/LR4BD.py |
| max_forks_repo_name | evgeniy97/taskgenerator |
| max_forks_repo_head_hexsha | 7680989c2a080761ef574fac148a0a94c722ad16 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
# coding: utf-8
# In[ ]:
"""
Numerical Methods, lab 4
"""
import sympy as sy
from sympy import Rational as syR
from sympy import exp, sin, cos, sqrt, log, ln
from sympy import pi, cot, sinh, cosh, atan, tan
# This was Task_db
Tasks_db = {
'Task1':
[
# 4.1.1
{'f1': lambda x1,x2: sin(x1+x2)-x2-1.2,
'f2': lambda x1,x2: 2*x1+cos(x2)-2,
},
# 4.1.2
{'f1': lambda x1,x2: cos(x1-1)+x2-0.5,
'f2': lambda x1,x2: sin(x1)+2*x2-2,
},
# 4.1.3
{'f1': lambda x1,x2: sin(x1)+x2-2,
'f2': lambda x1,x2: cos(x1)+x2-1.5,
},
# 4.1.4
{'f1': lambda x1,x2: cos(x1)+x2-1.5,
'f2': lambda x1,x2: 2*x1-sin(x2-0.5)-1,
},
# 4.1.5
{'f1': lambda x1,x2: sin(x1+1.5)-x2+2.9,
'f2': lambda x1,x2: cos(x2-2)+x1,
},
# 4.1.6
{'f1': lambda x1,x2: cos(x1+0.5)+x2-0.8,
'f2': lambda x1,x2: sin(x2)-2*x1-1.6,
},
# 4.1.7
{'f1': lambda x1,x2: sin(x1-1)+x2-0.1,
'f2': lambda x1,x2: x1-sin(x2+1)-0.8,
},
# 4.1.8
{'f1': lambda x1,x2: cos(x1+x2)+2*x2,
'f2': lambda x1,x2: x1+sin(x2)-0.6,
},
# 4.1.9
{'f1': lambda x1,x2: cos(x1+0.5)-x2-2,
'f2': lambda x1,x2: sin(x2)-2*x1-1,
},
# 4.1.10
{'f1': lambda x1,x2: sin(x1+x2)-x2-1.5,
'f2': lambda x1,x2: x1+cos(x2-0.5)-0.5,
},
# 4.1.11
{'f1': lambda x1,x2: sin(x2+1)-x1-1.2,
'f2': lambda x1,x2: 2*x1**2+x2-2,
},
# 4.1.12
{'f1': lambda x1,x2: cos(x2-1)+x1-0.5,
'f2': lambda x1,x2: x2-cos(x1)-3,
},
# 4.1.13
{'f1': lambda x1,x2: tan(x1*x2+0.4)-x1**2,
'f2': lambda x1,x2: 0.6*x1**2+2*x2**2-1,
},
# 4.1.14
{'f1': lambda x1,x2: sin(x1+x2)-1.6*x1-1,
'f2': lambda x1,x2: x1**2+x2**2-1,
},
# 4.1.15
{'f1': lambda x1,x2: tan(x1*x2+0.1)-x1**2,
'f2': lambda x1,x2: x1**2+2*x2**2-1,
},
# 4.1.16
{'f1': lambda x1,x2: sin(0.5*x1+x2)-1.2*x1-1,
'f2': lambda x1,x2: x1**2+x2**2-1,
},
# 4.1.17
{'f1': lambda x1,x2: tan(x1*x2+0.3)-x1**2,
'f2': lambda x1,x2: 0.9*x1**2+2*x2**2-1,
},
# 4.1.18
{'f1': lambda x1,x2: sin(x1+x2)-1.3*x1-1,
'f2': lambda x1,x2: x1**2+0.2*x2**2-1,
},
# 4.1.19
{'f1': lambda x1,x2: tan(x1*x2)-x1**2,
'f2': lambda x1,x2: 0.8*x1**2+2*x2**2-1,
},
# 4.1.20
{'f1': lambda x1,x2: sin(x1+x2)-1.5*x1-0.1,
'f2': lambda x1,x2: 3*x1**2+x2**2-1,
},
# 4.1.21
{'f1': lambda x1,x2: tan(x1*x2)-x1**2,
'f2': lambda x1,x2: 0.7*x1**2+2*x2**2-1,
},
# 4.1.22
{'f1': lambda x1,x2: sin(x1+x2)-1.2*x1-0.1,
'f2': lambda x1,x2: x1**2+x2**2-1,
},
# 4.1.23
{'f1': lambda x1,x2: tan(x1*x2+0.2)-x1**2,
'f2': lambda x1,x2: 0.6*x1**2+2*x2**2-1,
},
# 4.1.24
{'f1': lambda x1,x2: sin(x1+x2)-x1+0.1,
'f2': lambda x1,x2: 2*x2-cos(3*x1)+0.1,
},
# 4.1.25
{'f1': lambda x1,x2: cos(x1+0.5)+x2-1,
'f2': lambda x1,x2: sin(x2)-2*x1-2,
},
# 4.1.26
{'f1': lambda x1,x2: cos(x2-2)+x1,
'f2': lambda x1,x2: sin(x1+0.5)-x2+2.9,
},
# 4.1.27
{'f1': lambda x1,x2: sin(x1-1)+x2-1.5,
'f2': lambda x1,x2: x1-sin(x2-1)-1,
},
# 4.1.28
{'f1': lambda x1,x2: sin(x2+1)-x1-1,
'f2': lambda x1,x2: 2*x2+cos(x1)-0.5,
},
# 4.1.29
{'f1': lambda x1,x2: cos(x2-1)+x1-0.8,
'f2': lambda x1,x2: x2-cos(x1)-2,
},
# 4.1.30
{'f1': lambda x1,x2: cos(x1-1)+x2-1,
'f2': lambda x1,x2: sin(x2)+2*x1-1.6,
},
]
}
```

Remaining columns (`avg_line_length` through `hits`, in schema order):

| 26.475524 | 53 | 0.421025 | 735 | 3,786 | 2.165986 | 0.092517 | 0.19598 | 0.376884 | 0.226131 | 0.800879 | 0.776382 | 0.716709 | 0.563442 | 0.297739 | 0.165829 | 0 | 0.239199 | 0.327522 | 3,786 | 142 | 54 | 26.661972 | 0.386096 | 0.062599 | 0 | 0.070707 | 0 | 0 | 0.035909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.040404 | null | null | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
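Each `Tasks_db` entry defines a two-equation nonlinear system f1(x1, x2) = 0, f2(x1, x2) = 0. A minimal sketch of solving one variant numerically with sympy's `nsolve`; the initial guess (1.0, -2.0) is an assumption chosen near a root of this variant:

```python
import sympy as sy
from sympy import sin, cos

x1, x2 = sy.symbols("x1 x2")

# Variant 4.1.1 as symbolic expressions (= 0 is implied).
f1 = sin(x1 + x2) - x2 - 1.2
f2 = 2 * x1 + cos(x2) - 2

# Newton-type root finding from an assumed starting point; for this variant
# it converges to roughly x1 = 1.14, x2 = -1.86.
root = sy.nsolve((f1, f2), (x1, x2), (1.0, -2.0))
print(root)
```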
**Row 3: xnmt/specialized_encoders/segmenting_encoder/__init__.py (neulab/xnmt)**

| field | value |
|---|---|
| hexsha | 17c864b08650e1c4a6b55d8a699e81af008c20de |
| size | 387 |
| ext | py |
| lang | Python |
| max_stars_repo_path | xnmt/specialized_encoders/segmenting_encoder/__init__.py |
| max_stars_repo_name | neulab/xnmt |
| max_stars_repo_head_hexsha | d93f8f3710f986f36eb54e9ff3976a6b683da2a4 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 195 |
| max_stars_repo_stars_event_min_datetime | 2017-05-27T11:23:40.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-09-28T06:03:24.000Z |
| max_issues_repo_path | xnmt/specialized_encoders/segmenting_encoder/__init__.py |
| max_issues_repo_name | neulab/xnmt |
| max_issues_repo_head_hexsha | d93f8f3710f986f36eb54e9ff3976a6b683da2a4 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 386 |
| max_issues_repo_issues_event_min_datetime | 2017-05-25T23:22:19.000Z |
| max_issues_repo_issues_event_max_datetime | 2020-05-03T13:57:28.000Z |
| max_forks_repo_path | xnmt/specialized_encoders/segmenting_encoder/__init__.py |
| max_forks_repo_name | neulab/xnmt |
| max_forks_repo_head_hexsha | d93f8f3710f986f36eb54e9ff3976a6b683da2a4 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 53 |
| max_forks_repo_forks_event_min_datetime | 2017-05-23T17:45:18.000Z |
| max_forks_repo_forks_event_max_datetime | 2021-04-18T12:36:37.000Z |

content:

```python
import logging
seg_logger = logging.getLogger('segment')
import xnmt.specialized_encoders.segmenting_encoder.segmenting_encoder
import xnmt.specialized_encoders.segmenting_encoder.segmenting_composer
import xnmt.specialized_encoders.segmenting_encoder.length_prior
import xnmt.specialized_encoders.segmenting_encoder.priors
import xnmt.specialized_encoders.segmenting_encoder.reporter
```

Remaining columns (`avg_line_length` through `hits`, in schema order):

| 38.7 | 71 | 0.901809 | 45 | 387 | 7.444444 | 0.355556 | 0.304478 | 0.313433 | 0.432836 | 0.746269 | 0.746269 | 0.334328 | 0 | 0 | 0 | 0 | 0 | 0.043928 | 387 | 9 | 72 | 43 | 0.905405 | 0 | 0 | 0 | 0 | 0 | 0.018135 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.857143 | 0 | 0.857143 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
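The three documented length columns are simple functions of `content`; for this row they come out as avg_line_length 38.7, max_line_length 71 and alphanum_fraction 0.901809. A sketch of plausible definitions (the dump does not spell out how blank lines or trailing newlines are treated, so details may differ):

```python
def basic_stats(content: str) -> dict:
    """Plausible definitions for avg_line_length, max_line_length and
    alphanum_fraction; the pipeline's exact conventions are assumptions."""
    lines = content.splitlines()
    return {
        "avg_line_length": sum(len(l) for l in lines) / len(lines) if lines else 0.0,
        "max_line_length": max((len(l) for l in lines), default=0),
        "alphanum_fraction": sum(c.isalnum() for c in content) / len(content) if content else 0.0,
    }
```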
**Row 4: PBR_Perfect/__init__.py (grpnpraveen/PBR_Perfect-Add-on)**

| field | value |
|---|---|
| hexsha | 17fdb851e21e37326d62d60e05d47ab467ff466c |
| size | 48,984 |
| ext | py |
| lang | Python |
| max_stars_repo_path | PBR_Perfect/__init__.py |
| max_stars_repo_name | grpnpraveen/PBR_Perfect-Add-on |
| max_stars_repo_head_hexsha | ababe5cefc967c75ccfec708244e54a6cceec03d |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | PBR_Perfect/__init__.py |
| max_issues_repo_name | grpnpraveen/PBR_Perfect-Add-on |
| max_issues_repo_head_hexsha | ababe5cefc967c75ccfec708244e54a6cceec03d |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | PBR_Perfect/__init__.py |
| max_forks_repo_name | grpnpraveen/PBR_Perfect-Add-on |
| max_forks_repo_head_hexsha | ababe5cefc967c75ccfec708244e54a6cceec03d |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
bl_info = {
"name": "PBR Perfect",
"author": "Gali_Ravi_Praveen",
"version": (1, 0),
"blender": (2, 91, 2),
"location": "View3D > Toolshelf",
"description": "Adds a new Shader to your Object",
"warning": "",
"doc_url": "",
"category": "Add Shader",
}
import bpy
import bmesh
import os
images_path=["1","2","3","4","5","6","7"]
#Custom properties
class MyProperties(bpy.types.PropertyGroup):
mat_string: bpy.props.StringProperty(name="Name")
height_strength: bpy.props.FloatProperty(name="Normal_map Strength",min=1,max=10,default=1.0)
efficiency_strength: bpy.props.EnumProperty(name="Bump Efficiency",description="your choice",items=[('OP1',"Medium",""),('OP2',"High","")])
render_engine: bpy.props.EnumProperty(name="Render Engine",description="important",items=[('OP1',"Eevee",""),('OP2',"Cycles","")])
shape:bpy.props.EnumProperty(name="shape",description="imp",items=[('NP',"Not a Square thing",""),('P',"Square thing","")])
# PANEL DESIGN
class ShaderMainPanel(bpy.types.Panel):
bl_label = "PBR Perfect"
bl_idname = "SHADER_PT_MAINPANEL"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = 'PBR Perfect'
def draw(self, context):
layout = self.layout
scene=context.scene
mytool=scene.my_tool
row=layout.row()
row.label(text="Select Suitable Texture Maps.")
row=layout.row()
row.label(text=None,icon="SHADING_TEXTURE")
row.prop(mytool,"mat_string") #one
row=layout.row()
row.scale_x=2.6 #map name
row.label(text="Albedo Map")
if images_path[0]!="1":
row.label(text=os.path.basename(images_path[0]),icon="FILE_IMAGE")
row.scale_x=2.1 #open button
row.operator('shader.albedo_operator',text="open",icon="FILEBROWSER")
row.scale_x=1 #cross button
row.operator('shader.1cancel_operator',text="",icon="CANCEL") #need to write separate operator
row=layout.row() #two
row.scale_x=2.6 #map name
row.label(text="Normal Map")
if images_path[1]!="2":
row.label(text=os.path.basename(images_path[1]),icon="FILE_IMAGE")
row.scale_x=2.1 #open button
row.operator('shader.normal_operator',text="open",icon="FILEBROWSER")
row.scale_x=1 #cross button
row.operator('shader.2cancel_operator',text="",icon="CANCEL") #need to write separate operator
row=layout.row()
row.prop(mytool,"height_strength") #three
row=layout.row()
row.scale_x=2.6 #map name
row.label(text="Roughness Map")
if images_path[2]!="3":
row.label(text=os.path.basename(images_path[2]),icon="FILE_IMAGE")
row.scale_x=2.1 #open button
row.operator('shader.roughness_operator',text="open",icon="FILEBROWSER")
row.scale_x=1 #cross button
row.operator('shader.3cancel_operator',text="",icon="CANCEL") #need to write separate operator
row=layout.row() #four
row.scale_x=2.6 #map name
row.label(text="Ambient occlusion Map")
if images_path[3]!="4":
row.label(text=os.path.basename(images_path[3]),icon="FILE_IMAGE")
row.scale_x=2.1 #open button
row.operator('shader.ambient_operator',text="open",icon="FILEBROWSER")
row.scale_x=1 #cross button
row.operator('shader.4cancel_operator',text="",icon="CANCEL") #need to write separate operator
row=layout.row() #five
row.scale_x=2.6 #map name
row.label(text="Metallic Map")
if images_path[4]!="5":
row.label(text=os.path.basename(images_path[4]),icon="FILE_IMAGE")
row.scale_x=2.1 #open button
row.operator('shader.metallic_operator',text="open",icon="FILEBROWSER")
row.scale_x=1 #cross button
row.operator('shader.5cancel_operator',text="",icon="CANCEL") #need to write separate operator
row=layout.row() #six
row.scale_x=2.6 #map name
row.label(text="Height Map")
if images_path[5]!="6":
row.label(text=os.path.basename(images_path[5]),icon="FILE_IMAGE")
row.scale_x=2.1 #open button
row.operator('shader.height_operator',text="open",icon="FILEBROWSER")
row.scale_x=1 #cross button
row.operator('shader.6cancel_operator',text="",icon="CANCEL") #need to write separate operator
row=layout.row() #seven
row.scale_x=0 #efficiency name
row.prop(mytool,"efficiency_strength") #efficiency dropdown
row=layout.row()
row.scale_x=1.6 #map name
row.label(text="Specular Map")
if images_path[6]!="7":
row.label(text=os.path.basename(images_path[6]),icon="FILE_IMAGE")
row.scale_x=2.1 #open button
row.operator('shader.specular_operator',text="open",icon="FILEBROWSER")
row.scale_x=1 #cross button
row.operator('shader.7cancel_operator',text="",icon="CANCEL") #need to write separate operator
row=layout.row()
row.prop(mytool,"render_engine")
row=layout.row()
if mytool.efficiency_strength=='OP2':
if mytool.render_engine=='OP1':
row.prop(mytool,"shape")
row=layout.row()
row.scale_y=1.8
row.operator('shader.material_operator',text="Create Material",icon="BRUSH_SOFTEN")
# MATERIAL ------------NODE SETUP
class Material(bpy.types.Operator):
bl_label="open"
bl_idname='shader.material_operator'
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
scene=context.scene
mytool=scene.my_tool
so=bpy.context.active_object
new_material=bpy.data.materials.new(name=mytool.mat_string)
so.data.materials.append(new_material)
new_material.use_nodes=True
new_material.use_backface_culling = True
new_material.blend_method = 'CLIP'
new_material.shadow_method = 'CLIP'
nodes=new_material.node_tree.nodes #accessing all the nodes of new_material
links=new_material.node_tree.links
material_output=nodes.get("Material Output")
principle_bsdf=nodes.get("Principled BSDF")
principle_bsdf.inputs[8].default_value = 0.35
uv_map_node=nodes.new(type='ShaderNodeUVMap')
uv_map_node.location=(-2600,300)
uv_map_node.uv_map="UVMap"
mapping_node=nodes.new(type='ShaderNodeMapping') #mapping node
mapping_node.inputs[3].default_value[0] = 1
mapping_node.inputs[3].default_value[1] = 1
mapping_node.inputs[3].default_value[2] = 1
mapping_node.location=(-2400,300)
material_output.location=(700,300)
uvlink_to_mapping=links.new(uv_map_node.outputs[0],mapping_node.inputs[0])
if images_path[0]!="1" and images_path[3]=="4": #only albedo
node_one=nodes.new(type='ShaderNodeTexImage')
bpy.data.images.load(images_path[0], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[0]))
node_one.location=(-800,540)
node_one.image=tex
node_one.label="Albedo Map"
bpy.data.images[os.path.basename(images_path[0])].colorspace_settings.name='sRGB'
new_link=links.new(node_one.outputs[0],principle_bsdf.inputs[0]) #link btwn albedo and pbsdf
map_to_albe=links.new(mapping_node.outputs[0],node_one.inputs[0])
if images_path[0]=="1" and images_path[3]!="4": #only ambient
node_two=nodes.new(type='ShaderNodeTexImage')
bpy.data.images.load(images_path[3], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[3]))
node_two.image=tex
node_two.label="Ambient Occlusion Map"
bpy.data.images[os.path.basename(images_path[3])].colorspace_settings.name='Non-Color'
node_two.location=(-1100,700) #ambient occlusion texture
node_three=nodes.new(type='ShaderNodeAmbientOcclusion') #ambient occlusion map
node_three.location=(-300,500)
node_four=nodes.new(type='ShaderNodeValToRGB') #color ramp
node_four.location=(-700,600)
node_four.color_ramp.elements[0].position= 0.32
amb_color_link=links.new(node_two.outputs[0],node_four.inputs[0]) #link btwn ambient and colorramp
coloramp_amb_link=links.new(node_four.outputs[0],node_three.inputs[0])
amb_to_pbsdf=links.new(node_three.outputs[0],principle_bsdf.inputs[0])
map_to_ambi=links.new(mapping_node.outputs[0],node_two.inputs[0])
if images_path[0]!="1" and images_path[3]!="4":
node_one=nodes.new(type='ShaderNodeTexImage')
bpy.data.images.load(images_path[0], check_existing=True) #both albedo and ambient
tex = bpy.data.images.get(os.path.basename(images_path[0]))
node_one.image=tex
node_one.label="Albedo Map"
bpy.data.images[os.path.basename(images_path[0])].colorspace_settings.name='sRGB'
node_one.location=(-1000,420) #albedo
node_two=nodes.new(type='ShaderNodeTexImage')
bpy.data.images.load(images_path[3], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[3]))
node_two.image=tex
node_two.label="Ambient Occlusion Map"
bpy.data.images[os.path.basename(images_path[3])].colorspace_settings.name='Non-Color'
node_two.location=(-1100,700) #ambient occlusion
node_three=nodes.new(type='ShaderNodeMixRGB')
node_three.location=(-400,500) #mixrgb
node_three.blend_type='MULTIPLY'
albedo_to_multiply_link=links.new(node_one.outputs[0],node_three.inputs[1]) #albedo to multiply
ambient_multiply_link1=links.new(node_two.outputs[0],node_three.inputs[2]) #ambient to multiply
ambient_multiply_link2=links.new(node_two.outputs[1],node_three.inputs[0]) #multiply to pBSDF
multi_to_princile_link=links.new(node_three.outputs[0],principle_bsdf.inputs[0])
map_to_albe=links.new(mapping_node.outputs[0],node_one.inputs[0]) #map to ambien and albedo
map_to_ambient=links.new(mapping_node.outputs[0],node_two.inputs[0])
if images_path[1]!="2" and images_path[5]=="6":
node_one=nodes.new(type='ShaderNodeTexImage') #only normal map
bpy.data.images.load(images_path[1], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[1]))
node_one.image=tex
node_one.label="Normal Texture"
bpy.data.images[os.path.basename(images_path[1])].colorspace_settings.name='Non-Color'
node_one.location=(-690,-300) #only NORMAL texture
node_two=nodes.new(type='ShaderNodeNormalMap') #normal map
node_two.uv_map='UVMap'
node_two.inputs[0].default_value =mytool.height_strength
node_two.location=(-300,-300)
link_to_normal=links.new(node_one.outputs[0],node_two.inputs[1])
link_from_normal_to_princi=links.new(node_two.outputs[0],principle_bsdf.inputs[20])
map_to_normtexture=links.new(mapping_node.outputs[0],node_one.inputs[0])
if images_path[1]=="2" and images_path[5]!="6":
if mytool.efficiency_strength=='OP1': #when low efficiency
node_three=nodes.new(type='ShaderNodeTexImage') # onlyheight texture
bpy.data.images.load(images_path[5], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[5]))
node_three.image=tex
node_three.label="Height Map"
bpy.data.images[os.path.basename(images_path[5])].colorspace_settings.name='Non-Color'
node_three.location=(-690,-280)
node_four=nodes.new(type='ShaderNodeBump') #BUMP texture
node_four.location=(-200,-180)
node_five=nodes.new(type='ShaderNodeRGBToBW') #rgb to black and white
node_five.location=(-425,-230)
height_to_rgbw=links.new(node_three.outputs[0],node_five.inputs[0])
rgbw_to_bump=links.new(node_five.outputs[0],node_four.inputs[2])
bump_to_pbsdf=links.new(node_four.outputs[0],principle_bsdf.inputs[20])
map_to_heighttex=links.new(mapping_node.outputs[0],node_three.inputs[0])
if mytool.efficiency_strength=='OP2': #when high efficiency
if mytool.render_engine=='OP2': #incycles
node_three=nodes.new(type='ShaderNodeTexImage') #height texture
gg=bpy.data.images.load(images_path[5], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[5]))
node_three.image=tex
node_three.label="Height Map"
bpy.data.images[os.path.basename(images_path[5])].colorspace_settings.name='Non-Color'
node_three.location=(200,200)
node_four=nodes.new(type='ShaderNodeDisplacement') #displace node
node_four.inputs[1].default_value=0.2
node_four.inputs[2].default_value=0.5
node_four.location=(490,200)
heigh_displ=links.new(node_three.outputs[0],node_four.inputs[0])
displ_materout=links.new(node_four.outputs[0],material_output.inputs[2])
map_to_disp=links.new(mapping_node.outputs[0],node_three.inputs[0])
mod_displace=so.modifiers.new("displace",'DISPLACE')
new_texture=bpy.data.textures.new("image",'IMAGE')
new_texture.image=gg
mod_displace.texture=new_texture
mod_subdivi=so.modifiers.new("subsurf",'SUBSURF')
mod_subdivi.subdivision_type = 'SIMPLE'
bpy.context.scene.render.engine = 'CYCLES'
bpy.context.scene.cycles.feature_set = 'EXPERIMENTAL'
bpy.context.object.active_material.cycles.displacement_method = 'BOTH'
bpy.context.scene.cycles.preview_dicing_rate = 1
bpy.context.object.cycles.use_adaptive_subdivision = True
if mytool.render_engine=='OP1': #in Eevee
node_three=nodes.new(type='ShaderNodeTexImage') #height texture
gg=bpy.data.images.load(images_path[5], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[5]))
node_three.image=tex
node_three.label="Height Map"
bpy.data.images[os.path.basename(images_path[5])].colorspace_settings.name='Non-Color'
node_three.location=(200,200)
map_to_disp=links.new(mapping_node.outputs[0],node_three.inputs[0])#link from mapping to displ tex
# Eevee displacement NODE GROUP
bpy.ops.mesh.uv_texture_add()
# so.data.uv_layers.new(name='hello')  # use in case a UV map named UVMap.001 already exists
bpy.ops.object.editmode_toggle()
bpy.ops.uv.select_all(action='SELECT')
me = so.data
bm = bmesh.from_edit_mesh(me)
uv_layer = bm.loops.layers.uv.verify() #accessing uv map
for face in bm.faces:
for loop in face.loops:
loop_uv = loop[uv_layer]
loop_uv.uv = (0,0)
bmesh.update_edit_mesh(me)
so.modifiers.new("subsurf",'SUBSURF')
bpy.context.object.modifiers["subsurf"].render_levels = 1
if mytool.shape=='P':
bpy.context.object.modifiers["subsurf"].subdivision_type = 'SIMPLE'
so.modifiers.new("array",'ARRAY')
bpy.context.object.modifiers["array"].use_relative_offset = False
bpy.context.object.modifiers["array"].show_in_editmode = False
bpy.context.object.modifiers["array"].count = 70
bpy.context.object.modifiers["array"].offset_u = 0.0001
mod_displace=so.modifiers.new("displace",'DISPLACE')
new_texture=bpy.data.textures.new("blend",'BLEND')
new_texture.use_clamp = False
new_texture.use_color_ramp = True
new_texture.color_ramp.elements[0].color=(0,0,0,1)
new_texture.color_ramp.elements[1].position=0.01
new_texture.color_ramp.elements[1].color=(1,1,1,0)
#-----------------here used name of the uv map instead active
mod_displace.texture=new_texture
bpy.context.object.modifiers["displace"].texture_coords = 'UV'
bpy.context.object.modifiers["displace"].uv_layer = "UVMap.001"
bpy.context.object.modifiers["displace"].strength = 0.2
bpy.context.object.modifiers["displace"].mid_level = 0.2
so.modifiers.new("weld",'WELD')
bpy.ops.object.editmode_toggle()
#uv
node_uv=nodes.new(type='ShaderNodeUVMap')
node_uv.uv_map="UVMap.001"
node_uv.location=(900,300)
#mul1
node_multiply1=nodes.new(type='ShaderNodeMath')
node_multiply1.operation='MULTIPLY'
node_multiply1.inputs[1].default_value=-1.0
node_multiply1.location=(900,100)
#mul2
node_multiply2=nodes.new(type='ShaderNodeMath')
node_multiply2.operation='MULTIPLY'
node_multiply2.inputs[1].default_value=-1.0
node_multiply2.location=(900,-100)
#mul3
node_multiply3=nodes.new(type='ShaderNodeMath')
node_multiply3.operation='MULTIPLY'
node_multiply3.inputs[1].default_value=-1.0
node_multiply3.location=(1100,100)
#mul4
node_multiply4=nodes.new(type='ShaderNodeMath')
node_multiply4.operation='MULTIPLY'
node_multiply4.inputs[1].default_value=1000
node_multiply4.use_clamp=False
node_multiply4.location=(1300,200)
#separatexyz
node_separatexyz=nodes.new(type='ShaderNodeSeparateXYZ')
node_separatexyz.location=(1100,300)
#add1
node_add1=nodes.new(type='ShaderNodeMath')
node_add1.operation='ADD'
node_add1.inputs[1].default_value=1.0
node_add1.location=(1100,-100)
#add2
node_add2=nodes.new(type='ShaderNodeMath')
node_add2.operation='ADD'
node_add2.location=(1300,-50)
#mixrgb
node_mixrgb=nodes.new(type='ShaderNodeMixRGB')
node_mixrgb.location=(1550,50)
#lessthan
node_less=nodes.new('ShaderNodeMath')
node_less.operation='LESS_THAN'
node_less.inputs[1].default_value = 0.001
node_less.location=(1800,125)
#greaterthan
node_greater=nodes.new('ShaderNodeMath')
node_greater.operation='GREATER_THAN'
node_greater.location=(1800,300)
#subtract
node_sub=nodes.new(type='ShaderNodeMath')
node_sub.operation='SUBTRACT'
node_sub.use_clamp=True
node_sub.location=(2000,213)
#mix shader
node_mixshad=nodes.new(type='ShaderNodeMixShader')
node_mixshad.location=(2200,120)
#transparentbsdf
node_transp=nodes.new(type='ShaderNodeBsdfTransparent')
node_transp.location=(2000,20)
#strength
node_stren=nodes.new(type='ShaderNodeValue')
node_stren.label="Strength"
node_stren.outputs[0].default_value=15
node_stren.location=(600,30)
#midlevel
node_mid=nodes.new(type='ShaderNodeValue')
node_mid.label="Midlevel"
node_mid.outputs[0].default_value=-2.4
node_mid.location=(600,-50)
#linking now
uv_sep=links.new(node_uv.outputs[0],node_separatexyz.inputs[0])
sep_mul4=links.new(node_separatexyz.outputs[0],node_multiply4.inputs[0])
mul4_mixrgb=links.new(node_multiply4.outputs[0],node_mixrgb.inputs[1])
mul1_mul3=links.new(node_multiply1.outputs[0],node_multiply3.inputs[1])
mul2_add1=links.new(node_multiply2.outputs[0],node_add1.inputs[0])
add1_add2=links.new(node_add1.outputs[0],node_add2.inputs[1])
mul3_add2=links.new(node_multiply3.outputs[0],node_add2.inputs[0])
add2_mixrgb=links.new(node_add2.outputs[0],node_mixrgb.inputs[2])
mixrgb_greater=links.new(node_mixrgb.outputs[0],node_greater.inputs[0])
mul4_less=links.new(node_multiply4.outputs[0],node_less.inputs[0])
great_sub=links.new(node_greater.outputs[0],node_sub.inputs[0])
less_sub=links.new(node_less.outputs[0],node_sub.inputs[1])
sub_mixshad=links.new(node_sub.outputs[0],node_mixshad.inputs[0])
transp_mixshad=links.new(node_transp.outputs[0],node_mixshad.inputs[2])
#link part2
displatex_mul3=links.new(node_three.outputs[0],node_multiply3.inputs[0])
principle_mixshad=links.new(principle_bsdf.outputs[0],node_mixshad.inputs[1])
mixshad_material=links.new(node_mixshad.outputs[0],material_output.inputs[0])
material_output.location=(2380,120)
stren_mul1=links.new(node_stren.outputs[0],node_multiply1.inputs[0])
mid_mul2=links.new(node_mid.outputs[0],node_multiply2.inputs[0])
if images_path[1]!="2" and images_path[5]!="6": #if normal and displ selected
if mytool.efficiency_strength=='OP1': #both normal and displacemnet
node_one=nodes.new(type='ShaderNodeTexImage') # normal texture
bpy.data.images.load(images_path[1], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[1]))
node_one.image=tex
node_one.label="Normal Texture"
bpy.data.images[os.path.basename(images_path[1])].colorspace_settings.name='Non-Color'
node_one.location=(-890,-550)
node_two=nodes.new(type='ShaderNodeNormalMap') #normal map
node_two.uv_map='UVMap'
node_two.inputs[0].default_value =mytool.height_strength
node_two.location=(-520,-520)
node_three=nodes.new(type='ShaderNodeTexImage') #height texture
bpy.data.images.load(images_path[5], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[5]))
node_three.image=tex
node_three.label="Height Map"
bpy.data.images[os.path.basename(images_path[5])].colorspace_settings.name='Non-Color'
node_three.location=(-690,-280)
node_four=nodes.new(type='ShaderNodeBump') #BUMP texture
node_four.location=(-200,-180)
node_five=nodes.new(type='ShaderNodeRGBToBW') #rgb to black and white
node_five.location=(-425,-230)
nmap_to_bump=links.new(node_two.outputs[0],node_four.inputs[5])
normal_link_to_nmap=links.new(node_one.outputs[0],node_two.inputs[1])
height_to_rgbw=links.new(node_three.outputs[0],node_five.inputs[0])
rgbw_to_bump=links.new(node_five.outputs[0],node_four.inputs[2])
bump_to_pbsdf=links.new(node_four.outputs[0],principle_bsdf.inputs[20])
map_to_nortex=links.new(mapping_node.outputs[0],node_one.inputs[0])
map_to_heighttex=links.new(mapping_node.outputs[0],node_three.inputs[0])
if mytool.efficiency_strength=='OP2': #high cycles
if mytool.render_engine=='OP2':
node_one=nodes.new(type='ShaderNodeTexImage') #first normal tex
bpy.data.images.load(images_path[1], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[1]))
node_one.image=tex
node_one.label="Normal Texture"
bpy.data.images[os.path.basename(images_path[1])].colorspace_settings.name='Non-Color'
node_one.location=(-690,-300)
node_two=nodes.new(type='ShaderNodeNormalMap') #normal map
node_two.uv_map='UVMap'
node_two.inputs[0].default_value =mytool.height_strength
node_two.location=(-300,-300)
link_to_normal=links.new(node_one.outputs[0],node_two.inputs[1])
link_from_normal_to_princi=links.new(node_two.outputs[0],principle_bsdf.inputs[20])
map_to_normtexture=links.new(mapping_node.outputs[0],node_one.inputs[0])
node_three=nodes.new(type='ShaderNodeTexImage') #height texture
gg=bpy.data.images.load(images_path[5], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[5]))
node_three.image=tex
node_three.label="Height Map"
bpy.data.images[os.path.basename(images_path[5])].colorspace_settings.name='Non-Color'
node_three.location=(200,200)
node_four=nodes.new(type='ShaderNodeDisplacement') #displace node
node_four.inputs[1].default_value=0.2
node_four.inputs[2].default_value=0.5
node_four.location=(490,200)
heigh_displ=links.new(node_three.outputs[0],node_four.inputs[0])
displ_materout=links.new(node_four.outputs[0],material_output.inputs[2])
map_to_disp=links.new(mapping_node.outputs[0],node_three.inputs[0])
mod_displace=so.modifiers.new("displace",'DISPLACE')
new_texture=bpy.data.textures.new("image",'IMAGE')
new_texture.image=gg
mod_displace.texture=new_texture
mod_subdivi=so.modifiers.new("subsurf",'SUBSURF')
mod_subdivi.subdivision_type = 'SIMPLE'
bpy.context.scene.render.engine = 'CYCLES'
bpy.context.scene.cycles.feature_set = 'EXPERIMENTAL'
bpy.context.object.active_material.cycles.displacement_method = 'BOTH'
bpy.context.scene.cycles.preview_dicing_rate = 1
bpy.context.object.cycles.use_adaptive_subdivision = True
if mytool.render_engine=='OP1':
node_one=nodes.new(type='ShaderNodeTexImage') # normal tex
bpy.data.images.load(images_path[1], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[1]))
node_one.image=tex
node_one.label="Normal Texture"
bpy.data.images[os.path.basename(images_path[1])].colorspace_settings.name='Non-Color'
node_one.location=(-690,-300) # NORMAL texture
node_two=nodes.new(type='ShaderNodeNormalMap') #normal map
node_two.uv_map='UVMap'
node_two.inputs[0].default_value =mytool.height_strength
node_two.location=(-300,-300)
link_to_normal=links.new(node_one.outputs[0],node_two.inputs[1])
link_from_normal_to_princi=links.new(node_two.outputs[0],principle_bsdf.inputs[20])
map_to_normtexture=links.new(mapping_node.outputs[0],node_one.inputs[0])
node_three=nodes.new(type='ShaderNodeTexImage') #height texture
gg=bpy.data.images.load(images_path[5], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[5]))
node_three.image=tex
node_three.label="Height Map"
bpy.data.images[os.path.basename(images_path[5])].colorspace_settings.name='Non-Color'
node_three.location=(200,200)
map_to_disp=links.new(mapping_node.outputs[0],node_three.inputs[0])#link from mapping to displ tex
# CUSTOM NODE GROUP
bpy.ops.mesh.uv_texture_add()
bpy.ops.object.editmode_toggle()
bpy.ops.uv.select_all(action='SELECT')
me = so.data
bm = bmesh.from_edit_mesh(me)
uv_layer = bm.loops.layers.uv.verify()
for face in bm.faces:
for loop in face.loops:
loop_uv = loop[uv_layer]
loop_uv.uv = (0,0)
bmesh.update_edit_mesh(me)
so.modifiers.new("subsurf",'SUBSURF')
bpy.context.object.modifiers["subsurf"].render_levels = 1
if mytool.shape=='P':
bpy.context.object.modifiers["subsurf"].subdivision_type = 'SIMPLE'
so.modifiers.new("array",'ARRAY')
bpy.context.object.modifiers["array"].use_relative_offset = False
bpy.context.object.modifiers["array"].show_in_editmode = False
bpy.context.object.modifiers["array"].count = 70
bpy.context.object.modifiers["array"].offset_u = 0.0001
mod_displace=so.modifiers.new("displace",'DISPLACE')
new_texture=bpy.data.textures.new("blend",'BLEND')
new_texture.use_clamp = False
new_texture.use_color_ramp = True
new_texture.color_ramp.elements[0].color=(0,0,0,1)
new_texture.color_ramp.elements[1].position=0.01
new_texture.color_ramp.elements[1].color=(1,1,1,0)
#-----------------here used name of the uv map instead active
mod_displace.texture=new_texture
bpy.context.object.modifiers["displace"].texture_coords = 'UV'
bpy.context.object.modifiers["displace"].uv_layer = "UVMap.001" #same here see above comment when only displacement when highe evee
bpy.context.object.modifiers["displace"].strength = 0.2
bpy.context.object.modifiers["displace"].mid_level = 0.2
so.modifiers.new("weld",'WELD')
bpy.ops.object.editmode_toggle()
#uv
node_uv=nodes.new(type='ShaderNodeUVMap')
node_uv.uv_map="UVMap.001"
node_uv.location=(900,300)
#mul1
node_multiply1=nodes.new(type='ShaderNodeMath')
node_multiply1.operation='MULTIPLY'
node_multiply1.inputs[1].default_value=-1.0
node_multiply1.location=(900,100)
#mul2
node_multiply2=nodes.new(type='ShaderNodeMath')
node_multiply2.operation='MULTIPLY'
node_multiply2.inputs[1].default_value=-1.0
node_multiply2.location=(900,-100)
#mul3
node_multiply3=nodes.new(type='ShaderNodeMath')
node_multiply3.operation='MULTIPLY'
node_multiply3.inputs[1].default_value=-1.0
node_multiply3.location=(1100,100)
#mul4
node_multiply4=nodes.new(type='ShaderNodeMath')
node_multiply4.operation='MULTIPLY'
node_multiply4.inputs[1].default_value=1000
node_multiply4.use_clamp=False
node_multiply4.location=(1300,200)
#separatexyz
node_separatexyz=nodes.new(type='ShaderNodeSeparateXYZ')
node_separatexyz.location=(1100,300)
#add1
node_add1=nodes.new(type='ShaderNodeMath')
node_add1.operation='ADD'
node_add1.inputs[1].default_value=1.0
node_add1.location=(1100,-100)
#add2
node_add2=nodes.new(type='ShaderNodeMath')
node_add2.operation='ADD'
node_add2.location=(1300,-50)
#mixrgb
node_mixrgb=nodes.new(type='ShaderNodeMixRGB')
node_mixrgb.location=(1550,50)
#lessthan
node_less=nodes.new('ShaderNodeMath')
node_less.operation='LESS_THAN'
node_less.inputs[1].default_value = 0.001
node_less.location=(1800,125)
#greaterthan
node_greater=nodes.new('ShaderNodeMath')
node_greater.operation='GREATER_THAN'
node_greater.location=(1800,300)
#subtract
node_sub=nodes.new(type='ShaderNodeMath')
node_sub.operation='SUBTRACT'
node_sub.use_clamp=True
node_sub.location=(2000,213)
#mix shader
node_mixshad=nodes.new(type='ShaderNodeMixShader')
node_mixshad.location=(2200,120)
#transparentbsdf
node_transp=nodes.new(type='ShaderNodeBsdfTransparent')
node_transp.location=(2000,20)
#strength
node_stren=nodes.new(type='ShaderNodeValue')
node_stren.label="Strength"
node_stren.outputs[0].default_value=15
node_stren.location=(600,30)
#midlevel
node_mid=nodes.new(type='ShaderNodeValue')
node_mid.label="Midlevel"
node_mid.outputs[0].default_value=-2.4
node_mid.location=(600,-50)
#linking now
uv_sep=links.new(node_uv.outputs[0],node_separatexyz.inputs[0])
sep_mul4=links.new(node_separatexyz.outputs[0],node_multiply4.inputs[0])
mul4_mixrgb=links.new(node_multiply4.outputs[0],node_mixrgb.inputs[1])
mul1_mul3=links.new(node_multiply1.outputs[0],node_multiply3.inputs[1])
mul2_add1=links.new(node_multiply2.outputs[0],node_add1.inputs[0])
add1_add2=links.new(node_add1.outputs[0],node_add2.inputs[1])
mul3_add2=links.new(node_multiply3.outputs[0],node_add2.inputs[0])
add2_mixrgb=links.new(node_add2.outputs[0],node_mixrgb.inputs[2])
mixrgb_greater=links.new(node_mixrgb.outputs[0],node_greater.inputs[0])
mul4_less=links.new(node_multiply4.outputs[0],node_less.inputs[0])
great_sub=links.new(node_greater.outputs[0],node_sub.inputs[0])
less_sub=links.new(node_less.outputs[0],node_sub.inputs[1])
sub_mixshad=links.new(node_sub.outputs[0],node_mixshad.inputs[0])
transp_mixshad=links.new(node_transp.outputs[0],node_mixshad.inputs[2])
#link part2
displatex_mul3=links.new(node_three.outputs[0],node_multiply3.inputs[0])
principle_mixshad=links.new(principle_bsdf.outputs[0],node_mixshad.inputs[1])
mixshad_material=links.new(node_mixshad.outputs[0],material_output.inputs[0])
material_output.location=(2380,120)
stren_mul1=links.new(node_stren.outputs[0],node_multiply1.inputs[0])
mid_mul2=links.new(node_mid.outputs[0],node_multiply2.inputs[0])
if images_path[2]!="3":
node_one=nodes.new(type='ShaderNodeTexImage') #roughness map
bpy.data.images.load(images_path[2], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[2]))
node_one.image=tex
node_one.label="Roughness Map"
bpy.data.images[os.path.basename(images_path[2])].colorspace_settings.name='Non-Color'
node_one.location=(-300,-10)
roughtex_to_principle=links.new(node_one.outputs[0],principle_bsdf.inputs[7]) #rough map to principle bsdf
map_to_roughness=links.new(mapping_node.outputs[0],node_one.inputs[0])
if images_path[4]!="5":
node_one=nodes.new(type='ShaderNodeTexImage') #metallic map
bpy.data.images.load(images_path[4], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[4]))
node_one.image=tex
node_one.label="Metallic Map"
bpy.data.images[os.path.basename(images_path[4])].colorspace_settings.name='Non-Color'
node_one.location=(-730,300)
metallic_to_principle=links.new(node_one.outputs[0],principle_bsdf.inputs[4]) #metallic to princi
map_to_metalic=links.new(mapping_node.outputs[0],node_one.inputs[0])
if images_path[6]!="7":
node_one=nodes.new(type='ShaderNodeTexImage')
bpy.data.images.load(images_path[6], check_existing=True)
tex = bpy.data.images.get(os.path.basename(images_path[6]))
node_one.image=tex
node_one.label="Specular Map"
bpy.data.images[os.path.basename(images_path[6])].colorspace_settings.name='Non-Color'
node_one.location=(-680,30)
specular_to_principle_link=links.new(node_one.outputs[0],principle_bsdf.inputs[5])#specular ti principle
map_to_specular=links.new(mapping_node.outputs[0],node_one.inputs[0])
return {'FINISHED'}
# OPERATORS
class Albedo_Map(bpy.types.Operator): #one
bl_label="open"
bl_idname='shader.albedo_operator'
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[0]=self.filepath
bpy.utils.unregister_class(ShaderMainPanel)
bpy.utils.register_class(ShaderMainPanel)
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
class Normal_Map(bpy.types.Operator): #two
bl_label="open"
bl_idname='shader.normal_operator'
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[1]=self.filepath
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
class Roughness_Map(bpy.types.Operator): #three
bl_label="open"
bl_idname='shader.roughness_operator'
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[2]=self.filepath
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
class Ambient_Map(bpy.types.Operator): #four
bl_label="open"
bl_idname='shader.ambient_operator'
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[3]=self.filepath
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
class Metallic_Map(bpy.types.Operator): #five
bl_label="open"
bl_idname='shader.metallic_operator'
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[4]=self.filepath
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
class Height_Map(bpy.types.Operator): #six
bl_label="open"
bl_idname='shader.height_operator'
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[5]=self.filepath
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
class Specular_Map(bpy.types.Operator): #seven
bl_label="open"
bl_idname='shader.specular_operator'
filepath: bpy.props.StringProperty(subtype="FILE_PATH")
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[6]=self.filepath
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
# image cancel selection
class AlbedoCancel_Map(bpy.types.Operator): #one
bl_label="open"
bl_idname='shader.1cancel_operator'
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[0]="1"
bpy.utils.unregister_class(ShaderMainPanel)
bpy.utils.register_class(ShaderMainPanel)
return {'FINISHED'}
class NormalCancel_Map(bpy.types.Operator): #two
bl_label="open"
bl_idname='shader.2cancel_operator'
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[1]="2"
return {'FINISHED'}
class RoughnessCancel_Map(bpy.types.Operator): #three
bl_label="open"
bl_idname='shader.3cancel_operator'
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[2]="3"
return {'FINISHED'}
class AmbientCancel_Map(bpy.types.Operator): #four
bl_label="open"
bl_idname='shader.4cancel_operator'
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[3]="4"
return {'FINISHED'}
class MetallicCancel_Map(bpy.types.Operator): #five
bl_label="open"
bl_idname='shader.5cancel_operator'
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[4]="5"
return {'FINISHED'}
class HeightCancel_Map(bpy.types.Operator): #six
bl_label="open"
bl_idname='shader.6cancel_operator'
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[5]="6"
return {'FINISHED'}
class SpecularCancel_Map(bpy.types.Operator): #seven
bl_label="open"
bl_idname='shader.7cancel_operator'
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
images_path[6]="7"
return {'FINISHED'}
# final reg and unregis
classes=[Material,AlbedoCancel_Map,SpecularCancel_Map,NormalCancel_Map,HeightCancel_Map,ShaderMainPanel,MetallicCancel_Map,AmbientCancel_Map,RoughnessCancel_Map,Albedo_Map,Normal_Map,Roughness_Map,Ambient_Map,Metallic_Map,Height_Map,Specular_Map,MyProperties] #need to add different maps names
def register():
for clas in classes:
bpy.utils.register_class(clas)
bpy.types.Scene.my_tool=bpy.props.PointerProperty(type=MyProperties)
def unregister():
for clas in classes:
bpy.utils.unregister_class(clas)
del bpy.types.Scene.my_tool
if __name__ == "__main__":
register()
```

Remaining columns (`avg_line_length` through `hits`, in schema order):

| 55.038202 | 296 | 0.561347 | 5,514 | 48,984 | 4.797606 | 0.079616 | 0.035911 | 0.030846 | 0.030997 | 0.853671 | 0.836244 | 0.819838 | 0.803999 | 0.785325 | 0.777992 | 0 | 0.033172 | 0.329802 | 48,984 | 889 | 297 | 55.100112 | 0.77264 | 0.05612 | 0 | 0.742894 | 0 | 0 | 0.088887 | 0.018941 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05168 | false | 0 | 0.005168 | 0.01938 | 0.187339 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
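For Python rows, the `qsc_codepython_*` family adds language-aware checks. A sketch under assumed definitions (they are not documented in this dump): `cate_ast` as "does the file parse", and the `frac_lines_*` signals as shares of non-blank lines:

```python
import ast

def python_signals(source: str) -> dict:
    """Assumed definitions for a few qsc_codepython_* signals."""
    lines = [l for l in source.splitlines() if l.strip()]
    n = len(lines) or 1
    try:
        ast.parse(source)
        cate_ast = 1.0  # file is syntactically valid Python
    except SyntaxError:
        cate_ast = 0.0
    return {
        "cate_ast": cate_ast,
        "frac_lines_import": sum(l.lstrip().startswith(("import ", "from ")) for l in lines) / n,
        "frac_lines_print": sum("print(" in l for l in lines) / n,
        "frac_lines_pass": sum(l.strip() == "pass" for l in lines) / n,
    }
```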
**Row 5: util/logo.py (WooQi57/cassie-run)**

| field | value |
|---|---|
| hexsha | a4ea363dce2567dc99fc41955dab9b82b8dd59aa |
| size | 1,754 |
| ext | py |
| lang | Python |
| max_stars_repo_path | util/logo.py |
| max_stars_repo_name | WooQi57/cassie-run |
| max_stars_repo_head_hexsha | 9aac12e3a69a011735540d9f5711b8f06da9af81 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 36 |
| max_stars_repo_stars_event_min_datetime | 2019-10-01T22:50:12.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-02-09T06:17:16.000Z |
| max_issues_repo_path | util/logo.py |
| max_issues_repo_name | WooQi57/cassie-run |
| max_issues_repo_head_hexsha | 9aac12e3a69a011735540d9f5711b8f06da9af81 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 5 |
| max_issues_repo_issues_event_min_datetime | 2019-11-26T02:35:39.000Z |
| max_issues_repo_issues_event_max_datetime | 2020-11-29T23:20:48.000Z |
| max_forks_repo_path | util/logo.py |
| max_forks_repo_name | WooQi57/cassie-run |
| max_forks_repo_head_hexsha | 9aac12e3a69a011735540d9f5711b8f06da9af81 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 24 |
| max_forks_repo_forks_event_min_datetime | 2019-09-23T19:26:48.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-02-14T14:04:18.000Z |

content:

```python
class color:
BOLD = '\033[1m\033[48m'
END = '\033[0m'
ORANGE = '\033[38;5;202m'
BLACK = '\033[38;5;240m'
def print_logo(subtitle="", option=2):
print()
print(color.BOLD + color.ORANGE + " .8. " + color.BLACK + " 8 888888888o " + color.ORANGE + "8 8888888888 `8.`8888. ,8' ")
print(color.BOLD + color.ORANGE + " .888. " + color.BLACK + " 8 8888 `88. " + color.ORANGE + "8 8888 `8.`8888. ,8' ")
print(color.BOLD + color.ORANGE + " :88888. " + color.BLACK + " 8 8888 `88 " + color.ORANGE + "8 8888 `8.`8888. ,8' ")
print(color.BOLD + color.ORANGE + " . `88888. " + color.BLACK + " 8 8888 ,88 " + color.ORANGE + "8 8888 `8.`8888.,8' ")
print(color.BOLD + color.ORANGE + " .8. `88888. " + color.BLACK + " 8 8888. ,88' " + color.ORANGE + "8 888888888888 `8.`88888' ")
print(color.BOLD + color.ORANGE + " .8`8. `88888. " + color.BLACK + " 8 888888888P' " + color.ORANGE + "8 8888 .88.`8888. ")
print(color.BOLD + color.ORANGE + " .8' `8. `88888. " + color.BLACK + " 8 8888 " + color.ORANGE + "8 8888 .8'`8.`8888. ")
print(color.BOLD + color.ORANGE + " .8' `8. `88888. " + color.BLACK + " 8 8888 " + color.ORANGE + "8 8888 .8' `8.`8888. ")
print(color.BOLD + color.ORANGE + " .888888888. `88888. " + color.BLACK + " 8 8888 " + color.ORANGE + "8 8888 .8' `8.`8888. ")
print(color.BOLD + color.ORANGE + ".8' `8. `88888." + color.BLACK + " 8 8888 " + color.ORANGE + "8 888888888888 .8' `8.`8888. " + color.END)
print("\n")
print(subtitle)
print("\n")
```

Remaining columns (`avg_line_length` through `hits`, in schema order):

| 76.26087 | 164 | 0.486887 | 217 | 1,754 | 3.930876 | 0.142857 | 0.134818 | 0.225088 | 0.222743 | 0.739742 | 0.709261 | 0.677608 | 0.677608 | 0.641266 | 0.601407 | 0 | 0.247267 | 0.322121 | 1,754 | 22 | 165 | 79.727273 | 0.470143 | 0 | 0 | 0.2 | 0 | 0 | 0.424173 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0 | 0 | 0.3 | 0.75 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
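The `color` constants in the row above are ANSI SGR escape sequences: `\033[1m` turns on bold, `\033[38;5;Nm` picks foreground color N from the 256-color palette (202 is an orange, 240 a grey), and `\033[0m` resets attributes. A minimal sketch of the same mechanism:

```python
# \033[38;5;Nm = 256-color foreground, \033[1m = bold, \033[0m = reset.
ORANGE, GREY = 202, 240  # the two palette indices print_logo uses
for n in (ORANGE, GREY):
    print(f"\033[1m\033[38;5;{n}m sample text in color {n} \033[0m")
```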
**Row 6: tests/8-parse/ex_parse_layout.py (JCoetzee123/spira)**

| field | value |
|---|---|
| hexsha | a4f98a21be477121112ffc2dcf774f1f247d2c22 |
| size | 1,606 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/8-parse/ex_parse_layout.py |
| max_stars_repo_name | JCoetzee123/spira |
| max_stars_repo_head_hexsha | dae08feba1578ecc8745b45109f4fb7bef374546 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | tests/8-parse/ex_parse_layout.py |
| max_issues_repo_name | JCoetzee123/spira |
| max_issues_repo_head_hexsha | dae08feba1578ecc8745b45109f4fb7bef374546 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tests/8-parse/ex_parse_layout.py |
| max_forks_repo_name | JCoetzee123/spira |
| max_forks_repo_head_hexsha | dae08feba1578ecc8745b45109f4fb7bef374546 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
import os
import spira.all as spira
from spira.yevon import io
from copy import copy, deepcopy
from spira.technologies.aist.rdd.database import RDD
if __name__ == '__main__':
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/dff.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/and.gds'
file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_rotated.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_reflected.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_hierarchy.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_hierarchy_lvl3.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_hierarchy_lvl3_rotation.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_hierarchy_lvl3_reflection.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_hierarchy_lvl4.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_hierarchy_lvl4_rotation.gds'
# file_name = '/home/therealtyler/code/phd/spira/spira/technologies/aist/layouts/stable/jj_hierarchy_lvl4_reflection.gds'
D = io.import_gds(filename=file_name)
D.gdsii_output()
```

Remaining columns (`avg_line_length` through `hits`, in schema order):

| 51.806452 | 125 | 0.781445 | 225 | 1,606 | 5.391111 | 0.173333 | 0.182193 | 0.225062 | 0.237428 | 0.821105 | 0.821105 | 0.821105 | 0.821105 | 0.821105 | 0.821105 | 0 | 0.004093 | 0.087173 | 1,606 | 30 | 126 | 53.533333 | 0.823329 | 0.742839 | 0 | 0 | 0 | 0.111111 | 0.2175 | 0.1975 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 11 |
**Row 7: pyrankability/plot.py (IGARDS/ranking_toolbox)**

| field | value |
|---|---|
| hexsha | 35336938a5f1e15a25bad542c30c12f7543e95dc |
| size | 15,181 |
| ext | py |
| lang | Python |
| max_stars_repo_path | pyrankability/plot.py |
| max_stars_repo_name | IGARDS/ranking_toolbox |
| max_stars_repo_head_hexsha | 98e2d318c76c92d91bb2c0481efe9879cd3614db |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | pyrankability/plot.py |
| max_issues_repo_name | IGARDS/ranking_toolbox |
| max_issues_repo_head_hexsha | 98e2d318c76c92d91bb2c0481efe9879cd3614db |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 2 |
| max_issues_repo_issues_event_min_datetime | 2022-02-07T19:56:51.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-02-07T20:03:58.000Z |
| max_forks_repo_path | pyrankability/plot.py |
| max_forks_repo_name | IGARDS/ranking_toolbox |
| max_forks_repo_head_hexsha | 98e2d318c76c92d91bb2c0481efe9879cd3614db |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import pandas as pd
import altair as alt
from pylab import rcParams
from .common import *
alt.data_transformers.disable_max_rows()
from networkx.drawing.nx_agraph import graphviz_layout, to_agraph
import pygraphviz as pgv
from IPython.display import Image
def draw(A):
return Image(A.draw(format='png', prog='dot'))
def D_as_graph(D,file=None):
G = nx.DiGraph()
for i in D.index:
for j in D.columns:
if D.loc[i,j] != 0:
G.add_edge(i,j,width=D.loc[i,j],label=D.loc[i,j])
A = to_agraph(G)
A.layout('dot')
if file is not None:
A.draw(file)
return draw(A)
# Given something like:
# A = [4, 10, 1, 12, 3, 9, 0, 6, 5, 11, 2, 8, 7]
# B = [5, 4, 10, 1, 7, 6, 12, 3, 9, 0, 11, 2, 8]
def AB_to_P2(A,B):
P2 = pd.DataFrame(np.array([A,B]))
return P2
def spider3(perm1,perm2,file=None,fig_format="PNG",width=5,height=10,font_size=8,xmult = 2,ymult=1.2):
assert len(perm1) == len(perm2)
assert type(perm1) == pd.Series
assert type(perm2) == pd.Series
assert perm1.name != perm2.name
rcParams['figure.figsize'] = width, height
#rcParams['figure.constrained_layout.h_pad'] = 5
#plt.tight_layout()
plt.clf()
G = nx.Graph()
pos = {}
buffer = 0.25
step = (2-2*buffer)/len(perm1)
labels={}
y1 = []
y2 = []
y = []
index = []
for i in range(len(perm1)):
name1 = f"{perm1.name}:{perm1.iloc[i]}"
name2 = f"{perm2.name}:{perm2.iloc[i]}"
G.add_node(name1)
G.add_node(name2)
loc = 1-buffer-(i*step)
pos[name1] = np.array([-1,loc])
pos[name2] = np.array([1,loc])
labels[name1] = perm1.index[i]
labels[name2] = perm2.index[i]
y1.append(name1)
y2.append(name2)
y.append("A")
y.append("B")
index.append(name1)
index.append(name2)
y=pd.Series(y,index=index)
for i in range(len(perm1)):
name1 = f"{perm1.name}:{perm1.iloc[i]}"
ix = np.where(perm1.iloc[i] == perm2)[0][0]
name2 = f"{perm2.name}:{perm2.iloc[ix]}"
G.add_edge(name1, name2)
edges = G.edges()
nx.draw_networkx_labels(G,pos=pos,labels=labels,font_size=font_size)
color_map = y.map({"A":"white","B":"white"})
nx.draw(G, pos, node_color=color_map)
xmax= xmult*max(xx for xx,yy in pos.values())
ymax= ymult*max(yy for xx,yy in pos.values())
plt.xlim(-xmax,xmax)
plt.ylim(-ymax,ymax)
#A = to_agraph(G)
#A.layout('dot')
#nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)
if file is not None:
plt.savefig(file)
def spider2(perm1,perm2,file=None,fig_format="PNG",width=5,height=10,font_size=8,xmult = 2,ymult=1.2):
assert len(perm1) == len(perm2)
assert type(perm1) == pd.Series
assert type(perm2) == pd.Series
assert perm1.name != perm2.name
rcParams['figure.figsize'] = width, height
#rcParams['figure.constrained_layout.h_pad'] = 5
#plt.tight_layout()
plt.clf()
G = nx.Graph()
pos = {}
buffer = 0.25
step = (2-2*buffer)/len(perm1)
labels={}
y1 = []
y2 = []
y = []
index = []
for i in range(len(perm1)):
name1 = f"{perm1.name}:{perm1.loc[i]}"
name2 = f"{perm2.name}:{perm2.loc[i]}"
G.add_node(name1)
G.add_node(name2)
loc = 1-buffer-(i*step)
pos[name1] = np.array([-1,loc])
pos[name2] = np.array([1,loc])
labels[name1] = perm1.loc[i]
labels[name2] = perm2.loc[i]
y1.append(name1)
y2.append(name2)
y.append("A")
y.append("B")
index.append(name1)
index.append(name2)
y=pd.Series(y,index=index)
for i in range(len(perm1)):
name1 = f"{perm1.name}:{perm1.loc[i]}"
ix = np.where(perm1.loc[i] == perm2)[0][0]
name2 = f"{perm2.name}:{perm2.loc[ix]}"
G.add_edge(name1, name2)
edges = G.edges()
nx.draw_networkx_labels(G,pos=pos,labels=labels,font_size=font_size)
color_map = y.map({"A":"white","B":"white"})
nx.draw(G, pos, node_color=color_map)
xmax= xmult*max(xx for xx,yy in pos.values())
ymax= ymult*max(yy for xx,yy in pos.values())
plt.xlim(-xmax,xmax)
plt.ylim(-ymax,ymax)
#A = to_agraph(G)
#A.layout('dot')
#nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)
if file is not None:
plt.savefig(file)
def spider(P2,file=None,fig_format="PNG",width=5,height=10,font_size=8):
"""
from pyrankability.plot import spider, AB_to_P2
A = [4, 10, 1, 12, 3, 9, 0, 6, 5, 11, 2, 8, 7]
B = [5, 4, 10, 1, 7, 6, 12, 3, 9, 0, 11, 2, 8]
spider(AB_to_P2(A,B))
"""
rcParams['figure.figsize'] = width, height
G = nx.Graph()
pos = {}
buffer = 0.25
step = (2-2*buffer)/P2.shape[1]
labels={}
y1 = []
y2 = []
y = []
index = []
for i in range(P2.shape[1]):
v = str(i+1)
name1 = f"A{v}:{P2.iloc[0,i]}"
name2 = f"B{v}:{P2.iloc[1,i]}"
#name2 = "B%d:%d"%(i+1,P2.iloc[1,i])
G.add_node(name1)
G.add_node(name2)
loc = 1-buffer-(i*step)
pos[name1] = np.array([-1,loc])
pos[name2] = np.array([1,loc])
labels[name1] = P2.iloc[0,i]
labels[name2] = P2.iloc[1,i]
y1.append(name1)
y2.append(name2)
y.append("A")
y.append("B")
index.append(name1)
index.append(name2)
y=pd.Series(y,index=index)
for i in range(P2.shape[1]):
v=str(i+1)
name1 = f"A{v}:{P2.iloc[0,i]}"
#name1 = "A%d:%d"%(i+1,P2.iloc[0,i])
ix = np.where(P2.iloc[1,:] == P2.iloc[0,i])[0][0]
v=str(ix+1)
name2 = f"B{v}:{P2.iloc[0,i]}"
#name2 = "B%d:%d"%(ix+1,P2.iloc[0,i])
G.add_edge(name1, name2)
edges = G.edges()
nx.draw_networkx_labels(G,pos=pos,labels=labels,font_size=font_size)
color_map = y.map({"A":"white","B":"white"})
nx.draw(G, pos, node_color=color_map)
#A = to_agraph(G)
#A.layout('dot')
#nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)
if file is not None:
#A.draw(file)
plt.savefig(file)
def show_score_xstar(xstars,indices=None,group_label="Group",fixed_r=None,resolve_scale=False,columns=1,width=300,height=300):
all_df = pd.DataFrame(columns=["i","j","x",group_label,"ri","rj"])
score_df = pd.DataFrame(columns=["num_frac_xstar_upper","num_one_xstar_upper","num_zero_xstar_upper"])
score_df.index.name = group_label
ordered_xstars = {}
for key in xstars.keys():
x = xstars[key].copy()
if fixed_r is not None and key in fixed_r:
r = fixed_r[key]
else:
r = x.sum(axis=0)
order = np.argsort(r)
xstar = x.copy().iloc[order,:].iloc[:,order]
xstar.loc[:,:] = threshold_x(xstar.values)
if indices is not None:
x = x.iloc[indices[key],:].iloc[:,indices[key]]
ordered_xstars[key] = xstar
inxs = np.triu_indices(len(xstar),k=1)
xstar_upper = xstar.values[inxs[0],inxs[1]]
nfrac_upper = sum((xstar_upper > 0) & (xstar_upper < 1))
none_upper = sum(xstar_upper == 1)
nzero_upper = sum(xstar_upper == 0)
        # DataFrame.append was removed in pandas 2.0; assign the row directly
        score_df.loc[key] = [nfrac_upper, none_upper, nzero_upper]
#rixs = np.argsort(r)
#x = x.iloc[:,rixs].iloc[rixs,:]#np.ix_(rixs,rixs)]
df = (1-x).stack().reset_index()
df.columns=["i","j","x"]
df["ri"] = list(r.loc[df["i"]])
df["rj"] = list(r.loc[df["j"]])
df[group_label] = key
        all_df = pd.concat([all_df, df], ignore_index=True)  # append removed in pandas 2.0
#all_df = all_df.loc[(all_df.x != 0) & (all_df.x != 1)]
g = alt.Chart(all_df,width=width).mark_square().encode(
x=alt.X(
'i:N',
axis=alt.Axis(labelOverlap=False),
title="r",
sort=alt.EncodingSortField(field="ri",order="ascending") # The order to sort in
),
y=alt.Y(
'j:N',
axis=alt.Axis(labelOverlap=False),
title="r",
sort=alt.EncodingSortField(field="rj",order="ascending") # The order to sort in
),
color=alt.Color("x",scale=alt.Scale(scheme='greys'))
).properties(
width=width,
height=height
).facet(
facet=alt.Column("%s:N"%group_label, title=None),
columns=columns
)
if resolve_scale:
g = g.resolve_scale(x='independent',y='independent')
    g = g.configure_title(  # keep the configured chart; the return value was previously discarded
        fontSize=12,
        font='Times',
        orient='bottom'
    )
return g,score_df,ordered_xstars
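# A minimal usage sketch (assumes each value of the dict is a square pandas
# DataFrame of LP-relaxation x* values; `xstar_df` is a placeholder name):
#
#   g, score_df, ordered = show_score_xstar({"demo": xstar_df})
#   g           # faceted Altair heatmap of 1 - x*
#   score_df    # counts of fractional/one/zero entries above the diagonal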
def show_single_xstar(x,indices=None,fixed_r=None,
width=300,height=300,
labelFontSize=10,titleFontSize=10,prepare_url_func=None):
    # This single-matrix variant takes fixed_r/indices directly; the original
    # referenced an undefined `key` left over from the dict-based version above.
    if fixed_r is not None:
        r = fixed_r
else:
r = x.sum(axis=0)
order = np.argsort(r)
xstar = x.copy().iloc[order,:].iloc[:,order]
xstar.loc[:,:] = threshold_x(xstar.values)
    if indices is not None:
        x = x.iloc[indices, :].iloc[:, indices]
# For coloring purposes
x.loc[:,:] = threshold_x(x.values)
ordered_xstar = xstar
inxs = np.triu_indices(len(xstar),k=1)
xstar_upper = xstar.values[inxs]
nfrac_upper = sum((xstar_upper > 0) & (xstar_upper < 1))
none_upper = sum(xstar_upper == 1)
nzero_upper = sum(xstar_upper == 0)
score_series = pd.Series([nfrac_upper,none_upper,nzero_upper],
index=["num_frac_xstar_upper","num_one_xstar_upper","num_zero_xstar_upper"])
df = x.stack().reset_index()
df.columns=["i","j","x"]
df["ri"] = list(r.loc[df["i"]])
df["rj"] = list(r.loc[df["j"]])
df.loc[:,"c"] = "white"
df.loc[(df["x"] > 0) & (df["x"] < 1) & (df["ri"] < df["rj"]),"c"] = "green"
df.loc[(df["x"] > 0) & (df["x"] < 1) & (df["ri"] > df["rj"]),"c"] = "red"
df.loc[df["i"] == df["j"],"c"] = "black"
if prepare_url_func is not None:
df_url = prepare_url_func(df)
else:
df_url = df
g = alt.Chart(df_url,width=width).mark_square().encode(
x=alt.X(
'i:N',
axis=alt.Axis(labelOverlap=False,labelFontSize=8),
title="r",
sort=alt.EncodingSortField(field="ri",order="ascending") # The order to sort in
),
y=alt.Y(
'j:N',
axis=alt.Axis(labelOverlap=False,labelFontSize=8),
title="r",
sort=alt.EncodingSortField(field="rj",order="ascending") # The order to sort in
),
color=alt.Color("c:N",scale=None)#alt.Scale(scheme='greys'))
).properties(
width=width,
height=height
).configure_axis(
labelFontSize=labelFontSize,
titleFontSize=titleFontSize
)
return g,score_series,ordered_xstar
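# Sketch for the single-matrix variant (`xstar_df` is a placeholder name):
#
#   g, scores, ordered = show_single_xstar(xstar_df)
#
# Cell colors: green = fractional x* where ri < rj, red = fractional where
# ri > rj, black = diagonal, white = everything else.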
def show_score_xstar2(xstars,indices=None,group_label="Group",fixed_r=None,resolve_scale=False,columns=1,width=300,height=300,labelFontSize=12):
all_df = pd.DataFrame(columns=["i","j","x",group_label,"ri","rj"])
score_df = pd.DataFrame(columns=["num_frac_xstar_upper","num_one_xstar_upper","num_zero_xstar_upper"])
score_df.index.name = group_label
ordered_xstars = {}
for key in xstars.keys():
x = xstars[key].copy()
if fixed_r is not None and key in fixed_r:
r = fixed_r[key]
else:
r = x.sum(axis=0)
order = np.argsort(r)
xstar = x.copy().iloc[order,:].iloc[:,order]
xstar.loc[:,:] = threshold_x(xstar.values)
if indices is not None:
x = x.iloc[indices[key],:].iloc[:,indices[key]]
# For coloring purposes
x.loc[:,:] = threshold_x(x.values)
ordered_xstars[key] = xstar
inxs = np.triu_indices(len(xstar),k=1)
xstar_upper = xstar.values[inxs]
nfrac_upper = sum((xstar_upper > 0) & (xstar_upper < 1))
none_upper = sum(xstar_upper == 1)
nzero_upper = sum(xstar_upper == 0)
        # DataFrame.append was removed in pandas 2.0; assign the row directly
        score_df.loc[key] = [nfrac_upper, none_upper, nzero_upper]
#rixs = np.argsort(r)
#x = x.iloc[:,rixs].iloc[rixs,:]#np.ix_(rixs,rixs)]
df = x.stack().reset_index()
df.columns=["i","j","x"]
df["ri"] = list(r.loc[df["i"]])
df["rj"] = list(r.loc[df["j"]])
df.loc[:,"c"] = "white"
df.loc[(df["x"] > 0) & (df["x"] < 1) & (df["ri"] < df["rj"]),"c"] = "green"
df.loc[(df["x"] > 0) & (df["x"] < 1) & (df["ri"] > df["rj"]),"c"] = "red"
df.loc[df["i"] == df["j"],"c"] = "black"
df[group_label] = key
        all_df = pd.concat([all_df, df], ignore_index=True)  # append removed in pandas 2.0
#all_df = all_df.loc[(all_df.x != 0) & (all_df.x != 1)]
g = alt.Chart(all_df,width=width).mark_square().encode(
x=alt.X(
'i:N',
axis=alt.Axis(labelOverlap=False,labelFontSize=8),
title="r",
sort=alt.EncodingSortField(field="ri",order="ascending") # The order to sort in
),
y=alt.Y(
'j:N',
axis=alt.Axis(labelOverlap=False,labelFontSize=8),
title="r",
sort=alt.EncodingSortField(field="rj",order="ascending") # The order to sort in
),
color=alt.Color("c",scale=None)#alt.Scale(scheme='greys'))
).properties(
width=width,
height=height
).facet(
facet=alt.Column(title=None,field=alt.Field(group_label),type='nominal',header=alt.Header(labelFontSize=labelFontSize,labelOrient='bottom')),
#alt.Column("%s:N"%group_label, title=,header=alt.Header(labelBaseline="bottom")),
columns=columns
).configure_axis(
labelFontSize=10,
titleFontSize=10
)
#g= g.configure_title(
# fontSize=12,
# font='Times',
# titleAnchor='bottom'
#)
if resolve_scale:
g = g.resolve_scale(x='independent',y='independent')
return g,score_df,ordered_xstars
def show_hillside(V,P0):
perm=pd.Series(P0,index=V.columns)
r=perm.argsort()
#V_G=V.iloc[perm,:].iloc[:,perm]
#x = pd.DataFrame(details['x'],index=V.index,columns=V.columns).iloc[perm,:].iloc[:,perm]
#r = x.sum(axis=1)
df=V.T.stack().to_frame().reset_index()
df.columns=["team_i_name","team_k_name","v"]
df["ri"] = list(-r.loc[df["team_i_name"]])
df["rk"] = list(r.loc[df["team_k_name"]])
g=alt.Chart(df).mark_circle().encode(
x=alt.X(
'team_i_name:N',
axis=alt.Axis(labelOverlap=False),
title="r",
sort=alt.SortField(field="ri",order="descending") # The order to sort in
),
y=alt.Y(
'team_k_name:N',
axis=alt.Axis(labelOverlap=False),
title="r",
sort=alt.SortField(field="rk",order="ascending") # The order to sort in
),
size='v:Q'
)
return g
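# A minimal usage sketch (V is a square dominance DataFrame and P0 a
# permutation of its column positions; names are placeholders):
#
#   g = show_hillside(V, P0)
#   g  # circle size encodes V[i, k]; axes are ordered by the permutation's ranks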
# -----------------------------------------------------------------------------
# blob 10bdb8dac2ff47e896e1b65d678f6d391f8aa2df -- 118 bytes, Python (.py)
# path wikidata_tree_generator/tree_builder/tree_generator/__init__.py
# repo lmallez/wikidata-tree-generator @ 4fe6b8af6615083e670bdd9495624f4292fd53c0 [MIT]
# stars 4 (2020-07-06 .. 2020-10-27), issues 2 (2020-10-10 .. 2021-06-25)
# -----------------------------------------------------------------------------
#!/usr/bin/env python3
from .tree_generator import TreeGenerator
from .cache_tree_generator import CacheTreeGenerator
# -----------------------------------------------------------------------------
# blob 52b0fdd52e4a276180de0f9b6fcc9f6edd5eb949 -- 5,754 bytes, Python (.py)
# path gobigger/hyper/tests/test_demo.py
# repo jayyoung0802/GoBigger @ f7cf14ee4208e041295035342ecee70026f268d9 [Apache-2.0]
# stars 189 (2021-10-08 .. 2022-03-31), issues 25 (2021-11-01 .. 2022-03-22), forks 28 (2021-10-14 .. 2022-03-31)
# -----------------------------------------------------------------------------
import pygame
import time
import logging
from gobigger.hyper import StraightMergeHyperAction, QuarterMergeHyperAction, EighthMergeHyperAction
from gobigger.server import Server
from gobigger.render import RealtimeRender, RealtimePartialRender, EnvRender
def demo_straight_merge():
server = Server(dict(
team_num=1,
player_num_per_team=2,
map_width=600,
map_height=600,
match_time=60*1,
state_tick_per_second=20, # frame
action_tick_per_second=5, # frame
))
server.start()
render = RealtimeRender(server.map_width, server.map_height)
server.set_render(render)
server.player_manager.get_players()[0].get_balls()[0].set_size(420)
server.player_manager.get_players()[1].get_balls()[0].set_size(100)
player_name1 = server.player_manager.get_players()[0].name
player_name2 = server.player_manager.get_players()[1].name
sm_action = StraightMergeHyperAction(player_name1, player_name2)
fps_real = 0
t1 = time.time()
clock = pygame.time.Clock()
fps_set = server.state_tick_per_second
for _ in range(100000):
obs = server.obs()
sm_action.update(obs[1][player_name1], obs[1][player_name2])
action = sm_action.get()
if server.last_time < server.match_time:
for i in range(server.state_tick_per_action_tick):
if i == 0:
server.step_state_tick(actions=action)
else:
server.step_state_tick()
render.fill(server, direction=None, fps=fps_real, last_time=server.last_time,
player_num_per_team=server.player_num_per_team)
render.show()
if i % server.state_tick_per_second == 0:
t2 = time.time()
fps_real = server.state_tick_per_second/(t2-t1)
t1 = time.time()
clock.tick(fps_set)
else:
logging.debug('Game Over')
break
render.close()
def demo_quarter_merge():
server = Server(dict(
team_num=1,
player_num_per_team=2,
map_width=600,
map_height=600,
match_time=60*1,
state_tick_per_second=20, # frame
action_tick_per_second=5, # frame
))
server.start()
render = RealtimeRender(server.map_width, server.map_height)
server.set_render(render)
server.player_manager.get_players()[0].get_balls()[0].set_size(420)
server.player_manager.get_players()[1].get_balls()[0].set_size(100)
player_name1 = server.player_manager.get_players()[0].name
player_name2 = server.player_manager.get_players()[1].name
sm_action = QuarterMergeHyperAction(player_name1, player_name2)
fps_real = 0
t1 = time.time()
clock = pygame.time.Clock()
fps_set = server.state_tick_per_second
for _ in range(100000):
obs = server.obs()
sm_action.update(obs[1][player_name1], obs[1][player_name2])
action = sm_action.get()
print(action)
if server.last_time < server.match_time:
for i in range(server.state_tick_per_action_tick):
if i == 0:
server.step_state_tick(actions=action)
else:
server.step_state_tick()
render.fill(server, direction=None, fps=fps_real, last_time=server.last_time,
player_num_per_team=server.player_num_per_team)
render.show()
if i % server.state_tick_per_second == 0:
t2 = time.time()
fps_real = server.state_tick_per_second/(t2-t1)
t1 = time.time()
clock.tick(fps_set)
else:
logging.debug('Game Over')
break
render.close()
def demo_eighth_merge():
server = Server(dict(
team_num=1,
player_num_per_team=2,
map_width=600,
map_height=600,
match_time=60*1,
state_tick_per_second=20, # frame
action_tick_per_second=5, # frame
))
server.start()
render = RealtimeRender(server.map_width, server.map_height)
server.set_render(render)
server.player_manager.get_players()[0].get_balls()[0].set_size(820)
server.player_manager.get_players()[1].get_balls()[0].set_size(100)
player_name1 = server.player_manager.get_players()[0].name
player_name2 = server.player_manager.get_players()[1].name
sm_action = EighthMergeHyperAction(player_name1, player_name2)
fps_real = 0
t1 = time.time()
clock = pygame.time.Clock()
fps_set = server.state_tick_per_second
for _ in range(100000):
obs = server.obs()
sm_action.update(obs[1][player_name1], obs[1][player_name2])
action = sm_action.get()
print(action)
if server.last_time < server.match_time:
for i in range(server.state_tick_per_action_tick):
if i == 0:
server.step_state_tick(actions=action)
else:
server.step_state_tick()
render.fill(server, direction=None, fps=fps_real, last_time=server.last_time,
player_num_per_team=server.player_num_per_team)
render.show()
if i % server.state_tick_per_second == 0:
t2 = time.time()
fps_real = server.state_tick_per_second/(t2-t1)
t1 = time.time()
clock.tick(fps_set)
else:
logging.debug('Game Over')
break
render.close()
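# The three demos above are identical except for the hyper-action class and
# the initial size of the first ball; a deduplicated driver could look like
# this sketch (not part of the original module -- it simply mirrors the loops
# above):
def _run_merge_demo(action_cls, big_ball_size, show_action=False):
    server = Server(dict(
        team_num=1,
        player_num_per_team=2,
        map_width=600,
        map_height=600,
        match_time=60*1,
        state_tick_per_second=20,  # frame
        action_tick_per_second=5,  # frame
    ))
    server.start()
    render = RealtimeRender(server.map_width, server.map_height)
    server.set_render(render)
    players = server.player_manager.get_players()
    players[0].get_balls()[0].set_size(big_ball_size)
    players[1].get_balls()[0].set_size(100)
    sm_action = action_cls(players[0].name, players[1].name)
    fps_real = 0
    t1 = time.time()
    clock = pygame.time.Clock()
    fps_set = server.state_tick_per_second
    for _ in range(100000):
        obs = server.obs()
        sm_action.update(obs[1][players[0].name], obs[1][players[1].name])
        action = sm_action.get()
        if show_action:
            print(action)
        if server.last_time >= server.match_time:
            logging.debug('Game Over')
            break
        for i in range(server.state_tick_per_action_tick):
            if i == 0:
                server.step_state_tick(actions=action)
            else:
                server.step_state_tick()
            render.fill(server, direction=None, fps=fps_real, last_time=server.last_time,
                        player_num_per_team=server.player_num_per_team)
            render.show()
            if i % server.state_tick_per_second == 0:
                t2 = time.time()
                fps_real = server.state_tick_per_second/(t2-t1)
                t1 = time.time()
            clock.tick(fps_set)
    render.close()
# e.g. demo_quarter_merge would then reduce to:
#   _run_merge_demo(QuarterMergeHyperAction, 420, show_action=True)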
if __name__ == '__main__':
# demo_straight_merge()
# demo_quarter_merge()
demo_eighth_merge()
# -----------------------------------------------------------------------------
# blob eab0ec39a226aa188d6b92f10f618143c1c55a55 -- 6,129 bytes, Python (.py)
# path stubs/events.py
# repo claytonbrown/troposphere @ bf0f1e48b14f578de0221d50f711467ad716ca87 [BSD-2-Clause]
# -----------------------------------------------------------------------------
from . import AWSObject, AWSProperty
from .validators import *
from .constants import *
# -------------------------------------------
class EventsTarget(AWSProperty):
"""# Target - CloudFormationResourceSpecification version: 1.4.0
{
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html",
"Properties": {
"Arn": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html#cfn-events-rule-target-arn",
"PrimitiveType": "String",
"Required": true,
"UpdateType": "Mutable"
},
"Id": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html#cfn-events-rule-target-id",
"PrimitiveType": "String",
"Required": true,
"UpdateType": "Mutable"
},
"Input": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html#cfn-events-rule-target-input",
"PrimitiveType": "String",
"Required": false,
"UpdateType": "Mutable"
},
"InputPath": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html#cfn-events-rule-target-inputpath",
"PrimitiveType": "String",
"Required": false,
"UpdateType": "Mutable"
}
}
}
"""
props = {
'Arn': (basestring, True, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html#cfn-events-rule-target-arn'),
'Id': (basestring, True, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html#cfn-events-rule-target-id'),
'Input': (basestring, False, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html#cfn-events-rule-target-input'),
'InputPath': (basestring, False, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-events-rule-target.html#cfn-events-rule-target-inputpath')
}
# -------------------------------------------
class EventsRule(AWSObject):
"""# AWS::Events::Rule - CloudFormationResourceSpecification version: 1.4.0
{
"Attributes": {
"Arn": {
"PrimitiveType": "String"
}
},
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html",
"Properties": {
"Description": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-description",
"PrimitiveType": "String",
"Required": false,
"UpdateType": "Mutable"
},
"EventPattern": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-eventpattern",
"PrimitiveType": "Json",
"Required": false,
"UpdateType": "Mutable"
},
"Name": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-name",
"PrimitiveType": "String",
"Required": false,
"UpdateType": "Immutable"
},
"RoleArn": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-rolearn",
"PrimitiveType": "String",
"Required": false,
"UpdateType": "Mutable"
},
"ScheduleExpression": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-scheduleexpression",
"PrimitiveType": "String",
"Required": false,
"UpdateType": "Mutable"
},
"State": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-state",
"PrimitiveType": "String",
"Required": false,
"UpdateType": "Mutable"
},
"Targets": {
"Documentation": "http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-targets",
"DuplicatesAllowed": false,
"ItemType": "Target",
"Required": false,
"Type": "List",
"UpdateType": "Mutable"
}
}
}
"""
resource_type = "AWS::Events::Rule"
props = {
'Description': (basestring, False, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-description'),
'EventPattern': ((basestring, dict), False, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-eventpattern'),
'Name': (basestring, False, 'Immutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-name'),
'RoleArn': (basestring, False, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-rolearn'),
'ScheduleExpression': (basestring, False, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-scheduleexpression'),
'State': (basestring, False, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-state'),
        'Targets': ([EventsTarget], False, 'Mutable', 'http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-events-rule.html#cfn-events-rule-targets')  # was [Target], which is undefined in this module
}
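# A minimal usage sketch (hypothetical values; assumes the usual troposphere
# convention of a logical title followed by keyword properties):
#
#   rule = EventsRule(
#       "NightlyTrigger",
#       ScheduleExpression="rate(1 day)",
#       State="ENABLED",
#       Targets=[EventsTarget(Arn="arn:aws:lambda:...", Id="nightly-fn")],
#   )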
# -----------------------------------------------------------------------------
# blob eabce3ad390ba3b66800d65271bbeafb6da88860 -- 206 bytes, Python (.py)
# path boa3_test/test_sc/native_test/ledger/GetTransactionFromBlockMismatchedType.py
# repo OnBlockIO/neo3-boa @ cb317292a67532a52ed26f2b0f0f7d0b10ac5f5f [Apache-2.0]
# stars 25 (2020-07-22 .. 2022-03-08), issues 419 (2020-04-23 .. 2022-03-31), forks 15 (2020-05-21 .. 2021-11-18)
# -----------------------------------------------------------------------------
from boa3.builtin.interop.blockchain import Transaction
from boa3.builtin.nativecontract.ledger import Ledger
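# Intentionally passes str arguments that do not match the parameter types of
# get_transaction_from_block; this fixture exercises the compiler's
# mismatched-type diagnostics (hence the file name).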
def main() -> Transaction:
return Ledger.get_transaction_from_block('height', 'tx_index')
# -----------------------------------------------------------------------------
# blob eafb683393a5d4f223f79ae50abb804372c30954 -- 12,522 bytes, Python (.py)
# path MxbaiduAi/image.py
# repo yuanyunqiang/MxbaiduAi @ c2b61a3576f1b44db20b8b2569a9e9079906b77a [Apache-2.0]
# -----------------------------------------------------------------------------
import requests
import base64
err_code = {
    1: 'Internal server error; retry the request. If the error persists, submit a ticket to the technical support team',
    2: 'Service temporarily unavailable; retry the request. If the error persists, submit a ticket to the technical support team',
    3: 'The requested API does not exist; check the request URL and retry (usually a non-English character such as "-" in the URL; re-enter it manually and retry)',
    4: 'Cluster quota exceeded; retry the request. If the error persists, submit a ticket to the technical support team',
    6: 'No permission to access this user data: the relevant API was not enabled when the application was created. Log in to the Baidu Cloud console, find the application, edit it, enable the relevant API, then retry the call',
    13: 'Failed to obtain token',
    14: 'IAM authentication failed',
    15: 'Application does not exist or failed to be created',
    17: 'Daily request quota exceeded. For APIs with billing available, enable billing in the console for unlimited, tier-priced usage; otherwise submit a ticket to request a quota increase',
    18: 'QPS quota exceeded. For APIs with billing available, enable billing in the console for unlimited, tier-priced usage; otherwise submit a ticket to request a quota increase',
    19: 'Total request quota exceeded. For APIs with billing available, enable billing in the console for unlimited, tier-priced usage; otherwise submit a ticket to request a quota increase',
    100: 'Invalid access_token parameter; token fetch failed. See "Obtaining an Access Token" to fetch a new one',
    110: 'access_token is invalid; tokens are valid for 30 days and must be refreshed periodically, or fetch a new token on every request',
    111: 'access_token is invalid; tokens are valid for 30 days and must be refreshed periodically, or fetch a new token on every request',
    216100: 'Request contains invalid parameters; check and retry',
    216101: 'Required parameter missing; check for omitted parameters',
    216102: 'Requested an unsupported service; check the request url',
    216103: 'Some request parameters are too long; check and retry',
    216110: 'appid does not exist; verify it against the appid in the console application list',
    216200: 'Image is empty; check and retry',
    216201: 'Unsupported image format; currently supported formats are PNG, JPG, JPEG and BMP. Transcode or replace the image',
    216202: 'Invalid image size; images must be under 4M after base64 encoding, with resolution at most 4096*4096. Re-upload the image',
    216203: 'Custom dish recognition error: the uploaded image contains multiple subjects; upload a dish image containing a single subject',
    216204: 'Logo recognition error: backend service timeout; submit a ticket to the technical support team',
    216630: 'Recognition error; retry the request. If the error persists, submit a ticket to the technical support team',
    216634: 'Detection error; retry the request. If the error persists, submit a ticket to the technical support team',
    216681: 'The image to add is already in the library; identical images (same Base64 encoding) cannot be added twice',
    282000: 'Internal server error; retry the request. If the error persists, submit a ticket to the technical support team',
    282003: 'Request parameter missing',
    282005: 'Some or all batch tasks failed; troubleshoot using the specific error codes',
    282006: 'Batch task count exceeds the limit; reduce the number of tasks to 10 or fewer',
    282100: 'Image compression/transcoding error',
    282101: 'Long-image split count exceeds the limit',
    282102: 'No recognition target detected in the image',
    282103: 'Image target recognition error',
    282110: 'URL parameter does not exist; verify the URL and resubmit',
    282111: 'Invalid URL format; check that the url meets the input requirements of the API',
    282112: 'url download timed out; the image host may be unreachable or the link unstable. Retry, and if repeated attempts fail, use a different image address',
    282113: 'URL returned invalid parameters',
    282114: 'URL length exceeds 1024 bytes or is 0',
    282808: 'request id does not exist',
    282809: 'Invalid result-format request (not excel or json)',
    282810: 'Image recognition error',
    283300: 'Malformed input; check the image encoding and request format',
    336000: 'Internal server error; retry the request. If the error persists, submit a ticket to the technical support team',
    336001: 'Malformed input, e.g. missing required parameters or bad image base64 encoding; check the image encoding and request format. If in doubt, submit a ticket to the technical support team',
}
class imageAI():
def __init__(self,APIKey,SecretKey) -> None:
self.apikey=APIKey
self.secretkey=SecretKey
self.data=''
        # The constructor previously duplicated the 47-entry table above
        # verbatim; reuse the module-level mapping instead.
        self.err_code = err_code
def access_token(self):
host = 'https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id='+self.apikey+'&client_secret='+self.secretkey
response = requests.get(host)
if response:
return {'msg':'ok','data':response.json()['access_token']}
else:
return {'msg':'err','data':'Failed to get access token'}
def result(self,pop):
if self.data['msg']==True:
return self.data['data'][pop]
else:
return self.data['data']
def animal(self,img_url,):
self.ak=self.access_token()
if self.ak['msg']=='err':
return {'msg':'err','data':'Failed to get access token'}
else:
request_url = "https://aip.baidubce.com/rest/2.0/image-classify/v1/animal"
            with open(img_url, 'rb') as f:  # ensure the file handle is closed
                img = base64.b64encode(f.read())
params = {"image":img}
request_url = request_url + "?access_token=" + self.ak['data']
headers = {'content-type': 'application/x-www-form-urlencoded'}
response = requests.post(request_url, data=params, headers=headers)
if response:
print(response.json())
try:
self.name=response.json()['result'][0]['name']
self.score=response.json()['result'][0]['score']
self.data={'msg':True,'data':(self.name,self.score)}
except:
code=response.json()['error_code']
err_msg=response.json()['error_msg']
                    self.data = {'msg': False, 'data': 'error code: ' + str(code) + ' ' + err_msg + ' ' + err_code[code]}
def plant(self,img_url,):
self.ak=self.access_token()
if self.ak['msg']=='err':
return {'msg':'err','data':'Failed to get access token'}
else:
request_url = "https://aip.baidubce.com/rest/2.0/image-classify/v1/plant"
            with open(img_url, 'rb') as f:  # ensure the file handle is closed
                img = base64.b64encode(f.read())
params = {"image":img}
request_url = request_url + "?access_token=" + self.ak['data']
headers = {'content-type': 'application/x-www-form-urlencoded'}
response = requests.post(request_url, data=params, headers=headers)
if response:
print(response.json())
try:
self.name=response.json()['result'][0]['name']
self.score=response.json()['result'][0]['score']
self.data={'msg':True,'data':(self.name,self.score)}
except:
code=response.json()['error_code']
err_msg=response.json()['error_msg']
                    self.data = {'msg': False, 'data': 'error code: ' + str(code) + ' ' + err_msg + ' ' + err_code[code]}
def ingredient(self,img_url,):
self.ak=self.access_token()
if self.ak['msg']=='err':
return {'msg':'err','data':'Failed to get access token'}
else:
request_url = "https://aip.baidubce.com/rest/2.0/image-classify/v1/classify/ingredient"
            with open(img_url, 'rb') as f:  # ensure the file handle is closed
                img = base64.b64encode(f.read())
params = {"image":img}
request_url = request_url + "?access_token=" + self.ak['data']
headers = {'content-type': 'application/x-www-form-urlencoded'}
response = requests.post(request_url, data=params, headers=headers)
if response:
print(response.json())
try:
self.name=response.json()['result'][0]['name']
self.score=response.json()['result'][0]['score']
self.data={'msg':True,'data':(self.name,self.score)}
except:
code=response.json()['error_code']
err_msg=response.json()['error_msg']
                    self.data = {'msg': False, 'data': 'error code: ' + str(code) + ' ' + err_msg + ' ' + err_code[code]}
def dish(self,img_url,):
self.ak=self.access_token()
if self.ak['msg']=='err':
return {'msg':'err','data':'Failed to get access token'}
else:
request_url = "https://aip.baidubce.com/rest/2.0/image-classify/v2/dish"
            with open(img_url, 'rb') as f:  # ensure the file handle is closed
                img = base64.b64encode(f.read())
params = {"image":img}
request_url = request_url + "?access_token=" + self.ak['data']
headers = {'content-type': 'application/x-www-form-urlencoded'}
response = requests.post(request_url, data=params, headers=headers)
if response:
print(response.json())
try:
self.name=response.json()['result'][0]['name']
self.probability=response.json()['result'][0]['probability']
self.data={'msg':True,'data':(self.name,self.probability)}
except:
code=response.json()['error_code']
err_msg=response.json()['error_msg']
                    self.data = {'msg': False, 'data': 'error code: ' + str(code) + ' ' + err_msg + ' ' + err_code[code]}
def currency(self,img_url,):
self.ak=self.access_token()
if self.ak['msg']=='err':
return {'msg':'err','data':'Failed to get access token'}
else:
request_url = "https://aip.baidubce.com/rest/2.0/image-classify/v1/currency"
            with open(img_url, 'rb') as f:  # ensure the file handle is closed
                img = base64.b64encode(f.read())
params = {"image":img}
request_url = request_url + "?access_token=" + self.ak['data']
headers = {'content-type': 'application/x-www-form-urlencoded'}
response = requests.post(request_url, data=params, headers=headers)
if response:
print(response.json())
try:
self.name=response.json()['result']['currencyName']
if response.json()['result']['hasdetail']==1:
self.currencyDenomination=response.json()['result']['currencyDenomination']
else:
                        self.currencyDenomination = 'unrecognized'
self.data={'msg':True,'data':(self.name,self.currencyDenomination)}
except:
code=response.json()['error_code']
err_msg=response.json()['error_msg']
                    self.data = {'msg': False, 'data': 'error code: ' + str(code) + ' ' + err_msg + ' ' + err_code[code]}
def landmark(self,img_url,):
self.ak=self.access_token()
if self.ak['msg']=='err':
return {'msg':'err','data':'Failed to get access token'}
else:
request_url = "https://aip.baidubce.com/rest/2.0/image-classify/v1/landmark"
            with open(img_url, 'rb') as f:  # ensure the file handle is closed
                img = base64.b64encode(f.read())
params = {"image":img}
request_url = request_url + "?access_token=" + self.ak['data']
headers = {'content-type': 'application/x-www-form-urlencoded'}
response = requests.post(request_url, data=params, headers=headers)
if response:
print(response.json())
try:
self.name=response.json()['result']['landmark']
self.data={'msg':True,'data':(self.name,100)}
except:
code=response.json()['error_code']
err_msg=response.json()['error_msg']
                    self.data = {'msg': False, 'data': 'error code: ' + str(code) + ' ' + err_msg + ' ' + err_code[code]}
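# A minimal usage sketch (placeholder credentials and image path):
#
#   ai = imageAI('your-api-key', 'your-secret-key')
#   ai.animal('cat.jpg')   # populates ai.data with (name, score) on success
#   print(ai.result(0))    # recognized name, or the error text on failure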
# -----------------------------------------------------------------------------
# blob dc2e10be4100b140690688a507af659c46d51f4f -- 181 bytes, Python (.py)
# path doubleclickcrypto/__init__.py
# repo danielhedren/doubleclickcrypto @ 814e375e1527ce837852f56cc912855eaeddfa2e [MIT]
# -----------------------------------------------------------------------------
from .doubleclickcrypto import DoubleClickCrypto
from .doubleclickcrypto import StaleResponseException
from .doubleclickcrypto import SignatureException
name = "doubleclickcrypto"
# -----------------------------------------------------------------------------
# blob dc650ede7dc4400b1e1cc919ec0d26fb9d8b2fb0 -- 3,636 bytes, Python (.py)
# path MetaScreener/external_sw/mgltools/MGLToolsPckgs/AppFramework/ColorMaps/rwb128_map.py
# repo bio-hpc/metascreener @ 6900497629f601c4b6c0c37da26de58ffa221988 [Apache-2.0]
# stars 8 (2021-12-14 .. 2022-02-14)
# -----------------------------------------------------------------------------
from DejaVu.colorMap import ColorMap
from numpy import array
cm = ColorMap('rwb128')
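# Descriptive note (inferred from the data below): a 128-entry RGBA ramp
# running blue -> white -> red (hence 'rwb'), mapped over mini=0.0 .. maxi=10.0.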
cfg = {'name': 'rwb128', 'ramp': [[0.002, 0.0, 1.0, 1.0], [0.012978, 0.011, 1.0, 1.0], [0.030942, 0.029, 1.0, 1.0], [0.047908, 0.046, 1.0, 1.0], [0.064874, 0.063, 1.0, 1.0], [0.075852, 0.074, 1.0, 1.0], [0.092818, 0.091, 1.0, 1.0], [0.110782, 0.109, 1.0, 1.0], [0.127748, 0.126, 1.0, 1.0], [0.144714, 0.143, 1.0, 1.0], [0.155692, 0.154, 1.0, 1.0], [0.172658, 0.171, 1.0, 1.0], [0.190622, 0.189, 1.0, 1.0], [0.207588, 0.206, 1.0, 1.0], [0.218566, 0.217, 1.0, 1.0], [0.235532, 0.234, 1.0, 1.0], [0.252498, 0.251, 1.0, 1.0], [0.270462, 0.269, 1.0, 1.0], [0.287428, 0.286, 1.0, 1.0], [0.298406, 0.297, 1.0, 1.0], [0.315372, 0.314, 1.0, 1.0], [0.332338, 0.331, 1.0, 1.0], [0.350302, 0.349, 1.0, 1.0], [0.36128, 0.36, 1.0, 1.0], [0.378246, 0.377, 1.0, 1.0], [0.395212, 0.394, 1.0, 1.0], [0.412178, 0.411, 1.0, 1.0], [0.430142, 0.429, 1.0, 1.0], [0.44112, 0.44, 1.0, 1.0], [0.458086, 0.457, 1.0, 1.0], [0.475052, 0.474, 1.0, 1.0], [0.492018, 0.491, 1.0, 1.0], [0.503994, 0.503, 1.0, 1.0], [0.52096, 0.52, 1.0, 1.0], [0.537926, 0.537, 1.0, 1.0], [0.554892, 0.554, 1.0, 1.0], [0.571858, 0.571, 1.0, 1.0], [0.583834, 0.583, 1.0, 1.0], [0.6008, 0.6, 1.0, 1.0], [0.617766, 0.617, 1.0, 1.0], [0.634732, 0.634, 1.0, 1.0], [0.646708, 0.646, 1.0, 1.0], [0.663674, 0.663, 1.0, 1.0], [0.68064, 0.68, 1.0, 1.0], [0.697606, 0.697, 1.0, 1.0], [0.714572, 0.714, 1.0, 1.0], [0.726548, 0.726, 1.0, 1.0], [0.743514, 0.743, 1.0, 1.0], [0.76048, 0.76, 1.0, 1.0], [0.777446, 0.777, 1.0, 1.0], [0.789422, 0.789, 1.0, 1.0], [0.806388, 0.806, 1.0, 1.0], [0.823354, 0.823, 1.0, 1.0], [0.84032, 0.84, 1.0, 1.0], [0.857286, 0.857, 1.0, 1.0], [0.869262, 0.869, 1.0, 1.0], [0.886228, 0.886, 1.0, 1.0], [0.903194, 0.903, 1.0, 1.0], [0.92016, 0.92, 1.0, 1.0], [0.931138, 0.931, 1.0, 1.0], [0.949102, 0.949, 1.0, 1.0], [0.966068, 0.966, 1.0, 1.0], [0.983034, 0.983, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 0.989, 0.989, 1.0], [1.0, 0.971, 0.971, 1.0], [1.0, 0.954, 0.954, 1.0], [1.0, 0.937, 0.937, 1.0], [1.0, 0.926, 0.926, 1.0], [1.0, 0.909, 0.909, 1.0], [1.0, 0.891, 0.891, 1.0], [1.0, 0.874, 0.874, 1.0], [1.0, 0.857, 0.857, 1.0], [1.0, 0.846, 0.846, 1.0], [1.0, 0.829, 0.829, 1.0], [1.0, 0.811, 0.811, 1.0], [1.0, 0.794, 0.794, 1.0], [1.0, 0.783, 0.783, 1.0], [1.0, 0.766, 0.766, 1.0], [1.0, 0.749, 0.749, 1.0], [1.0, 0.731, 0.731, 1.0], [1.0, 0.714, 0.714, 1.0], [1.0, 0.703, 0.703, 1.0], [1.0, 0.686, 0.686, 1.0], [1.0, 0.669, 0.669, 1.0], [1.0, 0.651, 0.651, 1.0], [1.0, 0.64, 0.64, 1.0], [1.0, 0.623, 0.623, 1.0], [1.0, 0.606, 0.606, 1.0], [1.0, 0.589, 0.589, 1.0], [1.0, 0.571, 0.571, 1.0], [1.0, 0.56, 0.56, 1.0], [1.0, 0.543, 0.543, 1.0], [1.0, 0.526, 0.526, 1.0], [1.0, 0.509, 0.509, 1.0], [1.0, 0.497, 0.497, 1.0], [1.0, 0.48, 0.48, 1.0], [1.0, 0.463, 0.463, 1.0], [1.0, 0.446, 0.446, 1.0], [1.0, 0.429, 0.429, 1.0], [1.0, 0.417, 0.417, 1.0], [1.0, 0.4, 0.4, 1.0], [1.0, 0.383, 0.383, 1.0], [1.0, 0.366, 0.366, 1.0], [1.0, 0.354, 0.354, 1.0], [1.0, 0.337, 0.337, 1.0], [1.0, 0.32, 0.32, 1.0], [1.0, 0.303, 0.303, 1.0], [1.0, 0.286, 0.286, 1.0], [1.0, 0.274, 0.274, 1.0], [1.0, 0.257, 0.257, 1.0], [1.0, 0.24, 0.24, 1.0], [1.0, 0.223, 0.223, 1.0], [1.0, 0.211, 0.211, 1.0], [1.0, 0.194, 0.194, 1.0], [1.0, 0.177, 0.177, 1.0], [1.0, 0.16, 0.16, 1.0], [1.0, 0.143, 0.143, 1.0], [1.0, 0.131, 0.131, 1.0], [1.0, 0.114, 0.114, 1.0], [1.0, 0.097, 0.097, 1.0], [1.0, 0.08, 0.08, 1.0], [1.0, 0.069, 0.069, 1.0], [1.0, 0.051, 0.051, 1.0], [1.0, 0.034, 0.034, 1.0], [1.0, 0.017, 0.017, 1.0], [1.0, 0.0, 0.0, 1.0]], 'maxi': 10.0, 'mini': 0.0}
cm.configure(*(), **cfg)
# -----------------------------------------------------------------------------
# blob dc8e07d983df431777b4927c48190385533d650c -- 4,264 bytes, Python (.py)
# path nicos_virt_mlz/kws2/setups/config_detector.py
# repo jkrueger1/nicos @ 5f4ce66c312dedd78995f9d91e8a6e3c891b262b [CC-BY-3.0, Apache-2.0, CC-BY-4.0]
# -----------------------------------------------------------------------------
description = 'presets for the detector position'
group = 'configdata'
# Assigns presets for the detector z position and x/y displacement of the
# beamstop for each selector preset.
#
# When you add a new detector z position, make sure to add a real offset as
# well in the DETECTOR_OFFSETS table below.
FIXED_X = 0.0
FIXED_X_TILT = 16.0
FIXED_Y = 520.0
DETECTOR_PRESETS = {
'2.9A tilt': {
'1.5m': dict(z=1.5, x=FIXED_X, y=FIXED_Y),
'1.5m DB': dict(z=1.5, x=FIXED_X, y=500.0),
'2m': dict(z=2, x=FIXED_X, y=FIXED_Y),
'4m': dict(z=4, x=FIXED_X, y=FIXED_Y),
'8m': dict(z=8, x=FIXED_X, y=FIXED_Y),
},
'4.66A': {
'1.5m': dict(z=1.5, x=FIXED_X, y=FIXED_Y),
'1.5m DB': dict(z=1.5, x=FIXED_X, y=500.0),
'2m': dict(z=2, x=FIXED_X, y=FIXED_Y),
'4m': dict(z=4, x=FIXED_X, y=FIXED_Y),
'8m DB': dict(z=8, x=FIXED_X, y=500.0),
'8m': dict(z=8, x=FIXED_X, y=FIXED_Y),
'14m': dict(z=14, x=FIXED_X, y=FIXED_Y),
'20m': dict(z=19.9, x=FIXED_X, y=FIXED_Y),
},
'5A': {
'1.5m': dict(z=1.5, x=FIXED_X, y=FIXED_Y),
'1.5m DB': dict(z=1.5, x=FIXED_X, y=620.0),
'2m': dict(z=2, x=FIXED_X, y=FIXED_Y),
'4m': dict(z=4, x=FIXED_X, y=FIXED_Y),
'6m': dict(z=6, x=FIXED_X, y=FIXED_Y),
'8m': dict(z=8, x=FIXED_X, y=FIXED_Y),
'8m DB': dict(z=8, x=FIXED_X, y=620.0),
'20m': dict(z=19.9, x=FIXED_X, y=FIXED_Y),
'20m DB': dict(z=19.9, x=FIXED_X, y=620.0),
},
'5A tilt': {
'1.5m': dict(z=1.5, x=FIXED_X_TILT, y=FIXED_Y),
'2m': dict(z=2, x=FIXED_X_TILT, y=FIXED_Y),
'2m DB': dict(z=2, x=FIXED_X_TILT, y=500.0),
'4m': dict(z=4, x=FIXED_X_TILT, y=FIXED_Y),
'6m': dict(z=6, x=FIXED_X_TILT, y=FIXED_Y),
'8m': dict(z=8, x=FIXED_X_TILT, y=FIXED_Y),
'8m DB': dict(z=8, x=FIXED_X_TILT, y=500.0),
'20m': dict(z=19.9, x=FIXED_X_TILT, y=FIXED_Y),
},
'7A': {
'1.5m': dict(z=1.5, x=FIXED_X, y=FIXED_Y),
'1.5m DB': dict(z=1.5, x=FIXED_X, y=500.0),
'2m': dict(z=2, x=FIXED_X, y=FIXED_Y),
'4m': dict(z=4, x=FIXED_X, y=FIXED_Y),
'8m': dict(z=8, x=FIXED_X, y=FIXED_Y),
'8m DB': dict(z=8, x=FIXED_X, y=300.0),
'20m DB': dict(z=19.9, x=FIXED_X, y=300.0),
'20m': dict(z=19.9, x=FIXED_X, y=FIXED_Y),
},
'7A tilt': {
'1.5m': dict(z=1.5, x=FIXED_X_TILT, y=FIXED_Y),
'1.5m DB': dict(z=1.5, x=FIXED_X_TILT, y=500.0),
'2m': dict(z=2, x=FIXED_X_TILT, y=FIXED_Y),
'2m DB': dict(z=2, x=FIXED_X_TILT, y=500.0),
'4m': dict(z=4, x=FIXED_X_TILT, y=FIXED_Y),
'8m': dict(z=8, x=FIXED_X_TILT, y=FIXED_Y),
'8m DB': dict(z=8, x=FIXED_X_TILT, y=500.0),
'20m': dict(z=19.9, x=FIXED_X_TILT, y=FIXED_Y),
},
'10A': {
'1.5m': dict(z=1.5, x=FIXED_X, y=FIXED_Y),
'1.5m DB': dict(z=1.5, x=FIXED_X, y=500.0),
'2m': dict(z=2, x=FIXED_X, y=FIXED_Y),
'4m': dict(z=4, x=FIXED_X, y=FIXED_Y),
'8m': dict(z=8, x=FIXED_X, y=FIXED_Y),
'8m DB': dict(z=8, x=FIXED_X, y=620.0),
'20m': dict(z=19.9, x=FIXED_X, y=FIXED_Y),
'20m DB': dict(z=19.9, x=FIXED_X, y=300.0),
},
'19A': {
'2m': dict(z=2, x=FIXED_X, y=FIXED_Y),
'8m': dict(z=8, x=FIXED_X, y=FIXED_Y),
'20m': dict(z=19.9, x=FIXED_X, y=FIXED_Y),
},
}
SMALL_DET_POSITION = 17.0
# This offset is added to 20m + det_z to get the chopper-detector length
# for time-of-flight mode calculation.
#
# It varies with detector distance because the det_z value is not actually
# particularly accurate.
DETECTOR_OFFSETS = {
1.5: 0.7,
2: 0.7,
2.1: 0.7,
4: 0.7,
4.1: 0.7,
6: 0.7,
8: 0.7,
8.1: 0.7,
14: 0.7,
17.0: 0.7, # for small detector
19.9: 0.7,
}
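# A lookup sketch (illustrative, not part of the setup file): resolving a
# beamstop preset and the chopper-detector length for time-of-flight mode:
#
#   preset = DETECTOR_PRESETS['5A']['8m']   # -> dict(z=8, x=FIXED_X, y=FIXED_Y)
#   cdl = 20.0 + preset['z'] + DETECTOR_OFFSETS[preset['z']]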
# -----------------------------------------------------------------------------
# blob dc9681057f7d9d87d8907cd895972f5214259209 -- 27,842 bytes, Python (.py)
# path fhir/resources/tests/test_observation.py
# repo mmabey/fhir.resources @ cc73718e9762c04726cd7de240c8f2dd5313cbe1 [BSD-3-Clause]
# -----------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/Observation
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import observation
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class ObservationTests(unittest.TestCase):
def instantiate_from(self, filename):
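        # Fixture JSON files are resolved against the FHIR_UNITTEST_DATADIR
        # environment variable, falling back to the current working directory
        # when it is unset.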
datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
js = json.load(handle)
self.assertEqual("Observation", js["resourceType"])
return observation.Observation(js)
def testObservation1(self):
inst = self.instantiate_from("observation-example-genetics-1.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation1(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation1(inst2)
def implObservation1(self, inst):
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("55233-1"))
self.assertEqual(
force_bytes(inst.code.coding[0].display),
force_bytes(
"Genetic analysis master panel-- This is the parent OBR for the panel holding all of the associated observations that can be reported with a molecular genetics analysis result."
),
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(
force_bytes(inst.extension[0].url),
force_bytes(
"http://hl7.org/fhir/StructureDefinition/observation-geneticsGene"
),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].code),
force_bytes("3236"),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].display),
force_bytes("EGFR"),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].system),
force_bytes("http://www.genenames.org"),
)
self.assertEqual(
force_bytes(inst.extension[1].url),
force_bytes(
"http://hl7.org/fhir/StructureDefinition/observation-geneticsDNARegionName"
),
)
self.assertEqual(
force_bytes(inst.extension[1].valueString), force_bytes("Exon 21")
)
self.assertEqual(
force_bytes(inst.extension[2].url),
force_bytes(
"http://hl7.org/fhir/StructureDefinition/observation-geneticsGenomicSourceClass"
),
)
self.assertEqual(
force_bytes(inst.extension[2].valueCodeableConcept.coding[0].code),
force_bytes("LA6684-0"),
)
self.assertEqual(
force_bytes(inst.extension[2].valueCodeableConcept.coding[0].display),
force_bytes("somatic"),
)
self.assertEqual(
force_bytes(inst.extension[2].valueCodeableConcept.coding[0].system),
force_bytes("http://loinc.org"),
)
self.assertEqual(force_bytes(inst.id), force_bytes("example-genetics-1"))
self.assertEqual(inst.issued.date, FHIRDate("2013-04-03T15:30:10+01:00").date)
self.assertEqual(inst.issued.as_json(), "2013-04-03T15:30:10+01:00")
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(
force_bytes(inst.valueCodeableConcept.coding[0].code),
force_bytes("10828004"),
)
self.assertEqual(
force_bytes(inst.valueCodeableConcept.coding[0].display),
force_bytes("Positive"),
)
self.assertEqual(
force_bytes(inst.valueCodeableConcept.coding[0].system),
force_bytes("http://snomed.info/sct"),
)
def testObservation2(self):
inst = self.instantiate_from("observation-example-bmd.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation2(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation2(inst2)
def implObservation2(self, inst):
self.assertEqual(
force_bytes(inst.bodySite.coding[0].code),
force_bytes("71341001:272741003=7771000"),
)
self.assertEqual(
force_bytes(inst.bodySite.coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(force_bytes(inst.bodySite.text), force_bytes("Left Femur"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("24701-5"))
self.assertEqual(
force_bytes(inst.code.coding[0].display),
force_bytes("Femur DXA Bone density"),
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(force_bytes(inst.code.text), force_bytes("BMD - Left Femur"))
self.assertEqual(force_bytes(inst.id), force_bytes("bmd"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("g/cm-2"))
self.assertEqual(
force_bytes(inst.valueQuantity.system),
force_bytes("http://unitsofmeasure.org"),
)
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("g/cm²"))
self.assertEqual(inst.valueQuantity.value, 0.887)
def testObservation3(self):
inst = self.instantiate_from("observation-example-respiratory-rate.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation3(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation3(inst2)
def implObservation3(self, inst):
self.assertEqual(
force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"),
)
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("9279-1"))
self.assertEqual(
force_bytes(inst.code.coding[0].display), force_bytes("Respiratory rate")
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(force_bytes(inst.code.text), force_bytes("Respiratory rate"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("1999-07-02").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "1999-07-02")
self.assertEqual(force_bytes(inst.id), force_bytes("respiratory-rate"))
self.assertEqual(
force_bytes(inst.meta.profile[0]),
force_bytes("http://hl7.org/fhir/StructureDefinition/vitalsigns"),
)
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("/min"))
self.assertEqual(
force_bytes(inst.valueQuantity.system),
force_bytes("http://unitsofmeasure.org"),
)
self.assertEqual(
force_bytes(inst.valueQuantity.unit), force_bytes("breaths/minute")
)
self.assertEqual(inst.valueQuantity.value, 26)
def testObservation4(self):
inst = self.instantiate_from("observation-example.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation4(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation4(inst2)
def implObservation4(self, inst):
self.assertEqual(
force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"),
)
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("29463-7"))
self.assertEqual(
force_bytes(inst.code.coding[0].display), force_bytes("Body Weight")
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(force_bytes(inst.code.coding[1].code), force_bytes("3141-9"))
self.assertEqual(
force_bytes(inst.code.coding[1].display),
force_bytes("Body weight Measured"),
)
self.assertEqual(
force_bytes(inst.code.coding[1].system), force_bytes("http://loinc.org")
)
self.assertEqual(force_bytes(inst.code.coding[2].code), force_bytes("27113001"))
self.assertEqual(
force_bytes(inst.code.coding[2].display), force_bytes("Body weight")
)
self.assertEqual(
force_bytes(inst.code.coding[2].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(
force_bytes(inst.code.coding[3].code), force_bytes("body-weight")
)
self.assertEqual(
force_bytes(inst.code.coding[3].display), force_bytes("Body Weight")
)
self.assertEqual(
force_bytes(inst.code.coding[3].system),
force_bytes("http://acme.org/devices/clinical-codes"),
)
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("2016-03-28").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "2016-03-28")
self.assertEqual(force_bytes(inst.id), force_bytes("example"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("[lb_av]"))
self.assertEqual(
force_bytes(inst.valueQuantity.system),
force_bytes("http://unitsofmeasure.org"),
)
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("lbs"))
self.assertEqual(inst.valueQuantity.value, 185)
def testObservation5(self):
inst = self.instantiate_from("observation-example-haplotype2.json")
        self.assertIsNotNone(inst, "Must have instantiated an Observation instance")
self.implObservation5(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation5(inst2)
def implObservation5(self, inst):
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("55233-1"))
self.assertEqual(
force_bytes(inst.code.coding[0].display),
force_bytes(
"Genetic analysis master panel-- This is the parent OBR for the panel holding all of the associated observations that can be reported with a molecular genetics analysis result."
),
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(
force_bytes(inst.extension[0].url),
force_bytes(
"http://hl7.org/fhir/StructureDefinition/observation-geneticsGene"
),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].code),
force_bytes("2623"),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].display),
force_bytes("CYP2C9"),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].system),
force_bytes("http://www.genenames.org"),
)
self.assertEqual(force_bytes(inst.id), force_bytes("example-haplotype2"))
self.assertEqual(inst.issued.date, FHIRDate("2013-04-03T15:30:10+01:00").date)
self.assertEqual(inst.issued.as_json(), "2013-04-03T15:30:10+01:00")
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("unknown"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(
force_bytes(inst.valueCodeableConcept.coding[0].code),
force_bytes("PA16581679"),
)
self.assertEqual(
force_bytes(inst.valueCodeableConcept.coding[0].display), force_bytes("*4")
)
self.assertEqual(
force_bytes(inst.valueCodeableConcept.coding[0].system),
force_bytes("http://pharmakb.org"),
)
def testObservation6(self):
inst = self.instantiate_from("observation-example-mbp.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation6(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation6(inst2)
def implObservation6(self, inst):
self.assertEqual(
force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"),
)
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("8478-0"))
self.assertEqual(
force_bytes(inst.code.coding[0].display), force_bytes("Mean blood pressure")
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(
force_bytes(inst.code.text), force_bytes("Mean blood pressure")
)
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("1999-07-02").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "1999-07-02")
self.assertEqual(force_bytes(inst.id), force_bytes("mbp"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("mm[Hg]"))
self.assertEqual(
force_bytes(inst.valueQuantity.system),
force_bytes("http://unitsofmeasure.org"),
)
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("mm[Hg]"))
self.assertEqual(inst.valueQuantity.value, 80)
def testObservation7(self):
inst = self.instantiate_from("observation-example-genetics-brcapat.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation7(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation7(inst2)
def implObservation7(self, inst):
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("59041-4"))
self.assertEqual(
force_bytes(inst.code.coding[0].display),
force_bytes(
"BRCA1+BRCA2 gene mutations tested for in Blood or Tissue by Molecular genetics method Nominal"
),
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(
force_bytes(inst.extension[0].url),
force_bytes(
"http://hl7.org/fhir/StructureDefinition/observation-geneticsGene"
),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].code),
force_bytes("KX470182.1"),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].display),
force_bytes("BRCA"),
)
self.assertEqual(
force_bytes(inst.extension[0].valueCodeableConcept.coding[0].system),
force_bytes("https://www.ncbi.nlm.nih.gov/nuccore"),
)
self.assertEqual(
force_bytes(inst.extension[1].url),
force_bytes(
"http://hl7.org/fhir/us/core/StructureDefinition/us-core-ethnicity"
),
)
self.assertEqual(
force_bytes(inst.extension[1].valueCodeableConcept.coding[0].code),
force_bytes("413581001"),
)
self.assertEqual(
force_bytes(inst.extension[1].valueCodeableConcept.coding[0].display),
force_bytes("Unknown racial group"),
)
self.assertEqual(
force_bytes(inst.extension[1].valueCodeableConcept.coding[0].system),
force_bytes("http://browser.ihtsdotools.org/"),
)
self.assertEqual(force_bytes(inst.id), force_bytes("example-genetics-brcapat"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
def testObservation8(self):
inst = self.instantiate_from("observation-example-bmi.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation8(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation8(inst2)
def implObservation8(self, inst):
self.assertEqual(
force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"),
)
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("39156-5"))
self.assertEqual(
force_bytes(inst.code.coding[0].display),
force_bytes("Body mass index (BMI) [Ratio]"),
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(force_bytes(inst.code.text), force_bytes("BMI"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("1999-07-02").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "1999-07-02")
self.assertEqual(force_bytes(inst.id), force_bytes("bmi"))
self.assertEqual(
force_bytes(inst.meta.profile[0]),
force_bytes("http://hl7.org/fhir/StructureDefinition/vitalsigns"),
)
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("kg/m2"))
self.assertEqual(
force_bytes(inst.valueQuantity.system),
force_bytes("http://unitsofmeasure.org"),
)
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("kg/m2"))
self.assertEqual(inst.valueQuantity.value, 16.2)
def testObservation9(self):
inst = self.instantiate_from("observation-example-body-height.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation9(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation9(inst2)
def implObservation9(self, inst):
self.assertEqual(
force_bytes(inst.category[0].coding[0].code), force_bytes("vital-signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].display), force_bytes("Vital Signs")
)
self.assertEqual(
force_bytes(inst.category[0].coding[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/observation-category"),
)
self.assertEqual(force_bytes(inst.category[0].text), force_bytes("Vital Signs"))
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("8302-2"))
self.assertEqual(
force_bytes(inst.code.coding[0].display), force_bytes("Body height")
)
self.assertEqual(
force_bytes(inst.code.coding[0].system), force_bytes("http://loinc.org")
)
self.assertEqual(force_bytes(inst.code.text), force_bytes("Body height"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("1999-07-02").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "1999-07-02")
self.assertEqual(force_bytes(inst.id), force_bytes("body-height"))
self.assertEqual(
force_bytes(inst.meta.profile[0]),
force_bytes("http://hl7.org/fhir/StructureDefinition/vitalsigns"),
)
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueQuantity.code), force_bytes("[in_i]"))
self.assertEqual(
force_bytes(inst.valueQuantity.system),
force_bytes("http://unitsofmeasure.org"),
)
self.assertEqual(force_bytes(inst.valueQuantity.unit), force_bytes("in"))
self.assertEqual(inst.valueQuantity.value, 66.89999999999999)
def testObservation10(self):
inst = self.instantiate_from("observation-example-eye-color.json")
self.assertIsNotNone(inst, "Must have instantiated a Observation instance")
self.implObservation10(inst)
js = inst.as_json()
self.assertEqual("Observation", js["resourceType"])
inst2 = observation.Observation(js)
self.implObservation10(inst2)
def implObservation10(self, inst):
self.assertEqual(force_bytes(inst.code.text), force_bytes("eye color"))
self.assertEqual(inst.effectiveDateTime.date, FHIRDate("2016-05-18").date)
self.assertEqual(inst.effectiveDateTime.as_json(), "2016-05-18")
self.assertEqual(force_bytes(inst.id), force_bytes("eye-color"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(force_bytes(inst.status), force_bytes("final"))
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.valueString), force_bytes("blue"))
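# The ten tests above all follow one round-trip pattern: parse a JSON
# fixture into a model, assert on its fields, serialize it back with
# as_json(), re-parse, and assert again. A minimal sketch of that pattern,
# assuming only the stdlib json module; `model_cls` stands in for a
# fhirclient model class such as observation.Observation, and the helper
# name is hypothetical, not part of the tested library.
import json

def roundtrip_check(fixture_path, model_cls):
    with open(fixture_path) as handle:
        source = json.load(handle)
    inst = model_cls(source)                 # parse the fixture into a model
    js = inst.as_json()                      # serialize the model back out
    assert js["resourceType"] == source["resourceType"]
    inst2 = model_cls(js)                    # re-parse the serialized form
    assert inst2.as_json() == js             # the round trip is stable
    return inst2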
| 44.476038
| 193
| 0.638496
| 3,098
| 27,842
| 5.61459
| 0.090704
| 0.200644
| 0.200069
| 0.250086
| 0.887202
| 0.871162
| 0.859204
| 0.823445
| 0.794067
| 0.779809
| 0
| 0.029159
| 0.227678
| 27,842
| 625
| 194
| 44.5472
| 0.779752
| 0.006285
| 0
| 0.544674
| 0
| 0.003436
| 0.173361
| 0.018113
| 0
| 0
| 0
| 0
| 0.372852
| 1
| 0.036082
| false
| 0
| 0.013746
| 0
| 0.053265
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
760a692226e9739eeff7ef4dc62a27752a8f9ead
| 840
|
py
|
Python
|
pyaz/monitor/activity_log/alert/scope/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/monitor/activity_log/alert/scope/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/monitor/activity_log/alert/scope/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from .....pyaz_utils import _call_az
def add(name, resource_group, scope, reset=None):
'''
Add scopes to this activity log alert.
Required Parameters:
- name -- None
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
- scope -- None
Optional Parameters:
- reset -- None
'''
return _call_az("az monitor activity-log alert scope add", locals())
def remove(name, resource_group, scope):
'''
Removes scopes from this activity log alert.
Required Parameters:
- name -- None
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
- scope -- None
'''
return _call_az("az monitor activity-log alert scope remove", locals())
| 28.965517
| 128
| 0.671429
| 108
| 840
| 5.12037
| 0.333333
| 0.141049
| 0.115732
| 0.079566
| 0.701627
| 0.701627
| 0.701627
| 0.701627
| 0.701627
| 0.701627
| 0
| 0
| 0.225
| 840
| 28
| 129
| 30
| 0.849462
| 0.567857
| 0
| 0
| 0
| 0
| 0.278351
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
76203cf07731b5c655459678767c0a81b833d716
| 65
|
py
|
Python
|
pygsuite/utility/guids.py
|
gitter-badger/pygsuite
|
536766c36f653edbc7585141f1c3327f508e19da
|
[
"MIT"
] | null | null | null |
pygsuite/utility/guids.py
|
gitter-badger/pygsuite
|
536766c36f653edbc7585141f1c3327f508e19da
|
[
"MIT"
] | null | null | null |
pygsuite/utility/guids.py
|
gitter-badger/pygsuite
|
536766c36f653edbc7585141f1c3327f508e19da
|
[
"MIT"
] | null | null | null |
from uuid import uuid4
def get_guid():
return str(uuid4())
| 10.833333
| 23
| 0.676923
| 10
| 65
| 4.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0.215385
| 65
| 5
| 24
| 13
| 0.803922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
8743618cb8d520bc2d553a23c39a328e450a7aa1
| 118
|
py
|
Python
|
baseline/tf/lm/training/__init__.py
|
shar999/mead-baseline
|
bd9cd02c0a1d9c0df91aca171774a6967e6ce190
|
[
"Apache-2.0"
] | 241
|
2016-04-25T20:02:31.000Z
|
2019-09-03T05:44:09.000Z
|
baseline/tf/lm/training/__init__.py
|
shar999/mead-baseline
|
bd9cd02c0a1d9c0df91aca171774a6967e6ce190
|
[
"Apache-2.0"
] | 42
|
2017-08-21T16:04:36.000Z
|
2019-09-30T20:45:17.000Z
|
baseline/tf/lm/training/__init__.py
|
shar999/mead-baseline
|
bd9cd02c0a1d9c0df91aca171774a6967e6ce190
|
[
"Apache-2.0"
] | 75
|
2016-06-28T01:18:58.000Z
|
2019-08-29T06:47:22.000Z
|
import tensorflow as tf
from baseline.tf.lm.training.eager import *
from baseline.tf.lm.training.distributed import *
| 29.5
| 49
| 0.813559
| 18
| 118
| 5.333333
| 0.555556
| 0.25
| 0.291667
| 0.333333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 118
| 3
| 50
| 39.333333
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8745bf1f03aab31b98850fe7136d547a644ccad0
| 154
|
py
|
Python
|
singletask_sql/tables/utils/query.py
|
lenaKuznetsova/singletask-sql
|
460d1c3ca41e3a5c4ca263a4ebe03ab7664ddcdb
|
[
"MIT"
] | null | null | null |
singletask_sql/tables/utils/query.py
|
lenaKuznetsova/singletask-sql
|
460d1c3ca41e3a5c4ca263a4ebe03ab7664ddcdb
|
[
"MIT"
] | null | null | null |
singletask_sql/tables/utils/query.py
|
lenaKuznetsova/singletask-sql
|
460d1c3ca41e3a5c4ca263a4ebe03ab7664ddcdb
|
[
"MIT"
] | null | null | null |
from singletask_sql.tables.constants import INCLUDED_DELETED
def include_deleted(query):
return query.execution_options(**{INCLUDED_DELETED: True})
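# Hedged usage sketch: the session and model below are illustrative
# assumptions, not part of this module. The helper only tags the query with
# an execution option; a soft-delete filter elsewhere is assumed to inspect
# INCLUDED_DELETED and skip its WHERE clause when the flag is set.
def fetch_all(session, model, with_deleted=False):
    query = session.query(model)
    if with_deleted:
        query = include_deleted(query)  # mark the query to bypass the filter
    return query.all()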
| 25.666667
| 62
| 0.818182
| 19
| 154
| 6.368421
| 0.789474
| 0.247934
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097403
| 154
| 5
| 63
| 30.8
| 0.870504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
5e8c6938a8085737ba4677787be87afe4ca05f1f
| 733
|
py
|
Python
|
pysit/objective_functions/__init__.py
|
zfang-slim/PysitForPython3
|
dc60537b26018e28d92b7a956a2cf96775f0bdf9
|
[
"BSD-3-Clause"
] | null | null | null |
pysit/objective_functions/__init__.py
|
zfang-slim/PysitForPython3
|
dc60537b26018e28d92b7a956a2cf96775f0bdf9
|
[
"BSD-3-Clause"
] | null | null | null |
pysit/objective_functions/__init__.py
|
zfang-slim/PysitForPython3
|
dc60537b26018e28d92b7a956a2cf96775f0bdf9
|
[
"BSD-3-Clause"
] | 1
|
2020-06-13T07:13:07.000Z
|
2020-06-13T07:13:07.000Z
|
from pysit.objective_functions.objective_function import *
from pysit.objective_functions.temporal_least_squares import *
from pysit.objective_functions.hybrid_least_squares import *
from pysit.objective_functions.frequency_least_squares import *
# from pysit.objective_functions.temporal_least_squares_includePML import *
from pysit.objective_functions.temporal_envelope import *
from pysit.objective_functions.temporal_extended_imaging_inversion import *
from pysit.objective_functions.temporal_correlate import *
from pysit.objective_functions.temporal_optimal_transport import *
from pysit.objective_functions.temporal_least_squares_cnn import *
#from pysit.objective_functions.temporal_extended_imaging_inversion_sub import *
| 52.357143
| 80
| 0.888131
| 90
| 733
| 6.844444
| 0.233333
| 0.160714
| 0.321429
| 0.482143
| 0.814935
| 0.814935
| 0.61526
| 0.469156
| 0.211039
| 0
| 0
| 0
| 0.06412
| 733
| 13
| 81
| 56.384615
| 0.897959
| 0.207367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0d9be1eae9adc0830328284806d740c1e0389b95
| 16,424
|
py
|
Python
|
lib/FakeObjectsForTests/FakeObjectsForTestsClient.py
|
r2sunita/SetAPI
|
4ed769ed9678c057c7ded05fb93b9b7dc0874fc2
|
[
"MIT"
] | null | null | null |
lib/FakeObjectsForTests/FakeObjectsForTestsClient.py
|
r2sunita/SetAPI
|
4ed769ed9678c057c7ded05fb93b9b7dc0874fc2
|
[
"MIT"
] | null | null | null |
lib/FakeObjectsForTests/FakeObjectsForTestsClient.py
|
r2sunita/SetAPI
|
4ed769ed9678c057c7ded05fb93b9b7dc0874fc2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
############################################################
#
# Autogenerated by the KBase type compiler -
# any changes made here will be overwritten
#
############################################################
from __future__ import print_function
# the following is a hack to get the baseclient to import whether we're in a
# package or not. This makes pep8 unhappy, hence the annotations.
try:
# baseclient and this client are in a package
from .baseclient import BaseClient as _BaseClient # @UnusedImport
except ImportError:
# no they aren't
from baseclient import BaseClient as _BaseClient # @Reimport
import time
class FakeObjectsForTests(object):
def __init__(
self, url=None, timeout=30 * 60, user_id=None,
password=None, token=None, ignore_authrc=False,
trust_all_ssl_certificates=False,
auth_svc='https://kbase.us/services/authorization/Sessions/Login',
service_ver='dev',
async_job_check_time_ms=100, async_job_check_time_scale_percent=150,
async_job_check_max_time_ms=300000):
if url is None:
raise ValueError('A url is required')
self._service_ver = service_ver
self._client = _BaseClient(
url, timeout=timeout, user_id=user_id, password=password,
token=token, ignore_authrc=ignore_authrc,
trust_all_ssl_certificates=trust_all_ssl_certificates,
auth_svc=auth_svc,
async_job_check_time_ms=async_job_check_time_ms,
async_job_check_time_scale_percent=async_job_check_time_scale_percent,
async_job_check_max_time_ms=async_job_check_max_time_ms)
def _check_job(self, job_id):
return self._client._check_job('FakeObjectsForTests', job_id)
def _create_any_objects_submit(self, params, context=None):
return self._client._submit_job(
'FakeObjectsForTests.create_any_objects', [params],
self._service_ver, context)
def create_any_objects(self, params, context=None):
"""
:param params: instance of type "CreateAnyObjectsParams"
(ws_id/ws_name - two alternative ways to set target workspace,
obj_names - list of names for target workspace objects, metadata -
optional metadata.) -> structure: parameter "ws_id" of Long,
parameter "ws_name" of String, parameter "obj_names" of list of
String, parameter "metadata" of mapping from String to String
:returns: instance of list of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
"""
job_id = self._create_any_objects_submit(params, context)
async_job_check_time = self._client.async_job_check_time
while True:
time.sleep(async_job_check_time)
async_job_check_time = (async_job_check_time *
self._client.async_job_check_time_scale_percent / 100.0)
if async_job_check_time > self._client.async_job_check_max_time:
async_job_check_time = self._client.async_job_check_max_time
job_state = self._check_job(job_id)
if job_state['finished']:
return job_state['result'][0]
def _create_fake_genomes_submit(self, params, context=None):
return self._client._submit_job(
'FakeObjectsForTests.create_fake_genomes', [params],
self._service_ver, context)
def create_fake_genomes(self, params, context=None):
"""
:param params: instance of type "CreateFakeGenomesParams"
(ws_id/ws_name - two alternative ways to set target workspace,
obj_names - list of names for target workspace objects (of type
'KBaseGenomes.Genome'), metadata - optional metadata.) ->
structure: parameter "ws_id" of Long, parameter "ws_name" of
String, parameter "obj_names" of list of String, parameter
"metadata" of mapping from String to String
:returns: instance of list of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
"""
job_id = self._create_fake_genomes_submit(params, context)
async_job_check_time = self._client.async_job_check_time
while True:
time.sleep(async_job_check_time)
async_job_check_time = (async_job_check_time *
self._client.async_job_check_time_scale_percent / 100.0)
if async_job_check_time > self._client.async_job_check_max_time:
async_job_check_time = self._client.async_job_check_max_time
job_state = self._check_job(job_id)
if job_state['finished']:
return job_state['result'][0]
def _create_fake_reads_submit(self, params, context=None):
return self._client._submit_job(
'FakeObjectsForTests.create_fake_reads', [params],
self._service_ver, context)
def create_fake_reads(self, params, context=None):
"""
:param params: instance of type "CreateFakeReadsParams"
(ws_id/ws_name - two alternative ways to set target workspace,
obj_names - list of names for target workspace objects (of type
'KBaseFile.SingleEndLibrary'), metadata - optional metadata.) ->
structure: parameter "ws_id" of Long, parameter "ws_name" of
String, parameter "obj_names" of list of String, parameter
"metadata" of mapping from String to String
:returns: instance of list of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
"""
job_id = self._create_fake_reads_submit(params, context)
async_job_check_time = self._client.async_job_check_time
while True:
time.sleep(async_job_check_time)
async_job_check_time = (async_job_check_time *
self._client.async_job_check_time_scale_percent / 100.0)
if async_job_check_time > self._client.async_job_check_max_time:
async_job_check_time = self._client.async_job_check_max_time
job_state = self._check_job(job_id)
if job_state['finished']:
return job_state['result'][0]
def status(self, context=None):
job_id = self._client._submit_job('FakeObjectsForTests.status',
[], self._service_ver, context)
async_job_check_time = self._client.async_job_check_time
while True:
time.sleep(async_job_check_time)
async_job_check_time = (async_job_check_time *
self._client.async_job_check_time_scale_percent / 100.0)
if async_job_check_time > self._client.async_job_check_max_time:
async_job_check_time = self._client.async_job_check_max_time
job_state = self._check_job(job_id)
if job_state['finished']:
return job_state['result'][0]
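# Hedged usage sketch for the autogenerated client above. The URL, token and
# workspace name are hypothetical placeholders, and a reachable KBase-style
# job service is assumed; each call below blocks while polling _check_job.
if __name__ == "__main__":
    client = FakeObjectsForTests(
        url="https://example.kbase.us/services/fake_objects",  # placeholder
        token="REPLACE_WITH_TOKEN",
    )
    infos = client.create_any_objects(
        {"ws_name": "my_workspace", "obj_names": ["obj1", "obj2"]}
    )
    for info in infos:
        # object_info is an 11-tuple; per the docstring, index 1 is the name.
        print(info[1])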
| 60.605166
| 82
| 0.656905
| 2,254
| 16,424
| 4.611358
| 0.115794
| 0.037714
| 0.061285
| 0.062151
| 0.897922
| 0.892438
| 0.876467
| 0.873004
| 0.865307
| 0.841543
| 0
| 0.016941
| 0.270397
| 16,424
| 270
| 83
| 60.82963
| 0.850455
| 0.627618
| 0
| 0.516854
| 1
| 0
| 0.061177
| 0.029636
| 0
| 0
| 0
| 0
| 0
| 1
| 0.101124
| false
| 0.022472
| 0.044944
| 0.044944
| 0.247191
| 0.011236
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0dc8763982c79f92b9a20691c931f8675864e2dd
| 1,136
|
py
|
Python
|
test/pdetools_spectral_test.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 23
|
2018-05-25T02:16:59.000Z
|
2022-03-24T06:56:34.000Z
|
test/pdetools_spectral_test.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 1
|
2019-06-11T06:59:21.000Z
|
2019-06-11T06:59:40.000Z
|
test/pdetools_spectral_test.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 8
|
2018-08-29T16:43:12.000Z
|
2022-01-17T11:54:40.000Z
|
#%%
import torch
import aTEAM
import aTEAM.pdetools.spectral as spectral
import aTEAM.pdetools.init as init
import aTEAM.nn.functional as aF
size = 100
dx = 1/size
u = init.initgen(mesh_size=[size,size], freq=4)
mesh_bound = [[0,0],[1,1]]
# u = u.to(dtype=torch.float32)
upad = aF.periodicpad(u, [0,0,1,1])
u_spect = spectral.time2spect(u, signal_ndim=2)
u10_spect = spectral.spect_diff(u_spect, signal_ndim=2, order=[1,0], mesh_bound=mesh_bound)
u10 = spectral.spect2time(u10_spect, signal_ndim=2)
print(((u10-(upad[2:]-upad[:-2])/(2*dx)).norm()/u10.norm()).item())
#%%
import torch
import aTEAM
import aTEAM.pdetools.spectral as spectral
import aTEAM.pdetools.init as init
import aTEAM.nn.functional as aF
size = 10000
dx = 1/size
u = init.initgen(mesh_size=[size,], freq=3)
mesh_bound = [[0,],[1,]]
# u = u.to(dtype=torch.float32)
upad = aF.periodicpad(u, [1,1])
u_spect = spectral.time2spect(u, signal_ndim=1)
u10_spect = spectral.spect_diff(u_spect, signal_ndim=1, order=[1,], mesh_bound=mesh_bound)
u10 = spectral.spect2time(u10_spect, signal_ndim=1)
print(((u10-(upad[2:]-upad[:-2])/(2*dx)).norm()/u10.norm()).item())
#%%
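# Self-contained sketch of the same check in plain NumPy (an assumption:
# aTEAM is not required here). It differentiates a periodic signal
# spectrally and compares against the second-order central difference
# (u[i+1] - u[i-1]) / (2*dx), mirroring the two cells above.
import numpy as np

size = 256
dx = 1.0 / size
x = np.arange(size) * dx
u = np.sin(2 * np.pi * x)

k = 2j * np.pi * np.fft.fftfreq(size, d=dx)         # spectral wavenumbers
du_spect = np.real(np.fft.ifft(k * np.fft.fft(u)))  # spectral derivative
du_fd = (np.roll(u, -1) - np.roll(u, 1)) / (2 * dx)

# relative mismatch should be O(dx**2), i.e. roughly 1e-4 for size=256
print(np.linalg.norm(du_spect - du_fd) / np.linalg.norm(du_spect))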
| 27.707317
| 91
| 0.713908
| 196
| 1,136
| 4.015306
| 0.204082
| 0.111817
| 0.096569
| 0.055909
| 0.923761
| 0.917408
| 0.917408
| 0.917408
| 0.917408
| 0.640407
| 0
| 0.065558
| 0.100352
| 1,136
| 40
| 92
| 28.4
| 0.704501
| 0.058099
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.357143
| 0
| 0.357143
| 0.071429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
0ddf383708415659b3f33bdc94cfde6205f071e1
| 63
|
py
|
Python
|
storage_server/testing.py
|
khaledismaeel/Simple-DFS
|
2c2481213b25aec25e6de3eb56c9671b83303147
|
[
"Unicode-DFS-2016",
"Unicode-DFS-2015"
] | null | null | null |
storage_server/testing.py
|
khaledismaeel/Simple-DFS
|
2c2481213b25aec25e6de3eb56c9671b83303147
|
[
"Unicode-DFS-2016",
"Unicode-DFS-2015"
] | null | null | null |
storage_server/testing.py
|
khaledismaeel/Simple-DFS
|
2c2481213b25aec25e6de3eb56c9671b83303147
|
[
"Unicode-DFS-2016",
"Unicode-DFS-2015"
] | null | null | null |
print("==================================\nServer response...")
| 63
| 63
| 0.31746
| 3
| 63
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 63
| 1
| 63
| 63
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0.84375
| 0.65625
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
df26733eb6aee877991603b2cc77b6050bdecd3e
| 106
|
py
|
Python
|
ccal/normalize_path.py
|
alex-wenzel/ccal
|
74dfc604d93e6ce9e12f34a828b601618df51faa
|
[
"MIT"
] | null | null | null |
ccal/normalize_path.py
|
alex-wenzel/ccal
|
74dfc604d93e6ce9e12f34a828b601618df51faa
|
[
"MIT"
] | null | null | null |
ccal/normalize_path.py
|
alex-wenzel/ccal
|
74dfc604d93e6ce9e12f34a828b601618df51faa
|
[
"MIT"
] | null | null | null |
from os.path import abspath, expanduser
def normalize_path(path):
return abspath(expanduser(path))
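# Hedged usage note: expanduser resolves a leading "~" and abspath (which
# applies normpath) makes the result absolute and collapses "..". The path
# below is an illustrative placeholder.
print(normalize_path("~/data/../data/matrix.tsv"))
# e.g. /home/user/data/matrix.tsv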
| 15.142857
| 39
| 0.764151
| 14
| 106
| 5.714286
| 0.642857
| 0.425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150943
| 106
| 6
| 40
| 17.666667
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
df7715f6b6f7a749644bfed0e6a06ae667858536
| 34,794
|
py
|
Python
|
data_plotting/plot_errors_norms.py
|
qgoestch/sinecity_testcases
|
ec04ba707ff69b5c1b4b42e56e522855a2f34a65
|
[
"BSD-3-Clause"
] | null | null | null |
data_plotting/plot_errors_norms.py
|
qgoestch/sinecity_testcases
|
ec04ba707ff69b5c1b4b42e56e522855a2f34a65
|
[
"BSD-3-Clause"
] | null | null | null |
data_plotting/plot_errors_norms.py
|
qgoestch/sinecity_testcases
|
ec04ba707ff69b5c1b4b42e56e522855a2f34a65
|
[
"BSD-3-Clause"
] | 1
|
2021-02-18T13:07:10.000Z
|
2021-02-18T13:07:10.000Z
|
# -*- coding: utf-8 -*-
##
# \file plot_errors_norms.py
# \title Errors and norms for each case.
# \author Pierre Chobeau
# \version 0.1
# \license BSD 3-Clause License
# \inst UMRAE (Ifsttar Nantes), LAUM (Le Mans Université)
# \date 2017, 12 Oct.
##
import numpy as np
import matplotlib.ticker
from matplotlib import pyplot as plt
import os
base_path = os.path.dirname(os.path.realpath(__file__))  # directory of this script
def plot_error_basic(h_set, one_norm, two_norm, max_norm,
ord_acc_one, ord_acc_two, ord_acc_max,
case, save_fig):
"""
    Main plot made of 3 subplots that show (1) the averaged (1-norm) error,
    (2) the two-norm of the error and (3) the max-norm of the error.
    :param h_set: spatial step sequence (m).
    :type h_set: list of floats
    :param one_norm: relative error in the 1-norm for
        each spatial step.
    :type one_norm: 1d-array
    :param two_norm: relative error in the 2-norm for
        each spatial step.
    :type two_norm: 1d-array
    :param max_norm: relative error in the MAX-norm for
        each spatial step.
    :type max_norm: 1d-array
    :param ord_acc_one: order of accuracy between two consecutive grids in
        the 1-norm.
    :type ord_acc_one: 1d-array
    :param ord_acc_two: order of accuracy between two consecutive grids in
        the 2-norm.
    :type ord_acc_two: 1d-array
    :param ord_acc_max: order of accuracy between two consecutive grids in
        the max-norm.
    :type ord_acc_max: 1d-array
    :param case: integer that selects the saved folder in the results dir.
    :type case: int
    :param save_fig: save or not the figure.
    :type save_fig: bool
    :return: two graphs: the errors and norms, and the order of accuracy.
"""
    print('Plotting the errors')
h_th = np.linspace(h_set[0] - 0.001, h_set[-1] + 0.001, 100)
j = 2
# =========================================================================
# All grids figure
# =========================================================================
fig = plt.figure('Errors', figsize=(14, 4.2))
ax = fig.add_subplot(131)
ax.loglog(h_set, one_norm[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
error_margin = 0.02 * (h_set[j] / h_set[j]) ** 2 * one_norm[j]
scnd_ord_th = (h_set / h_set[j]) ** 2 * one_norm[j] + error_margin
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * one_norm[j], 'k-', lw=1.5)
plt.legend(('FD', '2nd order'), fontsize=14)
# =========================================================================
# Linear regression on log log
# =========================================================================
coefs = np.polyfit(h_set, one_norm, 1)
poly = np.poly1d(coefs)
ys = poly(h_set)
# yhat = 10. ** (np.polyval(coefs, one_norm))
# ax.loglog(h_set, ys, 'y--', lw=3)
# m, n, c = np.polyfit(h_set, np.log10(one_norm), 2) # fit log(y) = m*log(x) + c
# y_fit = np.power(10, m * h_set**2 + n*h_set + c) # calculate the fitted values of y
    m, c = np.polyfit(h_set, np.log10(one_norm), 1)  # fit log10(y) = m*h + c
    y_fit = np.power(10, m * h_set + c)  # back-transform to the fitted y values
# print m, c
# plt.plot(h_set, y_fit, 'y--', lw=3)
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{1}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -7, 10 ** -1)
plt.tight_layout()
ax = fig.add_subplot(132)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * two_norm[j], 'k-', lw=1.5)
ax.loglog(h_set, two_norm[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{2}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -7, 10 ** -1)
plt.tight_layout()
ax = fig.add_subplot(133)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * max_norm[j], 'k-', lw=1.5)
ax.loglog(h_set, max_norm[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{max}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -7, 10 ** -1)
plt.tight_layout()
if save_fig:
res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
'results', 'case%i' % case, 'figures')
if not os.path.exists(res_path):
os.makedirs(res_path)
plt.savefig(os.path.join(res_path, 'errors_fd.eps'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'errors_fd.png'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'errors_fd.pdf'), transparent=True,
bbox_inches='tight', pad_inches=0)
# =========================================================================
# SORTED grids figure
# =========================================================================
fig = plt.figure('Errors SORTED', figsize=(14, 4.2))
ax = fig.add_subplot(131)
error_margin = 0.02 * (h_set[j] / h_set[j]) ** 2 * one_norm[j]
scnd_ord_th_one = (h_set / h_set[j]) ** 2 * one_norm[j] + error_margin
ax.loglog(h_set, scnd_ord_th_one, 'm--', lw=1)
cond = np.less_equal(one_norm, scnd_ord_th_one)
ax.loglog(np.extract(cond, h_set), np.extract(cond, one_norm), 'ro',
markersize=6, markeredgewidth=1.2, markeredgecolor='r',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{1}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
# plt.ylim(10 ** -3, 10 ** -1)
plt.tight_layout()
ax = fig.add_subplot(132)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * two_norm[j], 'm--', lw=1)
ax.loglog(np.extract(cond, h_set), np.extract(cond, two_norm), 'ro',
markersize=6, markeredgewidth=1.2, markeredgecolor='r',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{2}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
# plt.ylim(min(), 10 ** -1)
plt.tight_layout()
ax = fig.add_subplot(133)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * max_norm[j], 'm--', lw=1)
ax.loglog(np.extract(cond, h_set), np.extract(cond, max_norm), 'ro',
markersize=6, markeredgewidth=1.2, markeredgecolor='r',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{max}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
# plt.ylim(10 ** -3, 10 ** -1)
plt.tight_layout()
# res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
# 'results', 'case%i' % case, 'figures')
# if not os.path.exists(res_path):
# os.makedirs(res_path)
# plt.savefig(os.path.join(res_path, 'errors.eps'), transparent=True,
# bbox_inches='tight', pad_inches=0)
# plt.savefig(os.path.join(res_path, 'errors.png'), transparent=True,
# bbox_inches='tight', pad_inches=0)
# plt.savefig(os.path.join(res_path, 'errors.pdf'), transparent=True,
# bbox_inches='tight', pad_inches=0)
# =========================================================================
# Order of accuracy btw. 2 consecutive points
# =========================================================================
fig = plt.figure('Order of accuracy', figsize=(14, 4.2))
ax = fig.add_subplot(131)
ax.semilogx(h_set[:-1], ord_acc_one[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
# plt.legend(('TLM', 'FDTD'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
ax = fig.add_subplot(132)
ax.semilogx(h_set[:-1], ord_acc_two[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
# plt.legend(('TLM', 'FDTD'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
ax = fig.add_subplot(133)
ax.semilogx(h_set[:-1], ord_acc_max[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
# plt.legend(('TLM', 'FDTD'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
plt.tight_layout()
if save_fig:
res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
'results', 'case%i' % case, 'figures')
if not os.path.exists(res_path):
os.makedirs(res_path)
plt.savefig(os.path.join(res_path, 'ord_acc_fd.eps'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'ord_acc.png'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'ord_acc.pdf'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.show()
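# Hedged sketch, not part of the original module: one way the ord_acc_*
# inputs plotted above can be obtained. Between two consecutive grids the
# observed order of accuracy is log(e1/e2) / log(h1/h2).
def observed_order(h_set, norms):
    """Observed order of accuracy between consecutive grid refinements."""
    return [np.log(norms[i] / norms[i + 1]) / np.log(h_set[i] / h_set[i + 1])
            for i in range(len(norms) - 1)]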
def plot_errors_norms(h_set, avg_error_tlm, avg_error_fdtd, two_norm_tlm, two_norm_fdtd,
max_norm_tlm, max_norm_fdtd, ord_acc_tlm_one, ord_acc_fdtd_one,
ord_acc_tlm_two, ord_acc_fdtd_two,
ord_acc_tlm_max, ord_acc_fdtd_max, case):
"""
    Main plot made of 3 subplots that show (1) the averaged error,
(2) the two-norm of the error and (3) the max-norm of the error.
:param h_set: spatial step sequence (m).
:type h_set: list of floats
:param avg_error_tlm: error averaged over all receivers for the TLM for
each spatial step.
:type avg_error_tlm: 1d-array
:param avg_error_fdtd: error averaged over all receivers for the FDTD for
each spatial step.
:type avg_error_fdtd: 1d-array
:param two_norm_tlm: relative error in the 2-norm for the TLM for
each spatial step.
:type two_norm_tlm: 1d-array
:param two_norm_fdtd: relative error in the 2-norm for the FDTD for
each spatial step.
:type two_norm_fdtd: 1d-array
:param max_norm_tlm: relative error in the MAX-norm for the TLM for
each spatial step.
:type max_norm_tlm: 1d-array
:param max_norm_fdtd: relative error in the MAX-norm for the FDTD for
each spatial step.
:type max_norm_fdtd: 1d-array
:param ord_acc_tlm_two: order of accuracy between two consecutive grids in
the 2-norm for the TLM.
:type ord_acc_tlm_two: 1d-array
:param ord_acc_fdtd_two: order of accuracy between two consecutive grids in
the 2-norm for the FDTD.
:type ord_acc_fdtd_two: 1d-array
:param ord_acc_tlm_max: order of accuracy between two consecutive grids in
the max-norm for the TLM.
:type ord_acc_tlm_max: 1d-array
:param ord_acc_fdtd_max: order of accuracy between two consecutive grids in
the max-norm for the FDTD.
:type ord_acc_fdtd_max: 1d-array
    :param case: integer that selects the saved folder in the results
directory.
:type case: int
:return: two graphs, first the errors and norms, second the order of
accuracy for each norm.
"""
    print('Plotting the errors')
h_th = np.linspace(h_set[0] - 0.001, h_set[-1] + 0.001, 100)
j = 1
fig = plt.figure('Errors', figsize=(14, 4.2))
ax = fig.add_subplot(131)
ax.loglog(h_set, avg_error_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, avg_error_fdtd[:], 'go',
markersize=4, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * avg_error_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * avg_error_tlm[j], 'b-', lw=1)
plt.legend(('TLM', 'FDTD', '1st order', '2nd order'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{1}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -7, 10 ** -3)
plt.tight_layout()
    print(np.shape(two_norm_tlm), np.shape(two_norm_fdtd))
ax = fig.add_subplot(132)
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * two_norm_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * two_norm_tlm[j], 'b-', lw=1)
ax.loglog(h_set, two_norm_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, two_norm_fdtd[:], 'go',
markersize=4, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{2}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -7, 10 ** -3)
plt.tight_layout()
ax = fig.add_subplot(133)
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * max_norm_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * max_norm_tlm[j], 'b-', lw=1)
ax.loglog(h_set, max_norm_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, max_norm_fdtd[:], 'go',
markersize=4, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{max}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -7, 10 ** -3)
plt.tight_layout()
res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
'results', 'case%i' % case, 'figures')
if not os.path.exists(res_path):
os.makedirs(res_path)
plt.savefig(os.path.join(res_path, 'errors.eps'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'errors.png'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'errors.pdf'), transparent=True,
bbox_inches='tight', pad_inches=0)
# =========================================================================
# Order of accuracy btw. 2 consecutive points
# =========================================================================
fig = plt.figure('Order of accuracy', figsize=(14, 4.2))
ax = fig.add_subplot(131)
ax.semilogx(h_set[:-1], ord_acc_tlm_one[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_fdtd_one[:], 'go',
markersize=4, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
plt.legend(('TLM', 'FDTD'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
ax = fig.add_subplot(132)
ax.semilogx(h_set[:-1], ord_acc_tlm_two[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_fdtd_two[:], 'go',
markersize=4, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
plt.legend(('TLM', 'FDTD'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
ax = fig.add_subplot(133)
ax.semilogx(h_set[:-1], ord_acc_tlm_max[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_fdtd_max[:], 'go',
markersize=4, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
plt.legend(('TLM', 'FDTD'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
plt.tight_layout()
plt.savefig(os.path.join(res_path, 'ord_acc.eps'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'ord_acc.png'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'ord_acc.pdf'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.show()
def plot_errors_norms_fd_fdtd_tlm(h_set, one_norm_fd, one_norm_tlm, one_norm_fdtd,
two_norm_fd, two_norm_tlm, two_norm_fdtd,
max_norm_fd, max_norm_tlm, max_norm_fdtd,
ord_acc_fd_one, ord_acc_tlm_one, ord_acc_fdtd_one,
ord_acc_fd_two, ord_acc_tlm_two, ord_acc_fdtd_two,
ord_acc_fd_max, ord_acc_tlm_max, ord_acc_fdtd_max, case):
"""
    Main plot made of 3 subplots that show (1) the averaged error,
(2) the two-norm of the error and (3) the max-norm of the error.
:param h_set: spatial step sequence (m).
:type h_set: list of floats
    :param one_norm_fd: relative error in the 1-norm for the FD scheme for
        each spatial step.
    :type one_norm_fd: 1d-array
    :param one_norm_tlm: relative error in the 1-norm for the TLM for
        each spatial step.
    :type one_norm_tlm: 1d-array
    :param one_norm_fdtd: relative error in the 1-norm for the FDTD for
        each spatial step.
    :type one_norm_fdtd: 1d-array
    :param two_norm_fd: relative error in the 2-norm for the FD scheme.
    :type two_norm_fd: 1d-array
    :param two_norm_tlm: relative error in the 2-norm for the TLM.
    :type two_norm_tlm: 1d-array
    :param two_norm_fdtd: relative error in the 2-norm for the FDTD.
    :type two_norm_fdtd: 1d-array
    :param max_norm_fd: relative error in the MAX-norm for the FD scheme.
    :type max_norm_fd: 1d-array
    :param max_norm_tlm: relative error in the MAX-norm for the TLM.
    :type max_norm_tlm: 1d-array
    :param max_norm_fdtd: relative error in the MAX-norm for the FDTD.
    :type max_norm_fdtd: 1d-array
    :param ord_acc_fd_one: order of accuracy between two consecutive grids
        in the 1-norm for the FD scheme; the _tlm/_fdtd variants and the
        _two/_max variants follow the same pattern for the other methods
        and norms.
    :type ord_acc_fd_one: 1d-array
    :param case: integer that selects the saved folder in the results
        directory.
:type case: int
:return: two graphs, first the errors and norms, second the order of
accuracy for each norm.
"""
    print('Plotting the errors')
h_th = np.linspace(h_set[0] - 0.001, h_set[-1] + 0.001, 100)
j = 1
fig = plt.figure('Errors', figsize=(14, 4.2))
ax = fig.add_subplot(131)
ax.loglog(h_set, one_norm_fd[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
ax.loglog(h_set, one_norm_fdtd[:], 'go',
markersize=5, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.loglog(h_set, one_norm_tlm[:], 'rs',
markersize=5, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * one_norm_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * one_norm_tlm[j], 'b-', lw=1)
plt.legend(('FD', 'FDTD', 'TLM', '1st order', '2nd order'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{1}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -8, 10 ** -0)
plt.tight_layout()
    print(np.shape(two_norm_tlm), np.shape(two_norm_fdtd))
ax = fig.add_subplot(132)
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * two_norm_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * two_norm_tlm[j], 'b-', lw=1)
ax.loglog(h_set, two_norm_fd[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
ax.loglog(h_set, two_norm_fdtd[:], 'go',
markersize=5, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.loglog(h_set, two_norm_tlm[:], 'rs',
markersize=5, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{2}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -8, 10 ** -0)
plt.tight_layout()
ax = fig.add_subplot(133)
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * max_norm_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * max_norm_tlm[j], 'b-', lw=1)
ax.loglog(h_set, max_norm_fd[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
ax.loglog(h_set, max_norm_fdtd[:], 'go',
markersize=5, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.loglog(h_set, max_norm_tlm[:], 'rs',
markersize=5, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.0e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{max}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -8, 10 ** -0)
plt.tight_layout()
res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
'results', 'case%i' % case, 'figures')
if not os.path.exists(res_path):
os.makedirs(res_path)
plt.savefig(os.path.join(res_path, 'errors_3.eps'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'errors_3.png'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'errors_3.pdf'), transparent=True,
bbox_inches='tight', pad_inches=0)
# =========================================================================
    # Order of accuracy between two consecutive grid points
# =========================================================================
fig = plt.figure('Order of accuracy', figsize=(14, 4.2))
ax = fig.add_subplot(131)
ax.semilogx(h_set[:-1], ord_acc_fd_one[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_fdtd_one[:], 'go',
markersize=5, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_tlm_one[:], 'rs',
markersize=5, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
plt.legend(('FD', 'FDTD', 'TLM'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
ax = fig.add_subplot(132)
ax.semilogx(h_set[:-1], ord_acc_fd_two[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_fdtd_two[:], 'go',
markersize=5, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_tlm_two[:], 'rs',
markersize=5, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
# plt.legend(('TLM', 'FDTD'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
ax = fig.add_subplot(133)
ax.semilogx(h_set[:-1], ord_acc_fd_max[:], 'bd',
markersize=4, markeredgewidth=1.2, markeredgecolor='b',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_fdtd_max[:], 'go',
markersize=5, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.semilogx(h_set[:-1], ord_acc_tlm_max[:], 'rs',
markersize=5, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
# plt.legend(('TLM', 'FDTD'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1f'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel('Obs. order of accuracy', fontsize=12)
plt.ylim(0, 4)
plt.tight_layout()
plt.savefig(os.path.join(res_path, 'ord_acc_3.eps'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'ord_acc_3.png'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.savefig(os.path.join(res_path, 'ord_acc_3.pdf'), transparent=True,
bbox_inches='tight', pad_inches=0)
plt.show()
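# A minimal sketch (not part of the original module; the helper name
# `observed_order` is assumed) of how the observed order of accuracy between
# two consecutive grids, plotted above, is conventionally computed: for
# errors E_k measured on grids of spacing h_k,
#     p_k = log(E_k / E_{k+1}) / log(h_k / h_{k+1}),
# which yields len(h_set) - 1 values and matches the h_set[:-1] abscissa
# used in the semilogx plots.
def observed_order(h_set, errors):
    """Observed order of accuracy between consecutive grid refinements."""
    h = np.asarray(h_set, dtype=float)
    e = np.asarray(errors, dtype=float)
    return np.log(e[:-1] / e[1:]) / np.log(h[:-1] / h[1:])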
def plot_errors_norms_dec(dec_error_axial_fdtd, dec_error_diag_fdtd,
dec_error_axial_tlm, dec_error_diag_tlm,
dec_two_norm_axial_fdtd, dec_max_norm_axial_fdtd,
dec_two_norm_diag_fdtd, dec_max_norm_diag_fdtd,
dec_two_norm_axial_tlm, dec_max_norm_axial_tlm,
dec_two_norm_diag_tlm, dec_max_norm_diag_tlm,
h_set):
"""
Same as in function plot_errors_norm() but only valid for the geometrical
spreading with the theoretical decrease.
:param dec_error_axial_fdtd:
:type dec_error_axial_fdtd:
:param dec_error_diag_fdtd:
:type dec_error_diag_fdtd:
:param dec_error_axial_tlm:
:type dec_error_axial_tlm:
:param dec_error_diag_tlm:
:type dec_error_diag_tlm:
:param dec_two_norm_axial_fdtd:
:type dec_two_norm_axial_fdtd:
:param dec_max_norm_axial_fdtd:
:type dec_max_norm_axial_fdtd:
:param dec_two_norm_diag_fdtd:
:type dec_two_norm_diag_fdtd:
:param dec_max_norm_diag_fdtd:
:type dec_max_norm_diag_fdtd:
:param dec_two_norm_axial_tlm:
:type dec_two_norm_axial_tlm:
:param dec_max_norm_axial_tlm:
:type dec_max_norm_axial_tlm:
:param dec_two_norm_diag_tlm:
:type dec_two_norm_diag_tlm:
:param dec_max_norm_diag_tlm:
:type dec_max_norm_diag_tlm:
:param case: integer that sorts of the saved folders in the results directory.
:type case: int
:return:
:rtype:
"""
h_th = np.linspace(h_set[0] - 0.001, h_set[-1] + 0.001, 100)
j = 1
    fig = plt.figure('Errors decrease axial', figsize=(14, 4.2))
ax = fig.add_subplot(131)
ax.loglog(h_set, dec_error_axial_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, dec_error_axial_fdtd[:], 'go',
markersize=3, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * dec_error_axial_fdtd[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * dec_error_axial_fdtd[j], 'b-', lw=1)
plt.legend(('TLM', 'FDTD', '1st order', '2nd order'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'averaged error', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -5, 10 ** -0)
plt.tight_layout()
ax = fig.add_subplot(132)
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * dec_two_norm_axial_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * dec_two_norm_axial_tlm[j], 'b-', lw=1)
ax.loglog(h_set, dec_two_norm_axial_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, dec_two_norm_axial_fdtd[:], 'go',
markersize=3, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{2}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -5, 10 ** -0)
plt.tight_layout()
ax = fig.add_subplot(133)
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * dec_max_norm_axial_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * dec_max_norm_axial_tlm[j], 'b-', lw=1)
ax.loglog(h_set, dec_max_norm_axial_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, dec_max_norm_axial_fdtd[:], 'go',
markersize=3, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{max}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -5, 10 ** -0)
plt.tight_layout()
res_path = os.path.join(base_path.rsplit(os.sep, 1)[0],
'results', 'case1', 'figures')
if not os.path.exists(res_path):
os.makedirs(res_path)
plt.savefig(os.path.join(res_path, 'pres_dec_errors_fdtd_axial.eps'),
transparent=True, bbox_inches='tight',
pad_inches=0)
plt.savefig(os.path.join(res_path, 'pres_dec_errors_fdtd_axial.png'),
transparent=True, bbox_inches='tight',
pad_inches=0)
plt.savefig(os.path.join(res_path, 'pres_dec_errors_fdtd_axial.pdf'),
transparent=True, bbox_inches='tight',
pad_inches=0)
fig = plt.figure('Errors decrease diagonal', figsize=(14, 4.2))
ax = fig.add_subplot(131)
ax.loglog(h_set, dec_error_diag_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, dec_error_diag_fdtd[:], 'go',
markersize=3, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * dec_error_diag_fdtd[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * dec_error_diag_fdtd[j], 'b-', lw=1)
plt.legend(('TLM', 'FDTD', '1st order', '2nd order'))
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'averaged error', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -5, 10 ** -0)
plt.tight_layout()
ax = fig.add_subplot(132)
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * dec_two_norm_diag_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * dec_two_norm_diag_tlm[j], 'b-', lw=1)
ax.loglog(h_set, dec_two_norm_diag_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, dec_two_norm_diag_fdtd[:], 'go',
markersize=3, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{2}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -5, 10 ** -0)
plt.tight_layout()
ax = fig.add_subplot(133)
ax.loglog(h_th, (h_th / h_set[j]) ** 1 * dec_max_norm_diag_tlm[j], 'm--', lw=1)
ax.loglog(h_th, (h_th / h_set[j]) ** 2 * dec_max_norm_diag_tlm[j], 'b-', lw=1)
ax.loglog(h_set, dec_max_norm_diag_tlm[:], 'rs',
markersize=7, markeredgewidth=1.8, markeredgecolor='r',
markerfacecolor='None')
ax.loglog(h_set, dec_max_norm_diag_fdtd[:], 'go',
markersize=3, markeredgewidth=1.8, markeredgecolor='g',
markerfacecolor='None')
ax.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.2e'))
ax.grid(True, which="both", ls=":")
plt.xlabel('$h$ (m)', fontsize=12)
plt.ylabel(r'$||error||_{max}$', fontsize=12)
plt.xlim(h_set[0] - 0.001, h_set[-1] + 0.001)
plt.ylim(10 ** -5, 10 ** -0)
plt.tight_layout()
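    # Mirrors the axial save block above so the diagonal figure is also
    # written out; the 'pres_dec_errors_fdtd_diag.*' filenames are an
    # assumption based on the axial naming pattern.
    plt.savefig(os.path.join(res_path, 'pres_dec_errors_fdtd_diag.eps'),
                transparent=True, bbox_inches='tight',
                pad_inches=0)
    plt.savefig(os.path.join(res_path, 'pres_dec_errors_fdtd_diag.png'),
                transparent=True, bbox_inches='tight',
                pad_inches=0)
    plt.savefig(os.path.join(res_path, 'pres_dec_errors_fdtd_diag.pdf'),
                transparent=True, bbox_inches='tight',
                pad_inches=0)
    plt.show()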
# --- file: main.py | repo: agopalak/football_pred | license: MIT ---
from pre_proc import get_weather
#from pre_proc import get_nfldata
get_weather.fetch_weather('Tampa, FL', '12/19/2010', '1:00 PM')
#get_nfldata.get_nfldata()
# --- file: tests/test_run.py | repo: Maddosaurus/MLT | license: Apache-2.0 ---
# pylint: disable=redefined-outer-name,missing-docstring,unused-import,no-self-use
import pytest
import argparse
from .context import run, base_runner
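# These tests monkeypatch the base_runner entry points so that run.main() can
# be exercised end-to-end without launching an actual training run; the mock
# callbacks assert on the CLI arguments parsed by create_parser().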
def test_create_parser():
assert isinstance(run.create_parser(), argparse.ArgumentParser)
def test_nsl_kfold(monkeypatch):
def mock_run_nsl(args):
assert args.kfolds == 2
assert args.AutoEncoder[0] == 32.0
assert args.AutoEncoder[1] == 100.0
assert args.AutoEncoder[2] == 0.2
assert args.AutoEncoder[3] == 0.1
monkeypatch.setattr(base_runner, 'run_NSL', mock_run_nsl)
parser = run.create_parser()
args = parser.parse_args(['--unsupervised', '-k', '2', '--nsl16', '--AutoEncoder', '32', '100', '0.2', '0.1'])
run.main(args)
def test_nsl_single(monkeypatch):
def mock_run_nsl(args):
assert args.AutoEncoder[0] == 32.0
assert args.AutoEncoder[1] == 100.0
assert args.AutoEncoder[2] == 0.2
assert args.AutoEncoder[3] == 0.1
monkeypatch.setattr(base_runner, 'run_NSL', mock_run_nsl)
parser = run.create_parser()
args = parser.parse_args(['--unsupervised', '--single', '--nsl16', '--AutoEncoder', '32', '100', '0.2', '0.1'])
run.main(args)
def test_cic_kfold(monkeypatch):
def mock_run_cic(args):
assert args.kfolds == 2
assert args.AutoEncoder[0] == 32.0
assert args.AutoEncoder[1] == 100.0
assert args.AutoEncoder[2] == 0.2
assert args.AutoEncoder[3] == 0.1
monkeypatch.setattr(base_runner, 'run_CIC', mock_run_cic)
parser = run.create_parser()
args = parser.parse_args(['--unsupervised', '-k', '2', '--cic20', '--AutoEncoder', '32', '100', '0.2', '0.1'])
run.main(args)
def test_cic_single(monkeypatch):
def mock_run_cic(args):
assert args.AutoEncoder[0] == 32.0
assert args.AutoEncoder[1] == 100.0
assert args.AutoEncoder[2] == 0.2
assert args.AutoEncoder[3] == 0.1
monkeypatch.setattr(base_runner, 'run_CIC', mock_run_cic)
parser = run.create_parser()
args = parser.parse_args(['--unsupervised', '--single', '--cic20', '--AutoEncoder', '32', '100', '0.2', '0.1'])
run.main(args)
# --- file: tests/samples/indentation.py | repo: spamegg1/snoop | license: MIT ---
import snoop
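# Assuming snoop's documented semantics, depth=2 traces the decorated function
# plus one level of nested calls: main() and f2() for the first decorator,
# f4() and f5() for the second.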
@snoop.snoop(depth=2)
def main():
f2()
def f2():
f3()
def f3():
f4()
@snoop.snoop(depth=2)
def f4():
f5()
def f5():
pass
if __name__ == '__main__':
main()
# --- file: test/input/058.py | repo: EliRibble/pyfmt | license: MIT ---
assert 1 != 2, "Check that math still makes sense in this particular simulation. Check it with a really long assert and see if we can properly format said assert."
# --- file: bindings/python/ensmallen_graph/datasets/string/panicumvirgatum.py | repo: caufieldjh/ensmallen_graph | license: MIT ---
"""
This file offers the methods to automatically retrieve the graph Panicum virgatum.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 18:10:24.982416
The undirected graph Panicum virgatum has 57795 nodes and 30051846 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.01799 and has 4 connected components, where the component with most
nodes has 57784 nodes and the component with the least nodes has 3 nodes.
The graph median node degree is 495, the mean node degree is 1039.95, and
the node degree mode is 4. The top 5 most central nodes are 38727.Pavir.Aa02237.1.p
(degree 17307), 38727.Pavir.J34942.1.p (degree 14530), 38727.Pavir.Hb00840.1.p
(degree 14530), 38727.Pavir.J29120.1.p (degree 14523) and 38727.Pavir.Aa02697.1.p
(degree 13354).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import PanicumVirgatum
# Then load the graph
graph = PanicumVirgatum()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def PanicumVirgatum(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Panicum virgatum graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
        Whether to load the graph as directed or undirected.
        By default false.
    verbose: int = 2,
        Whether to show loading bars during the retrieval and building
        of the graph.
    cache_path: str = "graphs/string",
        Where to store the downloaded graphs.
    additional_graph_kwargs: Dict,
        Additional graph kwargs.

    Returns
    -----------------------
    Instance of Panicum virgatum graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 18:10:24.982416
The undirected graph Panicum virgatum has 57795 nodes and 30051846 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.01799 and has 4 connected components, where the component with most
nodes has 57784 nodes and the component with the least nodes has 3 nodes.
The graph median node degree is 495, the mean node degree is 1039.95, and
the node degree mode is 4. The top 5 most central nodes are 38727.Pavir.Aa02237.1.p
(degree 17307), 38727.Pavir.J34942.1.p (degree 14530), 38727.Pavir.Hb00840.1.p
(degree 14530), 38727.Pavir.J29120.1.p (degree 14523) and 38727.Pavir.Aa02697.1.p
(degree 13354).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import PanicumVirgatum
# Then load the graph
graph = PanicumVirgatum()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="PanicumVirgatum",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
# --- file: notebook/jupyter_system_command_cd.py | repo: vhn0912/python-snippets | license: MIT ---
import os
print(os.getcwd())
# /Users/mbp/Documents/my-project/python-snippets/notebook
!pwd
# /Users/mbp/Documents/my-project/python-snippets/notebook
%pwd
# '/Users/mbp/Documents/my-project/python-snippets/notebook'
!cd data
print(os.getcwd())
# /Users/mbp/Documents/my-project/python-snippets/notebook
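# Note: "!cd" runs in a throwaway subshell, so the notebook's working
# directory is unchanged; use the "%cd" magic or os.chdir() instead.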
%cd data
# /Users/mbp/Documents/my-project/python-snippets/notebook/data
print(os.getcwd())
# /Users/mbp/Documents/my-project/python-snippets/notebook/data
!pwd
# /Users/mbp/Documents/my-project/python-snippets/notebook/data
%pwd
# '/Users/mbp/Documents/my-project/python-snippets/notebook/data'
cd ..
# /Users/mbp/Documents/my-project/python-snippets/notebook
print(os.getcwd())
# /Users/mbp/Documents/my-project/python-snippets/notebook
os.chdir('data')
print(os.getcwd())
# /Users/mbp/Documents/my-project/python-snippets/notebook/data
# --- file: hooks/post_gen_project.py | repo: janclemenslab/cookiecutter_cluster-template | license: Apache-2.0 ---
print('Generated new project in "{{ cookiecutter.project_name }}".')
print('See "{{ cookiecutter.project_name }}/README.md" for further instructions.')
# --- file: blog/admin.py | repo: tarun-developer/Blog_site | license: Apache-2.0 ---
from django.contrib import admin
from blog.models.comment import Comment
from blog.models.post import Post
admin.site.register(Post)
admin.site.register(Comment)
# --- file: tests/environments/test_reqs.py | repo: FollowTheProcess/pytoil | license: Apache-2.0 ---
"""
Tests for the ReqTxtEnv (requirements.txt environment) class.
Author: Tom Fleet
Created: 15/07/2021
"""
from pathlib import Path
from pytest_mock import MockerFixture
from pytoil.environments import ReqTxtEnv
def test_reqenv_init():
root = Path("/Users/me/fakeproject")
venv = ReqTxtEnv(project_path=root)
assert venv.project_path == root
assert venv.executable == root.joinpath(".venv/bin/python")
def test_reqenv_repr():
root = Path("/Users/me/fakeproject")
venv = ReqTxtEnv(project_path=root)
assert repr(venv) == f"ReqTxtEnv(project_path={root!r})"
def test_reqenv_info_name():
root = Path("/Users/me/fakeproject")
venv = ReqTxtEnv(project_path=root)
assert venv.info_name == "requirements file"
def test_executable_points_to_correct_path():
root = Path("/Users/me/fakeproject")
venv = ReqTxtEnv(project_path=root)
assert venv.executable == root.joinpath(".venv/bin/python")
def test_install_self_passes_correct_command_to_subprocess_dev_txt(
mocker: MockerFixture, requirements_dev_project
):
root = requirements_dev_project
venv = ReqTxtEnv(project_path=root)
mock_subprocess = mocker.patch(
"pytoil.environments.reqs.subprocess.run", autospec=True
)
# Make it think the venv exists already
mocker.patch(
"pytoil.environments.reqs.ReqTxtEnv.exists", autospec=True, return_value=True
)
venv.install_self()
mock_subprocess.assert_called_once_with(
[
f"{venv.executable}",
"-m",
"pip",
"install",
"-r",
"requirements_dev.txt",
"--quiet",
],
check=True,
cwd=root,
)
def test_install_self_passes_correct_command_to_subprocess_req_txt(
mocker: MockerFixture, requirements_project
):
root = requirements_project
venv = ReqTxtEnv(project_path=root)
mock_subprocess = mocker.patch(
"pytoil.environments.reqs.subprocess.run", autospec=True
)
# Make it think the venv exists already
mocker.patch(
"pytoil.environments.reqs.ReqTxtEnv.exists", autospec=True, return_value=True
)
venv.install_self()
mock_subprocess.assert_called_once_with(
[
f"{venv.executable}",
"-m",
"pip",
"install",
"-r",
"requirements.txt",
"--quiet",
],
check=True,
cwd=root,
)
def test_install_self_creates_environment_if_doesnt_exist_first_req_txt(
mocker: MockerFixture, requirements_project
):
root = requirements_project
venv = ReqTxtEnv(project_path=root)
mock_subprocess = mocker.patch(
"pytoil.environments.reqs.subprocess.run", autospec=True
)
# Make it think the venv doesn't exist
mocker.patch(
"pytoil.environments.reqs.ReqTxtEnv.exists", autospec=True, return_value=False
)
mock_create = mocker.patch(
"pytoil.environments.reqs.ReqTxtEnv.create", autospec=True
)
venv.install_self()
mock_create.assert_called_once()
mock_subprocess.assert_called_once_with(
[
f"{venv.executable}",
"-m",
"pip",
"install",
"-r",
"requirements.txt",
"--quiet",
],
check=True,
cwd=root,
)
def test_install_self_creates_environment_if_doesnt_exist_first_dev_txt(
mocker: MockerFixture, requirements_dev_project
):
root = requirements_dev_project
venv = ReqTxtEnv(project_path=root)
mock_subprocess = mocker.patch(
"pytoil.environments.reqs.subprocess.run", autospec=True
)
# Make it think the venv doesn't exist
mocker.patch(
"pytoil.environments.reqs.ReqTxtEnv.exists", autospec=True, return_value=False
)
mock_create = mocker.patch(
"pytoil.environments.reqs.ReqTxtEnv.create", autospec=True
)
venv.install_self()
mock_create.assert_called_once()
mock_subprocess.assert_called_once_with(
[
f"{venv.executable}",
"-m",
"pip",
"install",
"-r",
"requirements_dev.txt",
"--quiet",
],
check=True,
cwd=root,
)
# --- file: lib/tool_shed/util/tool_util.py | repo: beatrizserrano/galaxy | license: CC-BY-3.0 ---
from galaxy.tool_shed.util.tool_util import (
build_shed_tool_conf_select_field,
build_tool_panel_section_select_field,
copy_sample_file,
copy_sample_files,
generate_message_for_invalid_tools,
get_tool_path_install_dir,
handle_missing_index_file,
is_data_index_sample_file,
new_state,
panel_entry_per_tool,
)
__all__ = (
"build_shed_tool_conf_select_field",
"build_tool_panel_section_select_field",
"copy_sample_file",
"copy_sample_files",
"generate_message_for_invalid_tools",
"get_tool_path_install_dir",
"handle_missing_index_file",
"is_data_index_sample_file",
"new_state",
"panel_entry_per_tool",
)
# --- file: internos/etools/migrations/0006_auto_20190303_2148.py | repo: UNICEFLebanonInnovation/Staging-Neuro | license: MIT ---
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2019-03-03 21:48
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('etools', '0005_auto_20190303_1708'),
]
operations = [
migrations.AddField(
model_name='pca',
name='actual_amount',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='all_currencies_are_consistent',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='budget_currency',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='cp_outputs',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='cso_contribution',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='donor_codes',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='donors',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='flagged_sections',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='fr_currencies_are_consistent',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='fr_currency',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='frs_earliest_start_date',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='frs_latest_end_date',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='frs_total_frs_amt',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='frs_total_intervention_amt',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='frs_total_outstanding_amt',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='grants',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='location_p_codes',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='multi_curr_flag',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='offices',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='offices_names',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='section_names',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='sections',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
migrations.AddField(
model_name='pca',
name='total_budget',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='total_unicef_budget',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='unicef_cash',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='pca',
name='unicef_focal_points',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, null=True, size=None),
),
]
# --- file: tests/unit_tests/test_model_event.py | repo: JamesNolan17/SUTDHousingPortal | license: MIT ---
import sys
import unittest
from datetime import datetime
from pathlib import Path
from pydantic.error_wrappers import ValidationError
src_dir = Path(__file__).resolve().parent.parent.parent / "src"
sys.path.insert(0, str(src_dir))
from api.models.event import Event
class TestEventCreation(unittest.TestCase):
def test_creation_with_missing_data(self):
with self.assertRaises(Exception):
event = Event(
title="Inter Block Movie Night",
event_type="IBE",
meetup_location="BLK 57, Student Lounge",
)
def test_creation_with_missing_data_2(self):
with self.assertRaises(Exception):
event = Event(
event_type="IBE",
meetup_location="BLK 57, Student Lounge",
)
def test_uid_creation(self):
event = Event(
title="Inter Block Movie Night",
event_type="IBE",
meetup_location="BLK 57, Student Lounge",
start_time=datetime.now(),
)
print(event)
self.assertTrue(isinstance(event, Event))
self.assertTrue(event.uid.startswith("E"))
def test_creation_with_minimal_data(self):
_now = datetime.now()
event = Event(
title="Inter Block Movie Night",
event_type="IBE",
meetup_location="BLK 57, Student Lounge",
start_time=_now,
)
print(event)
self.assertTrue(isinstance(event, Event))
self.assertTrue(event.title == "Inter Block Movie Night")
self.assertTrue(event.event_type == "IBE")
self.assertTrue(event.meetup_location == "BLK 57, Student Lounge")
self.assertTrue(event.start_time == _now)
def test_creation_with_full_data(self):
_now = datetime.now()
event = Event(
title="Inter Block Movie Night",
event_type="FE",
meetup_location="Root Cove",
start_time=_now,
block="59",
floor="8",
description="Let's watch a movie together.",
duration_mins=120,
count_attendance=True,
signup_limit=30,
signup_ddl=_now,
archived=False,
)
print(event)
self.assertTrue(isinstance(event, Event))
self.assertTrue(event.title == "Inter Block Movie Night")
self.assertTrue(event.event_type == "FE")
self.assertTrue(event.meetup_location == "Root Cove")
self.assertTrue(event.start_time == _now)
self.assertTrue(event.block == "59")
self.assertTrue(event.floor == "8")
self.assertTrue(event.description == "Let's watch a movie together.")
self.assertTrue(event.duration_mins == 120)
self.assertTrue(event.count_attendance == True)
self.assertTrue(event.signup_limit == 30)
self.assertTrue(event.signup_ddl == _now)
self.assertTrue(event.archived == False)
def test_creation_with_full_data_string_int(self):
_now = datetime.now()
event = Event(
title="Inter Block Movie Night",
event_type="FE",
meetup_location="Root Cove",
start_time=_now,
block="59",
floor="8",
description="Let's watch a movie together.",
duration_mins="120",
count_attendance=True,
signup_limit="30",
signup_ddl=_now,
archived=False,
)
print(event)
self.assertTrue(isinstance(event, Event))
self.assertTrue(event.title == "Inter Block Movie Night")
self.assertTrue(event.event_type == "FE")
self.assertTrue(event.meetup_location == "Root Cove")
self.assertTrue(event.start_time == _now)
self.assertTrue(event.block == "59")
self.assertTrue(event.floor == "8")
self.assertTrue(event.description == "Let's watch a movie together.")
self.assertTrue(event.duration_mins == 120)
self.assertTrue(event.count_attendance == True)
self.assertTrue(event.signup_limit == 30)
self.assertTrue(event.signup_ddl == _now)
self.assertTrue(event.archived == False)
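    # The two tests below rely on the Event model's validators (an assumption
    # based on the asserted values) coercing invalid input back to defaults:
    # duration_mins -> 60, signup_limit -> 20, event_type -> "MEETUP",
    # block/floor -> "ANY", rather than raising a ValidationError.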
def test_creation_with_full_data_invalid_string(self):
_now = datetime.now()
event = Event(
title="Inter Block Movie Night",
event_type="FE",
meetup_location="Root Cove",
start_time=_now,
block="59",
floor="8",
description="Let's watch a movie together.",
duration_mins="xxxxxx",
count_attendance=True,
signup_limit="xxxxxx",
signup_ddl=_now,
archived=False,
)
print(event)
self.assertTrue(isinstance(event, Event))
self.assertTrue(event.title == "Inter Block Movie Night")
self.assertTrue(event.event_type == "FE")
self.assertTrue(event.meetup_location == "Root Cove")
self.assertTrue(event.start_time == _now)
self.assertTrue(event.block == "59")
self.assertTrue(event.floor == "8")
self.assertTrue(event.description == "Let's watch a movie together.")
self.assertTrue(event.duration_mins == 60)
self.assertTrue(event.count_attendance == True)
self.assertTrue(event.signup_limit == 20)
self.assertTrue(event.signup_ddl == _now)
self.assertTrue(event.archived == False)
def test_creation_with_invalid_data(self):
_now = datetime.now()
event = Event(
title="Inter Block Movie Night",
event_type="IBB",
meetup_location="Root Cove",
start_time=_now,
block="999",
floor="888",
description="Let's watch a movie together.",
duration_mins=0,
count_attendance=True,
signup_limit=-10,
archived="False",
)
print(event)
self.assertTrue(isinstance(event, Event))
self.assertTrue(event.title == "Inter Block Movie Night")
self.assertTrue(event.event_type == "MEETUP")
self.assertTrue(event.meetup_location == "Root Cove")
self.assertTrue(event.start_time == _now)
self.assertTrue(event.block == "ANY")
self.assertTrue(event.floor == "ANY")
self.assertTrue(event.description == "Let's watch a movie together.")
self.assertTrue(event.duration_mins == 60)
self.assertTrue(event.count_attendance == True)
self.assertTrue(event.signup_limit == 20)
self.assertTrue(event.signup_ddl == _now)
self.assertTrue(event.archived == False)
if __name__ == "__main__":
unittest.main()
# --- file: manageXML/migrations/0001_initial.py | repo: mikahama/verdd | license: Apache-2.0 ---
# Generated by Django 2.2.1 on 2019-06-14 15:00
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='DataFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('lang_source', models.CharField(max_length=3)),
('lang_target', models.CharField(max_length=3)),
('name', models.CharField(max_length=250)),
('added_date', models.DateTimeField(auto_now_add=True, verbose_name='date published')),
],
),
migrations.CreateModel(
name='Lexeme',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('lexeme', models.CharField(max_length=250)),
('homoId', models.IntegerField(default=0)),
('assonance', models.CharField(blank=True, max_length=250)),
('assonance_rev', models.CharField(blank=True, max_length=250)),
('consonance', models.CharField(blank=True, max_length=250)),
('consonance_rev', models.CharField(blank=True, max_length=250)),
('language', models.CharField(max_length=3)),
('pos', models.CharField(max_length=25)),
('notes', models.CharField(blank=True, max_length=250)),
('added_date', models.DateTimeField(auto_now_add=True, verbose_name='date published')),
('contlex', models.CharField(blank=True, max_length=250)),
('type', models.CharField(blank=True, max_length=25)),
('lemmaId', models.CharField(blank=True, default='', max_length=250)),
('inflexId', models.CharField(blank=True, max_length=25)),
('inflexType', models.IntegerField(blank=True, choices=[(1, '1'), (2, '2'), (3, '3'), (4, '4'), (5, '5'), (99, 'X')], default=None, null=True)),
('deleted', models.BooleanField(default=False)),
('imported_from', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='manageXML.DataFile')),
],
options={
'unique_together': {('lexeme', 'pos', 'homoId', 'language')},
},
),
migrations.CreateModel(
name='Relation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.IntegerField(choices=[(0, 'Translation'), (1, 'Etymology'), (2, 'Compound'), (3, 'Derivation'), (99, 'Other')], default=0)),
('notes', models.CharField(blank=True, max_length=250)),
('checked', models.BooleanField(default=False)),
('added_date', models.DateTimeField(auto_now_add=True, verbose_name='date published')),
('deleted', models.BooleanField(default=False)),
('lexeme_from', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lexeme_from_lexeme_set', to='manageXML.Lexeme')),
('lexeme_to', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lexeme_to_lexeme_set', to='manageXML.Lexeme')),
],
options={
'unique_together': {('lexeme_from', 'lexeme_to', 'type')},
},
),
migrations.CreateModel(
name='MiniParadigm',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('msd', models.CharField(max_length=25)),
('wordform', models.CharField(max_length=250)),
('lexeme', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='manageXML.Lexeme')),
],
),
migrations.CreateModel(
name='HistoricalSource',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('name', models.CharField(max_length=250)),
('page', models.CharField(blank=True, max_length=25)),
('type', models.CharField(max_length=25)),
('notes', models.CharField(blank=True, max_length=250)),
('added_date', models.DateTimeField(blank=True, editable=False, verbose_name='date published')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('relation', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='manageXML.Relation')),
],
options={
'verbose_name': 'historical source',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalRelation',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('type', models.IntegerField(choices=[(0, 'Translation'), (1, 'Etymology'), (2, 'Compound'), (3, 'Derivation'), (99, 'Other')], default=0)),
('notes', models.CharField(blank=True, max_length=250)),
('checked', models.BooleanField(default=False)),
('added_date', models.DateTimeField(blank=True, editable=False, verbose_name='date published')),
('deleted', models.BooleanField(default=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('lexeme_from', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='manageXML.Lexeme')),
('lexeme_to', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='manageXML.Lexeme')),
],
options={
'verbose_name': 'historical relation',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalMiniParadigm',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('msd', models.CharField(max_length=25)),
('wordform', models.CharField(max_length=250)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('lexeme', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='manageXML.Lexeme')),
],
options={
'verbose_name': 'historical mini paradigm',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalLexeme',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('lexeme', models.CharField(max_length=250)),
('homoId', models.IntegerField(default=0)),
('assonance', models.CharField(blank=True, max_length=250)),
('assonance_rev', models.CharField(blank=True, max_length=250)),
('consonance', models.CharField(blank=True, max_length=250)),
('consonance_rev', models.CharField(blank=True, max_length=250)),
('language', models.CharField(max_length=3)),
('pos', models.CharField(max_length=25)),
('notes', models.CharField(blank=True, max_length=250)),
('added_date', models.DateTimeField(blank=True, editable=False, verbose_name='date published')),
('contlex', models.CharField(blank=True, max_length=250)),
('type', models.CharField(blank=True, max_length=25)),
('lemmaId', models.CharField(blank=True, default='', max_length=250)),
('inflexId', models.CharField(blank=True, max_length=25)),
('inflexType', models.IntegerField(blank=True, choices=[(1, '1'), (2, '2'), (3, '3'), (4, '4'), (5, '5'), (99, 'X')], default=None, null=True)),
('deleted', models.BooleanField(default=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('imported_from', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='manageXML.DataFile')),
],
options={
'verbose_name': 'historical lexeme',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalExamples',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('text', models.CharField(max_length=250)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('lexeme', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='manageXML.Lexeme')),
],
options={
'verbose_name': 'historical examples',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='Source',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=250)),
('page', models.CharField(blank=True, max_length=25)),
('type', models.CharField(max_length=25)),
('notes', models.CharField(blank=True, max_length=250)),
('added_date', models.DateTimeField(auto_now_add=True, verbose_name='date published')),
('relation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='manageXML.Relation')),
],
options={
'unique_together': {('relation', 'name')},
},
),
migrations.CreateModel(
name='Examples',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=250)),
('lexeme', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='manageXML.Lexeme')),
],
options={
'unique_together': {('lexeme', 'text')},
},
),
migrations.CreateModel(
name='Affiliation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('lexeme', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='manageXML.Lexeme')),
],
options={
'unique_together': {('lexeme', 'title')},
},
),
]
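# A hedged sketch of the source model behind the Historical* tables above:
# django-simple-history generates them from a HistoricalRecords() field on the
# tracked model. Field names are taken from this migration; the actual
# manageXML models may differ.
#
#     from django.db import models
#     from simple_history.models import HistoricalRecords
#
#     class Lexeme(models.Model):
#         lexeme = models.CharField(max_length=250)
#         language = models.CharField(max_length=3)
#         pos = models.CharField(max_length=25)
#         deleted = models.BooleanField(default=False)
#         history = HistoricalRecords()  # produces the HistoricalLexeme table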
| 61.350211 | 188 | 0.5826 | 1,462 | 14,540 | 5.610123 | 0.091655 | 0.098756 | 0.040966 | 0.073153 | 0.893197 | 0.884906 | 0.877103 | 0.867715 | 0.86674 | 0.855401 | 0 | 0.017281 | 0.255777 | 14,540 | 236 | 189 | 61.610169 | 0.740689 | 0.003095 | 0 | 0.764192 | 1 | 0 | 0.156627 | 0.010281 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.026201 | 0 | 0.043668 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7f9de97ad0183c1adf7514ee049023168d444962 | 50 | py | Python | assets/shaders/white.py | E15dev/pygame-shader-render | 5a773b762c6e8013c1f011a02f8fb0bc2731f86a | ["MIT"] | 2 | 2022-02-06T19:58:26.000Z | 2022-03-09T10:40:17.000Z | assets/shaders/white.py | E15dev/pygame-shader-render | 5a773b762c6e8013c1f011a02f8fb0bc2731f86a | ["MIT"] | null | null | null | assets/shaders/white.py | E15dev/pygame-shader-render | 5a773b762c6e8013c1f011a02f8fb0bc2731f86a | ["MIT"] | null | null | null |
def shader(x, y, z):
return (255, 255, 255)
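# A minimal usage sketch, assuming the renderer in this repo evaluates
# shader(x, y, z) once per pixel (the fill loop below is illustrative, not the
# repo's actual API):
#
#     width, height = 4, 4
#     frame = [[shader(x, y, 0) for x in range(width)] for y in range(height)]
#     # every entry is (255, 255, 255): a solid white frame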
| 16.666667 | 27 | 0.54 | 9 | 50 | 3 | 0.777778 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.28 | 50 | 2 | 28 | 25 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
7fb2d8529fdc1fb892f50f0f0158d0927199801d | 23,437 | py | Python | CubeSolver.py | mattiagiuri/rubikpy | fbaddca587833a746231dd596dfc363f6acef107 | ["Apache-2.0"] | 2 | 2020-12-03T22:29:43.000Z | 2022-02-09T02:57:04.000Z | CubeSolver.py | mattiagiuri/rubikpy | fbaddca587833a746231dd596dfc363f6acef107 | ["Apache-2.0"] | null | null | null | CubeSolver.py | mattiagiuri/rubikpy | fbaddca587833a746231dd596dfc363f6acef107 | ["Apache-2.0"] | null | null | null |
# input must contain faces in this order: white, red, green, orange, blue, yellow
import numpy as np
from CubeMover import CubeMover
class CubeSolver:
def __init__(self, cube):
self.mover = CubeMover(cube)
self.cube = cube
self.yellow_crossed = False
self.yellow_vertexes = False
self.solve_cube()
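# "Sexy move" is the standard cubing trigger R U R' U'; anti_sexy_moves is
# the reversed-order variant U R U' R'.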
def sexy_moves(self):
self.mover.R()
self.mover.U()
self.mover.inv_R()
self.mover.inv_U()
def anti_sexy_moves(self):
self.mover.U()
self.mover.R()
self.mover.inv_U()
self.mover.inv_R()
def adjust_white_edge(self, name):
c = self.mover.cube_encoder.edges[name].coordinates
cube = self.mover.cube
if c == (1, 0, -1):
if cube[2][1][0] == 'W':
self.mover.F()
else:
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
elif c == (-1, 0, -1):
if cube[4][1][2] == 'W':
self.mover.inv_F()
else:
self.mover.inv_D()
self.mover.L()
self.mover.D()
elif c == (0, 1, -1):
if cube[1][0][1] == 'W':
self.mover.F()
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
else:
self.mover.F()
self.mover.F()
elif c == (1, 1, 0):
self.mover.U()
if cube[1][0][1] == 'W':
self.mover.F()
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
else:
self.mover.F()
self.mover.F()
elif c == (0, 1, 1):
self.mover.U()
self.mover.U()
if cube[1][0][1] == 'W':
self.mover.F()
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
else:
self.mover.F()
self.mover.F()
elif c == (-1, 1, 0):
self.mover.inv_U()
if cube[1][0][1] == 'W':
self.mover.F()
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
else:
self.mover.F()
self.mover.F()
elif c == (1, 0, 1):
self.mover.B()
self.mover.U()
self.mover.U()
self.mover.inv_B()
if cube[1][0][1] == 'W':
self.mover.F()
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
else:
self.mover.F()
self.mover.F()
elif c == (-1, 0, 1):
self.mover.inv_B()
self.mover.U()
self.mover.U()
self.mover.B()
if cube[1][0][1] == 'W':
self.mover.F()
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
else:
self.mover.F()
self.mover.F()
elif c == (0, -1, -1):
if cube[1][2][1] == 'W':
self.mover.inv_F()
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
elif c == (-1, -1, 0):
self.mover.inv_L()
if cube[4][1][2] == 'W':
self.mover.inv_F()
else:
self.mover.inv_D()
self.mover.L()
self.mover.D()
elif c == (0, -1, 1):
self.mover.B()
self.mover.B()
self.mover.U()
self.mover.U()
if cube[1][0][1] == 'W':
self.mover.F()
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
else:
self.mover.F()
self.mover.F()
elif c == (1, -1, 0):
self.mover.R()
if cube[2][1][0] == 'W':
self.mover.F()
else:
self.mover.D()
self.mover.inv_R()
self.mover.inv_D()
def solve_white_cross(self):
self.adjust_white_edge('WR')
self.mover.inv_D()
self.adjust_white_edge('WG')
self.mover.inv_D()
self.adjust_white_edge('WO')
self.mover.inv_D()
self.adjust_white_edge('WU')
self.mover.inv_D()
self.mover.moves = self.mover.moves+'\n'
def solve_vertex_base_case(self, cube):
if cube[5][2][2] == 'W':
self.sexy_moves()
self.sexy_moves()
self.sexy_moves()
elif cube[2][0][0] == 'W':
self.sexy_moves()
else:
self.anti_sexy_moves()
def adjust_white_vertex(self, name):
c = self.mover.cube_encoder.vertexes[name].coordinates
cube = self.mover.cube
if c == (1, 1, -1):
self.solve_vertex_base_case(cube)
elif c == (1, 1, 1):
self.mover.U()
self.solve_vertex_base_case(cube)
elif c == (-1, 1, 1):
self.mover.U()
self.mover.U()
self.solve_vertex_base_case(cube)
elif c == (-1, 1, -1):
self.mover.inv_U()
self.solve_vertex_base_case(cube)
elif c == (1, -1, -1):
if not cube[0][0][2] == 'W':
self.sexy_moves()
self.solve_vertex_base_case(cube)
elif c == (1, -1, 1):
self.mover.B()
self.mover.U()
self.mover.inv_B()
self.solve_vertex_base_case(cube)
elif c == (-1, -1, 1):
self.mover.L()
self.mover.U()
self.mover.U()
self.mover.inv_L()
self.solve_vertex_base_case(cube)
elif c == (-1, -1, -1):
self.mover.F()
self.mover.inv_U()
self.mover.inv_F()
self.mover.inv_U()
self.solve_vertex_base_case(cube)
def finish_white_face(self):
self.adjust_white_vertex('WRG')
self.mover.inv_D()
self.mover.moves = self.mover.moves + '\n'
self.adjust_white_vertex('WGO')
self.mover.inv_D()
self.mover.moves = self.mover.moves + '\n'
self.adjust_white_vertex('WOU')
self.mover.inv_D()
self.mover.moves = self.mover.moves + '\n'
self.adjust_white_vertex('WRU')
self.mover.inv_D()
self.mover.moves = self.mover.moves + '\n'
def R_to_G(self):
self.anti_sexy_moves()
self.mover.inv_U()
self.mover.inv_F()
self.mover.U()
self.mover.F()
def R_to_U(self):
self.mover.inv_U()
self.mover.inv_L()
self.mover.U()
self.mover.L()
self.mover.U()
self.mover.F()
self.mover.inv_U()
self.mover.inv_F()
def G_to_R(self):
self.mover.inv_U()
self.mover.inv_F()
self.mover.U()
self.mover.F()
self.mover.U()
self.mover.R()
self.mover.inv_U()
self.mover.inv_R()
def G_to_O(self):
self.mover.U()
self.mover.B()
self.mover.inv_U()
self.mover.inv_B()
self.mover.inv_U()
self.mover.inv_R()
self.mover.U()
self.mover.R()
def O_to_G(self):
self.mover.inv_U()
self.mover.inv_R()
self.mover.U()
self.mover.R()
self.mover.U()
self.mover.B()
self.mover.inv_U()
self.mover.inv_B()
def O_to_U(self):
self.mover.U()
self.mover.L()
self.mover.inv_U()
self.mover.inv_L()
self.mover.inv_U()
self.mover.inv_B()
self.mover.U()
self.mover.B()
def U_to_O(self):
self.mover.inv_U()
self.mover.inv_B()
self.mover.U()
self.mover.B()
self.mover.U()
self.mover.L()
self.mover.inv_U()
self.mover.inv_L()
def U_to_R(self):
self.mover.U()
self.mover.F()
self.mover.inv_U()
self.mover.inv_F()
self.mover.inv_U()
self.mover.inv_L()
self.mover.U()
self.mover.L()
def adjust_RG(self):
c = self.mover.cube_encoder.edges['RG'].coordinates
cube = self.mover.cube
if c == (1, 1, 0):
if cube[2][0][1] == 'G':
self.G_to_R()
else:
self.mover.U()
self.R_to_G()
elif c == (0, 1, -1):
if cube[1][0][1] == 'R':
self.R_to_G()
else:
self.mover.inv_U()
self.G_to_R()
elif c == (-1, 1, 0):
if cube[4][0][1] == 'R':
self.mover.inv_U()
self.R_to_G()
else:
self.mover.U()
self.mover.U()
self.G_to_R()
elif c == (0, 1, 1):
if cube[3][0][1] == 'G':
self.mover.U()
self.G_to_R()
else:
self.mover.U()
self.mover.U()
self.R_to_G()
elif c == (1, 0, -1):
if cube[1][1][2] == 'G':
self.R_to_G()
self.mover.U()
self.mover.U()
self.R_to_G()
elif c == (1, 0, 1):
self.G_to_O()
if cube[4][0][1] == 'R':
self.mover.inv_U()
self.R_to_G()
else:
self.mover.U()
self.mover.U()
self.G_to_R()
elif c == (-1, 0, 1):
self.O_to_U()
if cube[1][0][1] == 'R':
self.R_to_G()
else:
self.mover.inv_U()
self.G_to_R()
elif c == (-1, 0, -1):
self.U_to_R()
if cube[2][0][1] == 'G':
self.G_to_R()
else:
self.mover.U()
self.R_to_G()
def adjust_GO(self):
c = self.mover.cube_encoder.edges['GO'].coordinates
cube = self.mover.cube
if c == (1, 1, 0):
if cube[2][0][1] == 'O':
self.mover.inv_U()
self.O_to_G()
else:
self.G_to_O()
elif c == (0, 1, -1):
if cube[1][0][1] == 'O':
self.mover.U()
self.mover.U()
self.O_to_G()
else:
self.mover.inv_U()
self.G_to_O()
elif c == (-1, 1, 0):
if cube[4][0][1] == 'O':
self.mover.U()
self.O_to_G()
else:
self.mover.U()
self.mover.U()
self.G_to_O()
elif c == (0, 1, 1):
if cube[3][0][1] == 'O':
self.O_to_G()
else:
self.mover.U()
self.G_to_O()
elif c == (1, 0, -1):
self.R_to_G()
if cube[3][0][1] == 'O':
self.O_to_G()
else:
self.mover.U()
self.G_to_O()
elif c == (1, 0, 1):
if cube[2][1][2] == 'O':
self.G_to_O()
self.mover.U()
self.mover.U()
self.G_to_O()
elif c == (-1, 0, 1):
self.O_to_U()
if cube[1][0][1] == 'O':
self.mover.U()
self.mover.U()
self.O_to_G()
else:
self.mover.inv_U()
self.G_to_O()
elif c == (-1, 0, -1):
self.U_to_R()
if cube[2][0][1] == 'O':
self.mover.inv_U()
self.O_to_G()
else:
self.G_to_O()
def adjust_OU(self):
c = self.mover.cube_encoder.edges['OU'].coordinates
cube = self.mover.cube
if c == (1, 1, 0):
if cube[2][0][1] == 'U':
self.mover.U()
self.mover.U()
self.U_to_O()
else:
self.mover.inv_U()
self.O_to_U()
elif c == (0, 1, -1):
if cube[1][0][1] == 'U':
self.mover.U()
self.U_to_O()
else:
self.mover.U()
self.mover.U()
self.O_to_U()
elif c == (-1, 1, 0):
if cube[4][0][1] == 'U':
self.U_to_O()
else:
self.mover.U()
self.O_to_U()
elif c == (0, 1, 1):
if cube[3][0][1] == 'U':
self.mover.inv_U()
self.U_to_O()
else:
self.O_to_U()
elif c == (1, 0, -1):
self.R_to_G()
if cube[3][0][1] == 'U':
self.mover.inv_U()
self.U_to_O()
else:
self.O_to_U()
elif c == (1, 0, 1):
self.G_to_O()
if cube[4][0][1] == 'U':
self.U_to_O()
else:
self.mover.U()
self.O_to_U()
elif c == (-1, 0, 1):
if cube[3][1][2] == 'U':
self.O_to_U()
self.mover.U()
self.mover.U()
self.O_to_U()
elif c == (-1, 0, -1):
self.U_to_R()
if cube[2][0][1] == 'U':
self.mover.U()
self.mover.U()
self.U_to_O()
else:
self.mover.inv_U()
self.O_to_U()
def adjust_RU(self):
c = self.mover.cube_encoder.edges['RU'].coordinates
cube = self.mover.cube
if c == (1, 1, 0):
if cube[2][0][1] == 'R':
self.mover.U()
self.R_to_U()
else:
self.mover.U()
self.mover.U()
self.U_to_R()
elif c == (0, 1, -1):
if cube[1][0][1] == 'R':
self.R_to_U()
else:
self.mover.U()
self.U_to_R()
elif c == (-1, 1, 0):
if cube[4][0][1] == 'R':
self.mover.inv_U()
self.R_to_U()
else:
self.U_to_R()
elif c == (0, 1, 1):
if cube[3][0][1] == 'R':
self.mover.U()
self.mover.U()
self.R_to_U()
else:
self.mover.inv_U()
self.U_to_R()
elif c == (1, 0, -1):
self.R_to_G()
if cube[3][0][1] == 'R':
self.mover.U()
self.mover.U()
self.R_to_U()
else:
self.mover.inv_U()
self.U_to_R()
elif c == (1, 0, 1):
self.G_to_O()
if cube[4][0][1] == 'R':
self.mover.inv_U()
self.R_to_U()
else:
self.U_to_R()
elif c == (-1, 0, 1):
self.O_to_U()
if cube[1][0][1] == 'R':
self.R_to_U()
else:
self.mover.U()
self.U_to_R()
elif c == (-1, 0, -1):
if cube[4][1][2] == 'R':
self.U_to_R()
self.mover.U()
self.mover.U()
self.U_to_R()
def solve_second_layer(self):
self.adjust_RG()
self.mover.moves = self.mover.moves + '\n'
self.adjust_GO()
self.mover.moves = self.mover.moves + '\n'
self.adjust_OU()
self.mover.moves = self.mover.moves + '\n'
self.adjust_RU()
self.mover.moves = self.mover.moves + '\n'
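# The two standard yellow-cross OLL triggers: solve_L performs F U R U' R' F'
# (for an L shape) and solve_line performs F R U R' U' F' (for a line).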
def solve_L(self):
self.mover.F()
self.mover.U()
self.mover.R()
self.mover.inv_U()
self.mover.inv_R()
self.mover.inv_F()
def solve_line(self):
self.mover.F()
self.mover.R()
self.mover.U()
self.mover.inv_R()
self.mover.inv_U()
self.mover.inv_F()
def make_yellow_cross(self):
cube = self.mover.cube
if not (cube[5][0][1] == 'Y' and cube[5][1][0] == 'Y' and cube[5][1][2] == 'Y' and cube[5][2][1] == 'Y'):
if not (cube[5][0][1] == 'Y' or cube[5][1][0] == 'Y' or cube[5][1][2] == 'Y' or cube[5][2][1] == 'Y'):
self.solve_line()
self.solve_L()
elif cube[5][0][1] == 'Y' and cube[5][1][0] == 'Y':
self.solve_L()
elif cube[5][0][1] == 'Y' and cube[5][1][2] == 'Y':
self.mover.inv_U()
self.solve_L()
elif cube[5][2][1] == 'Y' and cube[5][1][2] == 'Y':
self.mover.U()
self.mover.U()
self.solve_L()
elif cube[5][2][1] == 'Y' and cube[5][1][0] == 'Y':
self.mover.U()
self.solve_L()
elif cube[5][1][0] == 'Y' and cube[5][1][2] == 'Y':
self.solve_line()
elif cube[5][0][1] == 'Y' and cube[5][2][1] == 'Y':
self.mover.U()
self.solve_line()
self.mover.moves = self.mover.moves + '\n'
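# Sune trigger (R U R' U R U2 R'), executed from each of the four side faces
# below; used to cycle the yellow cross edges into their correct positions.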
def sune_R(self):
self.mover.R()
self.mover.U()
self.mover.inv_R()
self.mover.U()
self.mover.R()
self.mover.U()
self.mover.U()
self.mover.inv_R()
def sune_G(self):
self.mover.B()
self.mover.U()
self.mover.inv_B()
self.mover.U()
self.mover.B()
self.mover.U()
self.mover.U()
self.mover.inv_B()
def sune_O(self):
self.mover.L()
self.mover.U()
self.mover.inv_L()
self.mover.U()
self.mover.L()
self.mover.U()
self.mover.U()
self.mover.inv_L()
def sune_U(self):
self.mover.F()
self.mover.U()
self.mover.inv_F()
self.mover.U()
self.mover.F()
self.mover.U()
self.mover.U()
self.mover.inv_F()
def check_yellow_cross_positions(self):
cube = self.mover.cube
if cube[1][0][1] == 'R' and cube[4][0][1] == 'U':
self.sune_O()
self.yellow_crossed = True
elif cube[1][0][1] == 'R' and cube[2][0][1] == 'G':
self.sune_U()
self.yellow_crossed = True
elif cube[2][0][1] == 'G' and cube[3][0][1] == 'O':
self.sune_R()
self.yellow_crossed = True
elif cube[3][0][1] == 'O' and cube[4][0][1] == 'U':
self.sune_G()
self.yellow_crossed = True
elif cube[1][0][1] == 'R' and cube[3][0][1] == 'O':
self.sune_G()
self.sune_R()
self.yellow_crossed = True
elif cube[4][0][1] == 'U' and cube[2][0][1] == 'G':
self.sune_R()
self.sune_U()
self.yellow_crossed = True
self.mover.U()
self.mover.moves = self.mover.moves + '\n'
def adjust_yellow_cross_edges(self):
cube = self.mover.cube
a = (cube[1][0][1] == 'R' and cube[2][0][1] == 'G' and cube[3][0][1] == 'O' and cube[4][0][1] == 'U')
b = (cube[2][0][1] == 'R' and cube[3][0][1] == 'G' and cube[4][0][1] == 'O' and cube[1][0][1] == 'U')
c = (cube[3][0][1] == 'R' and cube[4][0][1] == 'G' and cube[1][0][1] == 'O' and cube[2][0][1] == 'U')
d = (cube[4][0][1] == 'R' and cube[1][0][1] == 'G' and cube[2][0][1] == 'O' and cube[3][0][1] == 'U')
if a:
self.yellow_crossed = True
self.mover.moves = self.mover.moves + '\n'
elif b:
self.yellow_crossed = True
self.mover.U()
self.mover.moves = self.mover.moves + '\n'
elif c:
self.yellow_crossed = True
self.mover.U()
self.mover.U()
self.mover.moves = self.mover.moves + '\n'
elif d:
self.yellow_crossed = True
self.mover.inv_U()
self.mover.moves = self.mover.moves + '\n'
elif not (a or b or c or d):
while not self.yellow_crossed:
self.check_yellow_cross_positions()
def check_yellow_vertexes(self):
v = self.mover.cube_encoder.vertexes
if v['RGY'].coordinates == v['RGY'].final_coordinates:
self.mover.B()
self.mover.inv_U()
self.mover.inv_F()
self.mover.U()
self.mover.inv_B()
self.mover.inv_U()
self.mover.F()
self.mover.U()
elif v['GOY'].coordinates == v['GOY'].final_coordinates:
self.mover.L()
self.mover.inv_U()
self.mover.inv_R()
self.mover.U()
self.mover.inv_L()
self.mover.inv_U()
self.mover.R()
self.mover.U()
elif v['OUY'].coordinates == v['OUY'].final_coordinates:
self.mover.F()
self.mover.inv_U()
self.mover.inv_B()
self.mover.U()
self.mover.inv_F()
self.mover.inv_U()
self.mover.B()
self.mover.U()
elif v['RUY'].coordinates == v['RUY'].final_coordinates:
self.mover.R()
self.mover.inv_U()
self.mover.inv_L()
self.mover.U()
self.mover.inv_R()
self.mover.inv_U()
self.mover.L()
self.mover.U()
else:
self.mover.R()
self.mover.inv_U()
self.mover.inv_L()
self.mover.U()
self.mover.inv_R()
self.mover.inv_U()
self.mover.L()
self.mover.U()
def adjust_yellow_vertexes(self):
v = self.mover.cube_encoder.vertexes
while not self.yellow_vertexes:
self.check_yellow_vertexes()
if v['RGY'].coordinates == v['RGY'].final_coordinates and v['GOY'].coordinates == v['GOY'].final_coordinates and v['OUY'].coordinates == v['OUY'].final_coordinates and v['RUY'].coordinates == v['RUY'].final_coordinates:
self.yellow_vertexes = True
self.mover.moves = self.mover.moves+'\n'
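# L D L' D' trigger used by finish_yellow_face to twist the last-layer corners.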
def final_sexy_moves(self):
self.mover.L()
self.mover.D()
self.mover.inv_L()
self.mover.inv_D()
def finish_yellow_face(self):
cube = self.mover.cube
for i in range(4):
if cube[1][0][0] == 'Y':
self.final_sexy_moves()
self.final_sexy_moves()
self.final_sexy_moves()
self.final_sexy_moves()
elif cube[4][0][2] == 'Y':
self.final_sexy_moves()
self.final_sexy_moves()
self.mover.U()
self.mover.moves = self.mover.moves + '\n'
def solve_cube(self):
self.solve_white_cross()
self.finish_white_face()
self.solve_second_layer()
self.make_yellow_cross()
self.adjust_yellow_cross_edges()
self.adjust_yellow_vertexes()
self.finish_yellow_face()
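# A hedged usage sketch (CubeMover comes from this repo; the face encoding is
# inferred from the checks above: six 3x3 letter grids in the order white,
# red, green, orange, blue, yellow, with blue stickers written as 'U'):
#
#     solved = [[[c] * 3 for _ in range(3)] for c in ('W', 'R', 'G', 'O', 'U', 'Y')]
#     solver = CubeSolver(solved)   # solve_cube() runs from __init__
#     print(solver.mover.moves)     # accumulated move sequence, one stage per line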
| 27.868014 | 231 | 0.41891 | 3,112 | 23,437 | 3.000964 | 0.032776 | 0.369097 | 0.168326 | 0.157404 | 0.884891 | 0.852661 | 0.822572 | 0.771175 | 0.72406 | 0.647821 | 0 | 0.033024 | 0.421172 | 23,437 | 840 | 232 | 27.90119 | 0.655388 | 0.003371 | 0 | 0.822314 | 0 | 0 | 0.008863 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.048209 | false | 0 | 0.002755 | 0 | 0.052342 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f6f0e7e9ee03e11df638f4c93af2cd8a5f786675 | 105,392 | py | Python | infoblox_netmri/api/broker/v3_8_0/if_perf_daily_broker.py | infobloxopen/infoblox_netmri | aa1c744df7e439dbe163bb9edd165e4e85a9771b | ["Apache-2.0"] | 12 | 2016-02-19T12:37:54.000Z | 2022-03-04T20:11:08.000Z | infoblox_netmri/api/broker/v3_8_0/if_perf_daily_broker.py | infobloxopen/infoblox_netmri | aa1c744df7e439dbe163bb9edd165e4e85a9771b | ["Apache-2.0"] | 18 | 2015-11-12T18:37:00.000Z | 2021-05-19T07:59:55.000Z | infoblox_netmri/api/broker/v3_8_0/if_perf_daily_broker.py | infobloxopen/infoblox_netmri | aa1c744df7e439dbe163bb9edd165e4e85a9771b | ["Apache-2.0"] | 18 | 2016-01-07T12:04:34.000Z | 2022-03-31T11:05:41.000Z |
from ..broker import Broker
class IfPerfDailyBroker(Broker):
controller = "if_perf_dailies"
def index(self, **kwargs):
"""Lists the available if perf dailies. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which interface daily performance information was collected.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which interface daily performance information was collected.
:type DeviceID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` today
:param starttime: The data returned will represent the if perf dailies with this date and time as lower boundary. If omitted, the result will indicate the most recently collected data.
:type starttime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` tomorrow
:param endtime: The data returned will represent the if perf dailies with this date and time as upper boundary. If omitted, the result will indicate the most recently collected data.
:type endtime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceID
:param sort: The data field(s) to use for sorting the output. Default is DeviceID. Valid values are DataSourceID, StartTime, EndTime, DeviceID, ifIndex, ifTotalChanges, ifInOctets, ifInUcastPkts, ifInNUcastPkts, ifInMulticastPkts, ifInBroadcastPkts, ifInDiscards, ifInErrors, ifOutOctets, ifOutUcastPkts, ifOutNUcastPkts, ifOutMulticastPkts, ifOutBroadcastPkts, ifOutDiscards, ifOutErrors, ifAlignmentErrors, ifFCSErrors, ifLateCollisions, InThru, OutThru, TotalThru, InUtil, OutUtil, TotalUtil, InErrorPct, OutErrorPct, TotalErrorPct, InBcastPct, OutBcastPct, TotalBcastPct, InDiscardPct, OutDiscardPct, TotalDiscardPct.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each IfPerfDaily. Valid values are DataSourceID, StartTime, EndTime, DeviceID, ifIndex, ifTotalChanges, ifInOctets, ifInUcastPkts, ifInNUcastPkts, ifInMulticastPkts, ifInBroadcastPkts, ifInDiscards, ifInErrors, ifOutOctets, ifOutUcastPkts, ifOutNUcastPkts, ifOutMulticastPkts, ifOutBroadcastPkts, ifOutDiscards, ifOutErrors, ifAlignmentErrors, ifFCSErrors, ifLateCollisions, InThru, OutThru, TotalThru, InUtil, OutUtil, TotalUtil, InErrorPct, OutErrorPct, TotalErrorPct, InBcastPct, OutBcastPct, TotalBcastPct, InDiscardPct, OutDiscardPct, TotalDiscardPct. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return if_perf_dailies: An array of the IfPerfDaily objects that match the specified input criteria.
:rtype if_perf_dailies: Array of IfPerfDaily
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
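# A hedged usage sketch (InfobloxNetMRI and get_broker() come from this
# package; the host and credentials are placeholders):
#
#     from infoblox_netmri.client import InfobloxNetMRI
#     client = InfobloxNetMRI(host="netmri.example.com",
#                             username="admin", password="secret")
#     broker = client.get_broker("IfPerfDaily")
#     rows = broker.index(DeviceGroupID=[1], limit=100, sort=["DeviceID"])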
def search(self, **kwargs):
"""Lists the available if perf dailies matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which interface daily performance information was collected.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier for the device from which interface daily performance information was collected.
:type DeviceID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param EndTime: The date and time the record was last modified in NetMRI.
:type EndTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param EndTime: The date and time the record was last modified in NetMRI.
:type EndTime: Array of DateTime
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param InBcastPct: The total number of incoming broadcast packets.
:type InBcastPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InBcastPct: The total number of incoming broadcast packets.
:type InBcastPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param InDiscardPct: The total number of incoming discarded packets.
:type InDiscardPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InDiscardPct: The total number of incoming discarded packets.
:type InDiscardPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param InErrorPct: The total number of incoming error packets.
:type InErrorPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InErrorPct: The total number of incoming error packets.
:type InErrorPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param InThru: The number of packets coming from the starting point.
:type InThru: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InThru: The number of packets coming from the starting point.
:type InThru: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param InUtil: The incoming utilization of each interface.
:type InUtil: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param InUtil: The incoming utilization of each interface.
:type InUtil: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param OutBcastPct: The total number of outgoing broadcast packets.
:type OutBcastPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param OutBcastPct: The total number of outgoing broadcast packets.
:type OutBcastPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param OutDiscardPct: The total number of outgoing discarded packets.
:type OutDiscardPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param OutDiscardPct: The total number of outgoing discarded packets.
:type OutDiscardPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param OutErrorPct: The total number of outgoing error packets.
:type OutErrorPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param OutErrorPct: The total number of outgoing error packets.
:type OutErrorPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param OutThru: The number of packets reaching the destination point.
:type OutThru: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param OutThru: The number of packets reaching the destination point.
:type OutThru: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param OutUtil: The outgoing utilization of each interface.
:type OutUtil: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param OutUtil: The outgoing utilization of each interface.
:type OutUtil: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param StartTime: The date and time the record was initially created in NetMRI.
:type StartTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param StartTime: The date and time the record was initially created in NetMRI.
:type StartTime: Array of DateTime
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param TotalBcastPct: The total number of broadcast packets.
:type TotalBcastPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param TotalBcastPct: The total number of broadcast packets.
:type TotalBcastPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param TotalDiscardPct: The total number of discard packets in each interface.
:type TotalDiscardPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param TotalDiscardPct: The total number of discard packets in each interface.
:type TotalDiscardPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param TotalErrorPct: The total number of error packets.
:type TotalErrorPct: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param TotalErrorPct: The total number of error packets.
:type TotalErrorPct: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param TotalThru: The total number of packets passing through an interface.
:type TotalThru: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param TotalThru: The total number of packets passing through an interface.
:type TotalThru: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param TotalUtil: The total utilization of each interface.
:type TotalUtil: Float
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param TotalUtil: The total utilization of each interface.
:type TotalUtil: Array of Float
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifAlignmentErrors: The alignment errors of each interface.
:type ifAlignmentErrors: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifAlignmentErrors: The alignment errors of each interface.
:type ifAlignmentErrors: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifFCSErrors: The FCS Errors of each interface.
:type ifFCSErrors: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifFCSErrors: The FCS Errors of each interface.
:type ifFCSErrors: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifInBroadcastPkts: The number of incoming broadcast packets of an interface.
:type ifInBroadcastPkts: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifInBroadcastPkts: The number of incoming broadcast packets of an interface.
:type ifInBroadcastPkts: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifInDiscards: The number of incoming discard packets of an interface.
:type ifInDiscards: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifInDiscards: The number of incoming discard packets of an interface.
:type ifInDiscards: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifInErrors: The number of incoming errors of an interface.
:type ifInErrors: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifInErrors: The number of incoming errors of an interface.
:type ifInErrors: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifInMulticastPkts: The number of incoming multicast packets of an interface.
:type ifInMulticastPkts: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifInMulticastPkts: The number of incoming multicast packets of an interface.
:type ifInMulticastPkts: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifInNUcastPkts: The number of non-unicast packets in the local interface's daily performance.
:type ifInNUcastPkts: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifInNUcastPkts: The number of non-unicast packets in the local interface's daily performance.
:type ifInNUcastPkts: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifInOctets: The number of incoming octets in interface daily performance.
:type ifInOctets: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifInOctets: The number of incoming octets in interface daily performance.
:type ifInOctets: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifInUcastPkts: The number of incoming unicast packets in the local interface's daily performance.
:type ifInUcastPkts: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifInUcastPkts: The number of incoming unicast packets in the local interface's daily performance.
:type ifInUcastPkts: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifIndex: The current index of local interface for the interface daily performance table entry.
:type ifIndex: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifIndex: The current index of local interface for the interface daily performance table entry.
:type ifIndex: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifLateCollisions: The number of late collisions in the interface's daily performance.
:type ifLateCollisions: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifLateCollisions: The number of late collisions in the interface's daily performance.
:type ifLateCollisions: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifOutBroadcastPkts: The outgoing broadcast packets of each interface.
:type ifOutBroadcastPkts: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifOutBroadcastPkts: The outgoing broadcast packets of each interface.
:type ifOutBroadcastPkts: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifOutDiscards: The outgoing discarded packets of an interface.
:type ifOutDiscards: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifOutDiscards: The outgoing discarded packets of an interface.
:type ifOutDiscards: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifOutErrors: The outgoing errors of an interface.
:type ifOutErrors: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifOutErrors: The outgoing errors of an interface.
:type ifOutErrors: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifOutMulticastPkts: The outgoing multicast packets of each interface.
:type ifOutMulticastPkts: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifOutMulticastPkts: The outgoing multicast packets of each interface.
:type ifOutMulticastPkts: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifOutNUcastPkts: The outgoing non-unicast packets of an interface.
:type ifOutNUcastPkts: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifOutNUcastPkts: The outgoing non-unicast packets of an interface.
:type ifOutNUcastPkts: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifOutOctets: The number of outgoing octets.
:type ifOutOctets: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifOutOctets: The number of outgoing octets.
:type ifOutOctets: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifOutUcastPkts: The outgoing unicast packets of an interface.
:type ifOutUcastPkts: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifOutUcastPkts: The outgoing unicast packets of an interface.
:type ifOutUcastPkts: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param ifTotalChanges: The total number of changes occurring in each interface.
:type ifTotalChanges: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ifTotalChanges: The total number of changes occurring in each interface.
:type ifTotalChanges: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` today
:param starttime: The data returned will represent the if perf dailies with this date and time as lower boundary. If omitted, the result will indicate the most recently collected data.
:type starttime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` tomorrow
:param endtime: The data returned will represent the if perf dailies with this date and time as upper boundary. If omitted, the result will indicate the most recently collected data.
:type endtime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceID
:param sort: The data field(s) to use for sorting the output. Default is DeviceID. Valid values are DataSourceID, StartTime, EndTime, DeviceID, ifIndex, ifTotalChanges, ifInOctets, ifInUcastPkts, ifInNUcastPkts, ifInMulticastPkts, ifInBroadcastPkts, ifInDiscards, ifInErrors, ifOutOctets, ifOutUcastPkts, ifOutNUcastPkts, ifOutMulticastPkts, ifOutBroadcastPkts, ifOutDiscards, ifOutErrors, ifAlignmentErrors, ifFCSErrors, ifLateCollisions, InThru, OutThru, TotalThru, InUtil, OutUtil, TotalUtil, InErrorPct, OutErrorPct, TotalErrorPct, InBcastPct, OutBcastPct, TotalBcastPct, InDiscardPct, OutDiscardPct, TotalDiscardPct.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each IfPerfDaily. Valid values are DataSourceID, StartTime, EndTime, DeviceID, ifIndex, ifTotalChanges, ifInOctets, ifInUcastPkts, ifInNUcastPkts, ifInMulticastPkts, ifInBroadcastPkts, ifInDiscards, ifInErrors, ifOutOctets, ifOutUcastPkts, ifOutNUcastPkts, ifOutMulticastPkts, ifOutBroadcastPkts, ifOutDiscards, ifOutErrors, ifAlignmentErrors, ifFCSErrors, ifLateCollisions, InThru, OutThru, TotalThru, InUtil, OutUtil, TotalUtil, InErrorPct, OutErrorPct, TotalErrorPct, InBcastPct, OutBcastPct, TotalBcastPct, InDiscardPct, OutDiscardPct, TotalDiscardPct. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against if perf dailies, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: DataSourceID, DeviceID, EndTime, InBcastPct, InDiscardPct, InErrorPct, InThru, InUtil, OutBcastPct, OutDiscardPct, OutErrorPct, OutThru, OutUtil, StartTime, TotalBcastPct, TotalDiscardPct, TotalErrorPct, TotalThru, TotalUtil, ifAlignmentErrors, ifFCSErrors, ifInBroadcastPkts, ifInDiscards, ifInErrors, ifInMulticastPkts, ifInNUcastPkts, ifInOctets, ifInUcastPkts, ifIndex, ifLateCollisions, ifOutBroadcastPkts, ifOutDiscards, ifOutErrors, ifOutMulticastPkts, ifOutNUcastPkts, ifOutOctets, ifOutUcastPkts, ifTotalChanges.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return if_perf_dailies: An array of the IfPerfDaily objects that match the specified input criteria.
:rtype if_perf_dailies: Array of IfPerfDaily
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
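# A hedged sketch of what search() adds over index(): field filters plus an
# optional containment/regex query across the listed attributes (values below
# are illustrative):
#
#     rows = broker.search(DeviceID=[10],
#                          starttime="2022-01-01 00:00",
#                          endtime="2022-01-02 00:00")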
def find(self, **kwargs):
"""Lists the available if perf dailies matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, DeviceID, EndTime, InBcastPct, InDiscardPct, InErrorPct, InThru, InUtil, OutBcastPct, OutDiscardPct, OutErrorPct, OutThru, OutUtil, StartTime, TotalBcastPct, TotalDiscardPct, TotalErrorPct, TotalThru, TotalUtil, ifAlignmentErrors, ifFCSErrors, ifInBroadcastPkts, ifInDiscards, ifInErrors, ifInMulticastPkts, ifInNUcastPkts, ifInOctets, ifInUcastPkts, ifIndex, ifLateCollisions, ifOutBroadcastPkts, ifOutDiscards, ifOutErrors, ifOutMulticastPkts, ifOutNUcastPkts, ifOutOctets, ifOutUcastPkts, ifTotalChanges.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier for the device from which interface daily performance information was collected. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified.
:type val_f_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified.
:type val_c_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_EndTime: The operator to apply to the field EndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. EndTime: The date and time the record was last modified in NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_EndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_EndTime: If op_EndTime is specified, the field named in this input will be compared to the value in EndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_EndTime must be specified if op_EndTime is specified.
:type val_f_EndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_EndTime: If op_EndTime is specified, this value will be compared to the value in EndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_EndTime must be specified if op_EndTime is specified.
:type val_c_EndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_InBcastPct: The operator to apply to the field InBcastPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. InBcastPct: The total number of incoming broadcast packets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_InBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_InBcastPct: If op_InBcastPct is specified, the field named in this input will be compared to the value in InBcastPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_InBcastPct must be specified if op_InBcastPct is specified.
:type val_f_InBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_InBcastPct: If op_InBcastPct is specified, this value will be compared to the value in InBcastPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_InBcastPct must be specified if op_InBcastPct is specified.
:type val_c_InBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_InDiscardPct: The operator to apply to the field InDiscardPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. InDiscardPct: The total number of incoming discarded packets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_InDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_InDiscardPct: If op_InDiscardPct is specified, the field named in this input will be compared to the value in InDiscardPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_InDiscardPct must be specified if op_InDiscardPct is specified.
:type val_f_InDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_InDiscardPct: If op_InDiscardPct is specified, this value will be compared to the value in InDiscardPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_InDiscardPct must be specified if op_InDiscardPct is specified.
:type val_c_InDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_InErrorPct: The operator to apply to the field InErrorPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. InErrorPct: The total number of incoming error packets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_InErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_InErrorPct: If op_InErrorPct is specified, the field named in this input will be compared to the value in InErrorPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_InErrorPct must be specified if op_InErrorPct is specified.
:type val_f_InErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_InErrorPct: If op_InErrorPct is specified, this value will be compared to the value in InErrorPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_InErrorPct must be specified if op_InErrorPct is specified.
:type val_c_InErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_InThru: The operator to apply to the field InThru. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. InThru: The number of packets coming from the starting point. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_InThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_InThru: If op_InThru is specified, the field named in this input will be compared to the value in InThru using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_InThru must be specified if op_InThru is specified.
:type val_f_InThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_InThru: If op_InThru is specified, this value will be compared to the value in InThru using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_InThru must be specified if op_InThru is specified.
:type val_c_InThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_InUtil: The operator to apply to the field InUtil. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. InUtil: The inbound utilization of each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_InUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_InUtil: If op_InUtil is specified, the field named in this input will be compared to the value in InUtil using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_InUtil must be specified if op_InUtil is specified.
:type val_f_InUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_InUtil: If op_InUtil is specified, this value will be compared to the value in InUtil using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_InUtil must be specified if op_InUtil is specified.
:type val_c_InUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_OutBcastPct: The operator to apply to the field OutBcastPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. OutBcastPct: The total number of outgoing broadcast packets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_OutBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_OutBcastPct: If op_OutBcastPct is specified, the field named in this input will be compared to the value in OutBcastPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_OutBcastPct must be specified if op_OutBcastPct is specified.
:type val_f_OutBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_OutBcastPct: If op_OutBcastPct is specified, this value will be compared to the value in OutBcastPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_OutBcastPct must be specified if op_OutBcastPct is specified.
:type val_c_OutBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_OutDiscardPct: The operator to apply to the field OutDiscardPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. OutDiscardPct: The total number of outgoing discarded packets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_OutDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_OutDiscardPct: If op_OutDiscardPct is specified, the field named in this input will be compared to the value in OutDiscardPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_OutDiscardPct must be specified if op_OutDiscardPct is specified.
:type val_f_OutDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_OutDiscardPct: If op_OutDiscardPct is specified, this value will be compared to the value in OutDiscardPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_OutDiscardPct must be specified if op_OutDiscardPct is specified.
:type val_c_OutDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_OutErrorPct: The operator to apply to the field OutErrorPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. OutErrorPct: The total number of outgoing error packets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_OutErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_OutErrorPct: If op_OutErrorPct is specified, the field named in this input will be compared to the value in OutErrorPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_OutErrorPct must be specified if op_OutErrorPct is specified.
:type val_f_OutErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_OutErrorPct: If op_OutErrorPct is specified, this value will be compared to the value in OutErrorPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_OutErrorPct must be specified if op_OutErrorPct is specified.
:type val_c_OutErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_OutThru: The operator to apply to the field OutThru. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. OutThru: The number of packets reaching the destination point. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_OutThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_OutThru: If op_OutThru is specified, the field named in this input will be compared to the value in OutThru using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_OutThru must be specified if op_OutThru is specified.
:type val_f_OutThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_OutThru: If op_OutThru is specified, this value will be compared to the value in OutThru using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_OutThru must be specified if op_OutThru is specified.
:type val_c_OutThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_OutUtil: The operator to apply to the field OutUtil. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. OutUtil: The outbound utilization of each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_OutUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_OutUtil: If op_OutUtil is specified, the field named in this input will be compared to the value in OutUtil using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_OutUtil must be specified if op_OutUtil is specified.
:type val_f_OutUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_OutUtil: If op_OutUtil is specified, this value will be compared to the value in OutUtil using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_OutUtil must be specified if op_OutUtil is specified.
:type val_c_OutUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_StartTime: The operator to apply to the field StartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. StartTime: The date and time the record was initially created in NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_StartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_StartTime: If op_StartTime is specified, the field named in this input will be compared to the value in StartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_StartTime must be specified if op_StartTime is specified.
:type val_f_StartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_StartTime: If op_StartTime is specified, this value will be compared to the value in StartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_StartTime must be specified if op_StartTime is specified.
:type val_c_StartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_TotalBcastPct: The operator to apply to the field TotalBcastPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. TotalBcastPct: The total number of broadcast packets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_TotalBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_TotalBcastPct: If op_TotalBcastPct is specified, the field named in this input will be compared to the value in TotalBcastPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_TotalBcastPct must be specified if op_TotalBcastPct is specified.
:type val_f_TotalBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_TotalBcastPct: If op_TotalBcastPct is specified, this value will be compared to the value in TotalBcastPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_TotalBcastPct must be specified if op_TotalBcastPct is specified.
:type val_c_TotalBcastPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_TotalDiscardPct: The operator to apply to the field TotalDiscardPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. TotalDiscardPct: The total number of discard packets in each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_TotalDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_TotalDiscardPct: If op_TotalDiscardPct is specified, the field named in this input will be compared to the value in TotalDiscardPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_TotalDiscardPct must be specified if op_TotalDiscardPct is specified.
:type val_f_TotalDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_TotalDiscardPct: If op_TotalDiscardPct is specified, this value will be compared to the value in TotalDiscardPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_TotalDiscardPct must be specified if op_TotalDiscardPct is specified.
:type val_c_TotalDiscardPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_TotalErrorPct: The operator to apply to the field TotalErrorPct. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. TotalErrorPct: The total number of error packets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_TotalErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_TotalErrorPct: If op_TotalErrorPct is specified, the field named in this input will be compared to the value in TotalErrorPct using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_TotalErrorPct must be specified if op_TotalErrorPct is specified.
:type val_f_TotalErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_TotalErrorPct: If op_TotalErrorPct is specified, this value will be compared to the value in TotalErrorPct using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_TotalErrorPct must be specified if op_TotalErrorPct is specified.
:type val_c_TotalErrorPct: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_TotalThru: The operator to apply to the field TotalThru. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. TotalThru: The total number of packets passing through an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_TotalThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_TotalThru: If op_TotalThru is specified, the field named in this input will be compared to the value in TotalThru using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_TotalThru must be specified if op_TotalThru is specified.
:type val_f_TotalThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_TotalThru: If op_TotalThru is specified, this value will be compared to the value in TotalThru using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_TotalThru must be specified if op_TotalThru is specified.
:type val_c_TotalThru: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_TotalUtil: The operator to apply to the field TotalUtil. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. TotalUtil: The total utilization of each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_TotalUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_TotalUtil: If op_TotalUtil is specified, the field named in this input will be compared to the value in TotalUtil using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_TotalUtil must be specified if op_TotalUtil is specified.
:type val_f_TotalUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_TotalUtil: If op_TotalUtil is specified, this value will be compared to the value in TotalUtil using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_TotalUtil must be specified if op_TotalUtil is specified.
:type val_c_TotalUtil: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifAlignmentErrors: The operator to apply to the field ifAlignmentErrors. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifAlignmentErrors: The alignment errors of each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifAlignmentErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifAlignmentErrors: If op_ifAlignmentErrors is specified, the field named in this input will be compared to the value in ifAlignmentErrors using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifAlignmentErrors must be specified if op_ifAlignmentErrors is specified.
:type val_f_ifAlignmentErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifAlignmentErrors: If op_ifAlignmentErrors is specified, this value will be compared to the value in ifAlignmentErrors using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifAlignmentErrors must be specified if op_ifAlignmentErrors is specified.
:type val_c_ifAlignmentErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifFCSErrors: The operator to apply to the field ifFCSErrors. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifFCSErrors: The FCS Errors of each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifFCSErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifFCSErrors: If op_ifFCSErrors is specified, the field named in this input will be compared to the value in ifFCSErrors using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifFCSErrors must be specified if op_ifFCSErrors is specified.
:type val_f_ifFCSErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifFCSErrors: If op_ifFCSErrors is specified, this value will be compared to the value in ifFCSErrors using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifFCSErrors must be specified if op_ifFCSErrors is specified.
:type val_c_ifFCSErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifInBroadcastPkts: The operator to apply to the field ifInBroadcastPkts. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifInBroadcastPkts: The number of incoming broadcast packets of an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifInBroadcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifInBroadcastPkts: If op_ifInBroadcastPkts is specified, the field named in this input will be compared to the value in ifInBroadcastPkts using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifInBroadcastPkts must be specified if op_ifInBroadcastPkts is specified.
:type val_f_ifInBroadcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifInBroadcastPkts: If op_ifInBroadcastPkts is specified, this value will be compared to the value in ifInBroadcastPkts using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifInBroadcastPkts must be specified if op_ifInBroadcastPkts is specified.
:type val_c_ifInBroadcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifInDiscards: The operator to apply to the field ifInDiscards. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifInDiscards: The number of incoming discard packets of an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifInDiscards: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifInDiscards: If op_ifInDiscards is specified, the field named in this input will be compared to the value in ifInDiscards using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifInDiscards must be specified if op_ifInDiscards is specified.
:type val_f_ifInDiscards: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifInDiscards: If op_ifInDiscards is specified, this value will be compared to the value in ifInDiscards using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifInDiscards must be specified if op_ifInDiscards is specified.
:type val_c_ifInDiscards: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifInErrors: The operator to apply to the field ifInErrors. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifInErrors: The number of incoming errors of an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifInErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifInErrors: If op_ifInErrors is specified, the field named in this input will be compared to the value in ifInErrors using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifInErrors must be specified if op_ifInErrors is specified.
:type val_f_ifInErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifInErrors: If op_ifInErrors is specified, this value will be compared to the value in ifInErrors using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifInErrors must be specified if op_ifInErrors is specified.
:type val_c_ifInErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifInMulticastPkts: The operator to apply to the field ifInMulticastPkts. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifInMulticastPkts: The number of incoming multicast packets of an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifInMulticastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifInMulticastPkts: If op_ifInMulticastPkts is specified, the field named in this input will be compared to the value in ifInMulticastPkts using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifInMulticastPkts must be specified if op_ifInMulticastPkts is specified.
:type val_f_ifInMulticastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifInMulticastPkts: If op_ifInMulticastPkts is specified, this value will be compared to the value in ifInMulticastPkts using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifInMulticastPkts must be specified if op_ifInMulticastPkts is specified.
:type val_c_ifInMulticastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifInNUcastPkts: The operator to apply to the field ifInNUcastPkts. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifInNUcastPkts: The number of incoming non-unicast packets on the local interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifInNUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifInNUcastPkts: If op_ifInNUcastPkts is specified, the field named in this input will be compared to the value in ifInNUcastPkts using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifInNUcastPkts must be specified if op_ifInNUcastPkts is specified.
:type val_f_ifInNUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifInNUcastPkts: If op_ifInNUcastPkts is specified, this value will be compared to the value in ifInNUcastPkts using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifInNUcastPkts must be specified if op_ifInNUcastPkts is specified.
:type val_c_ifInNUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifInOctets: The operator to apply to the field ifInOctets. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifInOctets: The number of incoming octets on the interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifInOctets: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifInOctets: If op_ifInOctets is specified, the field named in this input will be compared to the value in ifInOctets using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifInOctets must be specified if op_ifInOctets is specified.
:type val_f_ifInOctets: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifInOctets: If op_ifInOctets is specified, this value will be compared to the value in ifInOctets using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifInOctets must be specified if op_ifInOctets is specified.
:type val_c_ifInOctets: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifInUcastPkts: The operator to apply to the field ifInUcastPkts. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifInUcastPkts: The number of incoming unicast packets on the local interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifInUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifInUcastPkts: If op_ifInUcastPkts is specified, the field named in this input will be compared to the value in ifInUcastPkts using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifInUcastPkts must be specified if op_ifInUcastPkts is specified.
:type val_f_ifInUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifInUcastPkts: If op_ifInUcastPkts is specified, this value will be compared to the value in ifInUcastPkts using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifInUcastPkts must be specified if op_ifInUcastPkts is specified.
:type val_c_ifInUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifIndex: The operator to apply to the field ifIndex. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifIndex: The index of the local interface for this daily performance table entry. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifIndex: If op_ifIndex is specified, the field named in this input will be compared to the value in ifIndex using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifIndex must be specified if op_ifIndex is specified.
:type val_f_ifIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifIndex: If op_ifIndex is specified, this value will be compared to the value in ifIndex using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifIndex must be specified if op_ifIndex is specified.
:type val_c_ifIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifLateCollisions: The operator to apply to the field ifLateCollisions. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifLateCollisions: The number of late collisions on each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifLateCollisions: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifLateCollisions: If op_ifLateCollisions is specified, the field named in this input will be compared to the value in ifLateCollisions using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifLateCollisions must be specified if op_ifLateCollisions is specified.
:type val_f_ifLateCollisions: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifLateCollisions: If op_ifLateCollisions is specified, this value will be compared to the value in ifLateCollisions using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifLateCollisions must be specified if op_ifLateCollisions is specified.
:type val_c_ifLateCollisions: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifOutBroadcastPkts: The operator to apply to the field ifOutBroadcastPkts. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifOutBroadcastPkts: The outgoing broadcast packets of each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifOutBroadcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifOutBroadcastPkts: If op_ifOutBroadcastPkts is specified, the field named in this input will be compared to the value in ifOutBroadcastPkts using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifOutBroadcastPkts must be specified if op_ifOutBroadcastPkts is specified.
:type val_f_ifOutBroadcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifOutBroadcastPkts: If op_ifOutBroadcastPkts is specified, this value will be compared to the value in ifOutBroadcastPkts using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifOutBroadcastPkts must be specified if op_ifOutBroadcastPkts is specified.
:type val_c_ifOutBroadcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifOutDiscards: The operator to apply to the field ifOutDiscards. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifOutDiscards: The outgoing discarded packets of an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifOutDiscards: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifOutDiscards: If op_ifOutDiscards is specified, the field named in this input will be compared to the value in ifOutDiscards using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifOutDiscards must be specified if op_ifOutDiscards is specified.
:type val_f_ifOutDiscards: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifOutDiscards: If op_ifOutDiscards is specified, this value will be compared to the value in ifOutDiscards using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifOutDiscards must be specified if op_ifOutDiscards is specified.
:type val_c_ifOutDiscards: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifOutErrors: The operator to apply to the field ifOutErrors. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifOutErrors: The outgoing errors of an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifOutErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifOutErrors: If op_ifOutErrors is specified, the field named in this input will be compared to the value in ifOutErrors using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifOutErrors must be specified if op_ifOutErrors is specified.
:type val_f_ifOutErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifOutErrors: If op_ifOutErrors is specified, this value will be compared to the value in ifOutErrors using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifOutErrors must be specified if op_ifOutErrors is specified.
:type val_c_ifOutErrors: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifOutMulticastPkts: The operator to apply to the field ifOutMulticastPkts. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifOutMulticastPkts: The outgoing multicast packets of each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifOutMulticastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifOutMulticastPkts: If op_ifOutMulticastPkts is specified, the field named in this input will be compared to the value in ifOutMulticastPkts using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifOutMulticastPkts must be specified if op_ifOutMulticastPkts is specified.
:type val_f_ifOutMulticastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifOutMulticastPkts: If op_ifOutMulticastPkts is specified, this value will be compared to the value in ifOutMulticastPkts using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifOutMulticastPkts must be specified if op_ifOutMulticastPkts is specified.
:type val_c_ifOutMulticastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifOutNUcastPkts: The operator to apply to the field ifOutNUcastPkts. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifOutNUcastPkts: The outgoing non unicast packets of an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifOutNUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifOutNUcastPkts: If op_ifOutNUcastPkts is specified, the field named in this input will be compared to the value in ifOutNUcastPkts using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifOutNUcastPkts must be specified if op_ifOutNUcastPkts is specified.
:type val_f_ifOutNUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifOutNUcastPkts: If op_ifOutNUcastPkts is specified, this value will be compared to the value in ifOutNUcastPkts using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifOutNUcastPkts must be specified if op_ifOutNUcastPkts is specified.
:type val_c_ifOutNUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifOutOctets: The operator to apply to the field ifOutOctets. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifOutOctets: The number of outgoing octets. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifOutOctets: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifOutOctets: If op_ifOutOctets is specified, the field named in this input will be compared to the value in ifOutOctets using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifOutOctets must be specified if op_ifOutOctets is specified.
:type val_f_ifOutOctets: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifOutOctets: If op_ifOutOctets is specified, this value will be compared to the value in ifOutOctets using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifOutOctets must be specified if op_ifOutOctets is specified.
:type val_c_ifOutOctets: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifOutUcastPkts: The operator to apply to the field ifOutUcastPkts. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifOutUcastPkts: The outgoing unicast packets of an interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifOutUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifOutUcastPkts: If op_ifOutUcastPkts is specified, the field named in this input will be compared to the value in ifOutUcastPkts using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifOutUcastPkts must be specified if op_ifOutUcastPkts is specified.
:type val_f_ifOutUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifOutUcastPkts: If op_ifOutUcastPkts is specified, this value will be compared to the value in ifOutUcastPkts using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifOutUcastPkts must be specified if op_ifOutUcastPkts is specified.
:type val_c_ifOutUcastPkts: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_ifTotalChanges: The operator to apply to the field ifTotalChanges. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. ifTotalChanges: The total number of changes that occurred on each interface. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_ifTotalChanges: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_ifTotalChanges: If op_ifTotalChanges is specified, the field named in this input will be compared to the value in ifTotalChanges using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_ifTotalChanges must be specified if op_ifTotalChanges is specified.
:type val_f_ifTotalChanges: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_ifTotalChanges: If op_ifTotalChanges is specified, this value will be compared to the value in ifTotalChanges using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_ifTotalChanges must be specified if op_ifTotalChanges is specified.
:type val_c_ifTotalChanges: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` today
:param starttime: The data returned will represent the if perf dailies with this date and time as the lower boundary. If omitted, the result will indicate the most recently collected data.
:type starttime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` tomorrow
:param endtime: The data returned will represent the if perf dailies with this date and time as the upper boundary. If omitted, the result will indicate the most recently collected data.
:type endtime: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit documentation for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` DeviceID
:param sort: The data field(s) to use for sorting the output. Default is DeviceID. Valid values are DataSourceID, StartTime, EndTime, DeviceID, ifIndex, ifTotalChanges, ifInOctets, ifInUcastPkts, ifInNUcastPkts, ifInMulticastPkts, ifInBroadcastPkts, ifInDiscards, ifInErrors, ifOutOctets, ifOutUcastPkts, ifOutNUcastPkts, ifOutMulticastPkts, ifOutBroadcastPkts, ifOutDiscards, ifOutErrors, ifAlignmentErrors, ifFCSErrors, ifLateCollisions, InThru, OutThru, TotalThru, InUtil, OutUtil, TotalUtil, InErrorPct, OutErrorPct, TotalErrorPct, InBcastPct, OutBcastPct, TotalBcastPct, InDiscardPct, OutDiscardPct, TotalDiscardPct.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each IfPerfDaily. Valid values are DataSourceID, StartTime, EndTime, DeviceID, ifIndex, ifTotalChanges, ifInOctets, ifInUcastPkts, ifInNUcastPkts, ifInMulticastPkts, ifInBroadcastPkts, ifInDiscards, ifInErrors, ifOutOctets, ifOutUcastPkts, ifOutNUcastPkts, ifOutMulticastPkts, ifOutBroadcastPkts, ifOutDiscards, ifOutErrors, ifAlignmentErrors, ifFCSErrors, ifLateCollisions, InThru, OutThru, TotalThru, InUtil, OutUtil, TotalUtil, InErrorPct, OutErrorPct, TotalErrorPct, InBcastPct, OutBcastPct, TotalBcastPct, InDiscardPct, OutDiscardPct, TotalDiscardPct. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return if_perf_dailies: An array of the IfPerfDaily objects that match the specified input criteria.
:rtype if_perf_dailies: Array of IfPerfDaily
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
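# Usage sketch (not part of the original file; the host and credentials below
# are placeholders). It assumes the standard infoblox-netmri client, whose
# get_broker() call returns the broker exposing the find() method documented
# above.
from infoblox_netmri.client import InfobloxNetMRI

client = InfobloxNetMRI(host="netmri.example.com",
                        username="admin",
                        password="secret")
broker = client.get_broker("IfPerfDaily")

# Daily rows whose inbound utilization fell between 80 and 100: per the
# docstring, the `between` operator takes a comma-delimited, even-length list
# of boundary values in val_c_.
rows = broker.find(op_InUtil="between",
                   val_c_InUtil="80,100",
                   sort=["StartTime"],
                   dir=["desc"],   # newest first
                   start=0,        # first record of the page
                   limit=100)      # page size (maximum 10000)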
hexsha: 100fb0d2c81f5248a68531dd4f6ce6ffb5691fb5 | size: 7,657 | ext: py | lang: Python
path: models.py | repo: FDKevin0/Micro-Expression-with-Deep-Learning | head hexsha: 617a359f264a4ccc4b6c5b1eb68c56b08d9cc397 | licenses: ["BSD-3-Clause-Attribution"]
from tensorflow.keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D
from tensorflow.keras.layers import Flatten, Dense, Dropout
from tensorflow.keras.layers import LSTM, UpSampling2D
from tensorflow.keras.models import Sequential
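# VGG-16-style convolutional stack with fully connected layers ending in a
# 2622-way softmax (the head size used by VGG-Face-style pretrained weights);
# `channels` allows 4-channel input. After optionally loading weights, the
# pretrained head is popped and a `classes`-way softmax is added.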
def VGG_16_4_channels(spatial_size, classes, channels, channel_first=True, weights_path=None):
model = Sequential()
if channel_first:
model.add(ZeroPadding2D((1,1),input_shape=(channels, spatial_size, spatial_size)))
else:
model.add(ZeroPadding2D((1,1),input_shape=(spatial_size, spatial_size, channels)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2))) # 33
model.add(Flatten())
model.add(Dense(4096, activation='relu')) # 34
model.add(Dropout(0.5))
model.add(Dense(4096, activation='relu')) # 35
model.add(Dropout(0.5))
model.add(Dense(2622, activation='softmax')) # Dropped
if weights_path:
model.load_weights(weights_path)
model.pop()
model.add(Dense(classes, activation='softmax')) # 36
return model
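# Spatial VGG-16 model; apart from the name, the layer stack is identical to
# VGG_16_4_channels above.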
def VGG_16(spatial_size, classes, channels, channel_first=True, weights_path=None):
model = Sequential()
if channel_first:
model.add(ZeroPadding2D((1,1),input_shape=(channels, spatial_size, spatial_size)))
else:
model.add(ZeroPadding2D((1,1),input_shape=(spatial_size, spatial_size, channels)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2))) # 33
model.add(Flatten())
model.add(Dense(4096, activation='relu')) # 34
model.add(Dropout(0.5))
model.add(Dense(4096, activation='relu')) # 35
model.add(Dropout(0.5))
model.add(Dense(2622, activation='softmax')) # Dropped
if weights_path:
model.load_weights(weights_path)
model.pop()
model.add(Dense(classes, activation='softmax')) # 36
return model
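# Temporal module: a single 3000-unit LSTM over `timesteps_TIM` per-frame
# feature vectors, followed by a 128-unit ReLU layer and a `classes`-way
# sigmoid output.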
def temporal_module(data_dim, timesteps_TIM, classes, weights_path=None):
model = Sequential()
model.add(LSTM(3000, return_sequences=False, input_shape=(timesteps_TIM, data_dim)))
#model.add(LSTM(3000, return_sequences=False))
model.add(Dense(128, activation='relu'))
model.add(Dense(classes, activation='sigmoid'))
if weights_path:
model.load_weights(weights_path)
return model
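# Convolutional autoencoder: three conv+pool stages (8x spatial downsampling)
# mirrored by three conv+upsample stages back to the input resolution. Note
# that `classes` and `weights_path` are accepted but unused in the body below.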
def convolutional_autoencoder(classes, spatial_size, channel_first=True, weights_path=None):
model = Sequential()
# encoder
if channel_first:
model.add(Conv2D(128, (3, 3), activation='relu', input_shape=(3, spatial_size, spatial_size), padding='same'))
else:
model.add(Conv2D(128, (3, 3), activation='relu', input_shape=(spatial_size, spatial_size, 3), padding='same'))
model.add(MaxPooling2D(pool_size=(2, 2), strides=2, padding='same'))
model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(MaxPooling2D(pool_size=(2, 2), strides=2, padding='same'))
model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(MaxPooling2D(pool_size=(2, 2), strides=2, padding='same'))
# decoder
model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(UpSampling2D(2))
model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(UpSampling2D(2))
model.add(Conv2D(128, (3, 3), activation='relu', padding='same'))
model.add(UpSampling2D(2))
model.add(Conv2D(3, (3, 3), activation='sigmoid', padding='same'))
return model
def VGG_16_tim(spatial_size, classes, channels, channel_first=True, weights_path=None):
model = Sequential()
if channel_first:
model.add(ZeroPadding2D((1,1),input_shape=(channels, spatial_size, spatial_size)))
else:
model.add(ZeroPadding2D((1,1),input_shape=(spatial_size, spatial_size, channels)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Conv2D(512, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2))) # 33
model.add(Flatten())
model.add(Dense(4096, activation='relu')) # 34
model.add(Dropout(0.5))
model.add(Dense(4096, activation='relu')) # 35
model.add(Dropout(0.5))
model.add(Dense(2622, activation='softmax')) # 2622-way VGG-Face pretraining head; popped and replaced below
if weights_path:
model.load_weights(weights_path)
model.pop()
model.add(Dense(classes, activation='softmax')) # 36
return model
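# Hedged usage sketch (not part of the original file). The spatial size,
# class count, and optimizer choices below are illustrative assumptions, and
# the Keras imports used by the builders above are assumed to be in scope.
# Build the VGG-16 backbone for a hypothetical 5-class task on 224x224
# channels-last input; with no weights_path, the 2622-way head is simply
# replaced without loading pretrained weights.
vgg = VGG_16(spatial_size=224, classes=5, channels=3, channel_first=False)
vgg.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
# The temporal module consumes per-frame feature vectors, e.g. 4096-d fc
# activations from the VGG head, over an assumed 10-frame window.
tim = temporal_module(data_dim=4096, timesteps_TIM=10, classes=5)
tim.compile(optimizer='adam', loss='binary_crossentropy')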
| 34.490991
| 112
| 0.698446
| 1,151
| 7,657
| 4.584709
| 0.065161
| 0.204662
| 0.124692
| 0.139473
| 0.929316
| 0.893121
| 0.888952
| 0.875308
| 0.858442
| 0.858442
| 0
| 0.084209
| 0.092726
| 7,657
| 221
| 113
| 34.647059
| 0.675399
| 0.015672
| 0
| 0.906977
| 0
| 0
| 0.0415
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02907
| false
| 0
| 0.023256
| 0
| 0.081395
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
12352be125d6557f692dde0b2100ae201693defd
| 6,673
|
py
|
Python
|
python-backend/tests/parties/party_appt/resources/test_mine_party_appt_mm_overlap.py
|
MaxWardle/mds
|
15d8405e6e95af98da9588f353c5d6692d1aa3d6
|
[
"Apache-2.0"
] | null | null | null |
python-backend/tests/parties/party_appt/resources/test_mine_party_appt_mm_overlap.py
|
MaxWardle/mds
|
15d8405e6e95af98da9588f353c5d6692d1aa3d6
|
[
"Apache-2.0"
] | null | null | null |
python-backend/tests/parties/party_appt/resources/test_mine_party_appt_mm_overlap.py
|
MaxWardle/mds
|
15d8405e6e95af98da9588f353c5d6692d1aa3d6
|
[
"Apache-2.0"
] | null | null | null |
import json, uuid, pytest
from datetime import date, timedelta
from tests.constants import TEST_MINE_PARTY_APPT_GUID, TEST_MINE_GUID, TEST_PARTY_PER_GUID_1, TEST_MINE_PARTY_APPT_TYPE_CODE2, TEST_PARTY_PER_FIRST_NAME_1, TEST_PARTY_PER_PARTY_NAME_1, TEST_MINE_PARTY_APPT_TYPE_CODE1, TEST_TAILINGS_STORAGE_FACILITY_GUID1, DUMMY_USER_KWARGS
from app.api.parties.party_appt.models.mine_party_appt import MinePartyAppointment
from app.extensions import db
MM_APPT_LENGTH = timedelta(days=14)
INIT_START_DATE = date(2000, 1, 1)
INIT_END_DATE = INIT_START_DATE + MM_APPT_LENGTH
@pytest.fixture(scope="function")
def setup_info(test_client):
mine_manager_1 = MinePartyAppointment(
mine_guid=uuid.UUID(TEST_MINE_GUID),
party_guid=uuid.UUID(TEST_PARTY_PER_GUID_1),
mine_party_appt_type_code='MMG',
start_date=INIT_START_DATE,
end_date=INIT_END_DATE,
processed_by=DUMMY_USER_KWARGS.get('update_user'),
**DUMMY_USER_KWARGS)
mine_manager_1.save()
mine_manager_2 = MinePartyAppointment(
mine_guid=uuid.UUID(TEST_MINE_GUID),
party_guid=uuid.UUID(TEST_PARTY_PER_GUID_1),
mine_party_appt_type_code='MMG',
start_date=INIT_START_DATE + timedelta(days=500),
end_date=INIT_END_DATE + timedelta(days=500),
processed_by=DUMMY_USER_KWARGS.get('update_user'),
**DUMMY_USER_KWARGS)
mine_manager_2.save()
yield dict(mine_manager_1=mine_manager_1, mine_manager_2=mine_manager_2)
db.session.delete(mine_manager_1)
db.session.delete(mine_manager_2)
db.session.commit()
# POST
def test_post_mine_manager_happy_before(test_client, auth_headers, setup_info):
test_data = {
'mine_guid': TEST_MINE_GUID,
'party_guid': TEST_PARTY_PER_GUID_1,
'mine_party_appt_type_code': "MMG",
'start_date': str(INIT_START_DATE - MM_APPT_LENGTH - timedelta(days=1)),
'end_date': str(INIT_END_DATE - MM_APPT_LENGTH - timedelta(days=1)),
}
post_resp = test_client.post(
'/parties/mines', data=test_data, headers=auth_headers['full_auth_header'])
post_data = json.loads(post_resp.data.decode())
assert post_resp.status_code == 200, post_resp.response
# clean-up
new_mpa = MinePartyAppointment.find_by_mine_party_appt_guid(post_data["mine_party_appt_guid"])
db.session.delete(new_mpa)
db.session.commit()
def test_post_mine_manager_happy_after(test_client, auth_headers, setup_info):
test_data = {
'mine_guid': TEST_MINE_GUID,
'party_guid': TEST_PARTY_PER_GUID_1,
'mine_party_appt_type_code': "MMG",
'start_date': str(INIT_START_DATE + MM_APPT_LENGTH + timedelta(days=1)),
'end_date': str(INIT_END_DATE + MM_APPT_LENGTH + timedelta(days=1)),
}
post_resp = test_client.post(
'/parties/mines', data=test_data, headers=auth_headers['full_auth_header'])
post_data = json.loads(post_resp.data.decode())
assert post_resp.status_code == 200, post_resp.response
# clean-up
new_mpa = MinePartyAppointment.find_by_mine_party_appt_guid(post_data["mine_party_appt_guid"])
db.session.delete(new_mpa)
db.session.commit()
def test_post_mine_manager_overlap_one_day_start(test_client, auth_headers, setup_info):
test_data = {
'mine_guid': TEST_MINE_GUID,
'party_guid': TEST_PARTY_PER_GUID_1,
'mine_party_appt_type_code': "MMG",
'start_date': str(INIT_START_DATE - MM_APPT_LENGTH - timedelta(days=1)),
'end_date': str(INIT_END_DATE - MM_APPT_LENGTH), # same day as existing
}
post_resp = test_client.post(
'/parties/mines', data=test_data, headers=auth_headers['full_auth_header'])
post_data = json.loads(post_resp.data.decode())
assert post_resp.status_code == 500, post_resp.response
def test_post_mine_manager_overlap_one_day_end(test_client, auth_headers, setup_info):
test_data = {
'mine_guid': TEST_MINE_GUID,
'party_guid': TEST_PARTY_PER_GUID_1,
'mine_party_appt_type_code': "MMG",
'start_date': str(INIT_START_DATE + MM_APPT_LENGTH), # same day as existing
'end_date': str(INIT_END_DATE + MM_APPT_LENGTH + timedelta(days=1)),
}
post_resp = test_client.post(
'/parties/mines', data=test_data, headers=auth_headers['full_auth_header'])
post_data = json.loads(post_resp.data.decode())
assert post_resp.status_code == 500, post_resp.response
# PUT
def test_put_mine_manager_happy_before(test_client, auth_headers, setup_info):
test_data = {
'start_date': str(INIT_START_DATE - MM_APPT_LENGTH - timedelta(days=1)),
'end_date': str(INIT_END_DATE - MM_APPT_LENGTH - timedelta(days=1)),
}
put_resp = test_client.put(
'/parties/mines/' + str(setup_info["mine_manager_2"].mine_party_appt_guid),
data=test_data,
headers=auth_headers['full_auth_header'])
put_data = json.loads(put_resp.data.decode())
assert put_resp.status_code == 200, put_resp.response
def test_put_mine_manager_happy_after(test_client, auth_headers, setup_info):
test_data = {
'start_date': str(INIT_START_DATE + MM_APPT_LENGTH + timedelta(days=1)),
'end_date': str(INIT_END_DATE + MM_APPT_LENGTH + timedelta(days=1)),
}
put_resp = test_client.put(
f'/parties/mines/{setup_info["mine_manager_2"].mine_party_appt_guid}',
data=test_data,
headers=auth_headers['full_auth_header'])
put_data = json.loads(put_resp.data.decode())
assert put_resp.status_code == 200, put_resp.response
def test_put_mine_manager_overlap_one_day_start(test_client, auth_headers, setup_info):
test_data = {
'start_date': str(INIT_START_DATE - MM_APPT_LENGTH - timedelta(days=1)),
'end_date': str(INIT_END_DATE - MM_APPT_LENGTH),
}
put_resp = test_client.put(
'/parties/mines/' + str(setup_info["mine_manager_2"].mine_party_appt_guid),
data=test_data,
headers=auth_headers['full_auth_header'])
put_data = json.loads(put_resp.data.decode())
assert put_resp.status_code == 500, put_resp.response
def test_put_mine_manager_overlap_one_day_end(test_client, auth_headers, setup_info):
test_data = {
'start_date': str(INIT_START_DATE + MM_APPT_LENGTH), # same day as existing
'end_date': str(INIT_END_DATE + MM_APPT_LENGTH + timedelta(days=1)),
}
put_resp = test_client.put(
'/parties/mines/' + str(setup_info["mine_manager_2"].mine_party_appt_guid),
data=test_data,
headers=auth_headers['full_auth_header'])
put_data = json.loads(put_resp.data.decode())
assert put_resp.status_code == 500, put_resp.response
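# Hedged sketch (not part of the original test file) of the date-overlap rule
# these POST/PUT tests exercise: two mine-manager appointments conflict when
# their inclusive [start_date, end_date] ranges intersect, so sharing even a
# single boundary day is rejected, as in the one-day-overlap cases above.
# The real validation lives server-side; the helper name here is hypothetical.
from datetime import date

def appointments_overlap(start_a, end_a, start_b, end_b):
    # Inclusive-interval intersection test.
    return start_a <= end_b and start_b <= end_a

# Ranges that share only their boundary day still overlap:
assert appointments_overlap(date(2000, 1, 1), date(2000, 1, 15),
                            date(2000, 1, 15), date(2000, 1, 29))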
| 42.503185
| 273
| 0.72606
| 991
| 6,673
| 4.423814
| 0.097881
| 0.045164
| 0.053376
| 0.062044
| 0.888914
| 0.850137
| 0.825046
| 0.825046
| 0.818659
| 0.818659
| 0
| 0.013662
| 0.166342
| 6,673
| 157
| 274
| 42.503185
| 0.774402
| 0.012438
| 0
| 0.707692
| 0
| 0
| 0.113153
| 0.025213
| 0
| 0
| 0
| 0
| 0.061538
| 1
| 0.069231
| false
| 0
| 0.038462
| 0
| 0.107692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d61028b5c9edb5890f6b62f355a23600734c0df6
| 166
|
py
|
Python
|
HARK/BayerLuetticke/Assets/One/__init__.py
|
cohenimhuji/HARK
|
bb8549105ab979f853bd413d694f4a9b6572554e
|
[
"Apache-2.0"
] | null | null | null |
HARK/BayerLuetticke/Assets/One/__init__.py
|
cohenimhuji/HARK
|
bb8549105ab979f853bd413d694f4a9b6572554e
|
[
"Apache-2.0"
] | null | null | null |
HARK/BayerLuetticke/Assets/One/__init__.py
|
cohenimhuji/HARK
|
bb8549105ab979f853bd413d694f4a9b6572554e
|
[
"Apache-2.0"
] | null | null | null |
from .FluctuationsOneAssetIOUs import *
from .FluctuationsOneAssetIOUsBond import *
from .SteadyStateOneAssetIOUs import *
from .SteadyStateOneAssetIOUsBond import *
| 33.2
| 43
| 0.855422
| 12
| 166
| 11.833333
| 0.5
| 0.211268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 166
| 4
| 44
| 41.5
| 0.946667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d61ac718df5606dda0dcd1c91e750bea159990f2
| 642
|
py
|
Python
|
tests/functions.py
|
jeertmans/checktype
|
3f374964aae1388b7faa9c9ce7fde7dd8bc71e75
|
[
"MIT"
] | null | null | null |
tests/functions.py
|
jeertmans/checktype
|
3f374964aae1388b7faa9c9ce7fde7dd8bc71e75
|
[
"MIT"
] | null | null | null |
tests/functions.py
|
jeertmans/checktype
|
3f374964aae1388b7faa9c9ce7fde7dd8bc71e75
|
[
"MIT"
] | null | null | null |
def f_mul(a, b):
return a * b * 0.5
def f_mul_int_typed(a: int, b: int) -> float:
return f_mul(a, b)
def f_mul_int_missing_one(a: int, b) -> float:
return f_mul(a, b)
def f_mul_int_missing_two(a, b) -> float:
return f_mul(a, b)
def f_mul_int_missing_all(a, b):
return f_mul(a, b)
def f_mul_int_typed_kwd(a: int, b: int = 0) -> float:
return f_mul(a, b)
__f_mul_int_typed_from_string__ = None
__f_mul_int_typed_code__ = """def __f_mul_int_typed_from_string__(
a: int, b:int) -> float:
return f_mul(a, b)
"""
exec(__f_mul_int_typed_code__)
f_mul_int_typed_from_string = __f_mul_int_typed_from_string__
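# Hedged note (not part of the original fixtures): the functions above differ
# only in how completely they are annotated, and such fixtures can be
# introspected with the standard library alone. typing.get_type_hints()
# silently omits unannotated parameters, which is exactly the gap a type
# checker must handle; the demo function below is hypothetical.
from typing import get_type_hints

def f_demo(a: int, b) -> float: # same shape as f_mul_int_missing_one above
    return a * b * 0.5

print(get_type_hints(f_demo)) # {'a': <class 'int'>, 'return': <class 'float'>}; 'b' is absent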
| 18.882353
| 66
| 0.697819
| 131
| 642
| 2.854962
| 0.160305
| 0.192513
| 0.205882
| 0.256684
| 0.852941
| 0.705882
| 0.425134
| 0.425134
| 0.425134
| 0.36631
| 0
| 0.005703
| 0.180685
| 642
| 33
| 67
| 19.454545
| 0.705323
| 0
| 0
| 0.315789
| 0
| 0
| 0.132399
| 0.049844
| 0
| 0
| 0
| 0
| 0
| 1
| 0.315789
| false
| 0
| 0
| 0.315789
| 0.684211
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c39c20aedf29823799615c5f498b1461f2bebeab
| 37,201
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/forwarding_rule.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/compute/forwarding_rule.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/compute/forwarding_rule.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
__all__ = ['ForwardingRule']
class ForwardingRule(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
all_ports: Optional[pulumi.Input[bool]] = None,
allow_global_access: Optional[pulumi.Input[bool]] = None,
backend_service: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
ip_protocol: Optional[pulumi.Input[str]] = None,
is_mirroring_collector: Optional[pulumi.Input[bool]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
load_balancing_scheme: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
network_tier: Optional[pulumi.Input[str]] = None,
port_range: Optional[pulumi.Input[str]] = None,
ports: Optional[pulumi.Input[List[pulumi.Input[str]]]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
service_label: Optional[pulumi.Input[str]] = None,
subnetwork: Optional[pulumi.Input[str]] = None,
target: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
A ForwardingRule resource. A ForwardingRule resource specifies which pool
of target virtual machines to forward a packet to if it matches the given
[IPAddress, IPProtocol, portRange] tuple.
To get more information about ForwardingRule, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/v1/forwardingRules)
* How-to Guides
* [Official Documentation](https://cloud.google.com/compute/docs/load-balancing/network/forwarding-rules)
## Example Usage
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] all_ports: For internal TCP/UDP load balancing (i.e. load balancing scheme is
INTERNAL and protocol is TCP/UDP), set this to true to allow packets
addressed to any ports to be forwarded to the backends configured
with this forwarding rule. Used with backend service. Cannot be set
if port or portRange are set.
:param pulumi.Input[bool] allow_global_access: If true, clients can access ILB from all regions.
Otherwise, access is allowed only from the local region where the ILB is located.
:param pulumi.Input[str] backend_service: A BackendService to receive the matched traffic. This is used only
for INTERNAL load balancing.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property when
you create the resource.
:param pulumi.Input[str] ip_address: The IP address that this forwarding rule is serving on behalf of.
Addresses are restricted based on the forwarding rule's load balancing
scheme (EXTERNAL or INTERNAL) and scope (global or regional).
When the load balancing scheme is EXTERNAL, for global forwarding
rules, the address must be a global IP, and for regional forwarding
rules, the address must live in the same region as the forwarding
rule. If this field is empty, an ephemeral IPv4 address from the same
scope (global or regional) will be assigned. A regional forwarding
rule supports IPv4 only. A global forwarding rule supports either IPv4
or IPv6.
When the load balancing scheme is INTERNAL, this can only be an RFC
1918 IP address belonging to the network/subnet configured for the
forwarding rule. By default, if this field is empty, an ephemeral
internal IP address will be automatically allocated from the IP range
of the subnet or network configured for this forwarding rule.
An address must be specified by a literal IP address. > **NOTE:** While
the API allows you to specify various resource paths for an address resource
instead, this provider requires this to specifically be an IP address to
avoid needing to fetch the IP address from resource paths on refresh
or unnecessary diffs.
:param pulumi.Input[str] ip_protocol: The IP protocol to which this rule applies.
When the load balancing scheme is INTERNAL, only TCP and UDP are
valid.
Possible values are `TCP`, `UDP`, `ESP`, `AH`, `SCTP`, and `ICMP`.
:param pulumi.Input[bool] is_mirroring_collector: Indicates whether or not this load balancer can be used
as a collector for packet mirroring. To prevent mirroring loops,
instances behind this load balancer will not have their traffic
mirrored even if a PacketMirroring rule applies to them. This
can only be set to true for load balancers that have their
loadBalancingScheme set to INTERNAL.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this forwarding rule. A list of key->value pairs.
:param pulumi.Input[str] load_balancing_scheme: This signifies what the ForwardingRule will be used for and can be
EXTERNAL, INTERNAL, or INTERNAL_MANAGED. EXTERNAL is used for Classic
Cloud VPN gateways, protocol forwarding to VMs from an external IP address,
and HTTP(S), SSL Proxy, TCP Proxy, and Network TCP/UDP load balancers.
INTERNAL is used for protocol forwarding to VMs from an internal IP address,
and internal TCP/UDP load balancers.
INTERNAL_MANAGED is used for internal HTTP(S) load balancers.
Default value is `EXTERNAL`.
Possible values are `EXTERNAL`, `INTERNAL`, and `INTERNAL_MANAGED`.
:param pulumi.Input[str] name: Name of the resource; provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
:param pulumi.Input[str] network: For internal load balancing, this field identifies the network that
the load balanced IP should belong to for this Forwarding Rule. If
this field is not specified, the default network will be used.
This field is only used for INTERNAL load balancing.
:param pulumi.Input[str] network_tier: The networking tier used for configuring this address. If this field is not
specified, it is assumed to be PREMIUM.
Possible values are `PREMIUM` and `STANDARD`.
:param pulumi.Input[str] port_range: This field is used along with the target field for TargetHttpProxy,
TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetVpnGateway,
TargetPool, TargetInstance.
Applicable only when IPProtocol is TCP, UDP, or SCTP; only packets
addressed to ports in the specified range will be forwarded to the target.
Forwarding rules with the same [IPAddress, IPProtocol] pair must have
disjoint port ranges.
Some types of forwarding target have constraints on the acceptable
ports:
* TargetHttpProxy: 80, 8080
* TargetHttpsProxy: 443
* TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetVpnGateway: 500, 4500
:param pulumi.Input[List[pulumi.Input[str]]] ports: This field is used along with the backend_service field for internal
load balancing.
When the load balancing scheme is INTERNAL, a single port or a comma
separated list of ports can be configured. Only packets addressed to
these ports will be forwarded to the backends configured with this
forwarding rule.
You may specify up to 5 ports.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] region: A reference to the region where the regional forwarding rule resides.
This field is not applicable to global forwarding rules.
:param pulumi.Input[str] service_label: An optional prefix to the service name for this Forwarding Rule.
If specified, will be the first label of the fully qualified service
name.
The label must be 1-63 characters long, and comply with RFC1035.
Specifically, the label must be 1-63 characters long and match the
regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
character must be a lowercase letter, and all following characters
must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
This field is only used for INTERNAL load balancing.
:param pulumi.Input[str] subnetwork: The subnetwork that the load balanced IP should belong to for this
Forwarding Rule. This field is only used for INTERNAL load balancing.
If the network specified is in auto subnet mode, this field is
optional. However, if the network is in custom subnet mode, a
subnetwork must be specified.
:param pulumi.Input[str] target: The URL of the target resource to receive the matched traffic.
The target must live in the same region as the forwarding rule.
The forwarded traffic must be of a type appropriate to the target
object.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['all_ports'] = all_ports
__props__['allow_global_access'] = allow_global_access
__props__['backend_service'] = backend_service
__props__['description'] = description
__props__['ip_address'] = ip_address
__props__['ip_protocol'] = ip_protocol
__props__['is_mirroring_collector'] = is_mirroring_collector
__props__['labels'] = labels
__props__['load_balancing_scheme'] = load_balancing_scheme
__props__['name'] = name
__props__['network'] = network
__props__['network_tier'] = network_tier
__props__['port_range'] = port_range
__props__['ports'] = ports
__props__['project'] = project
__props__['region'] = region
__props__['service_label'] = service_label
__props__['subnetwork'] = subnetwork
__props__['target'] = target
__props__['creation_timestamp'] = None
__props__['label_fingerprint'] = None
__props__['self_link'] = None
__props__['service_name'] = None
super(ForwardingRule, __self__).__init__(
'gcp:compute/forwardingRule:ForwardingRule',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
all_ports: Optional[pulumi.Input[bool]] = None,
allow_global_access: Optional[pulumi.Input[bool]] = None,
backend_service: Optional[pulumi.Input[str]] = None,
creation_timestamp: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
ip_protocol: Optional[pulumi.Input[str]] = None,
is_mirroring_collector: Optional[pulumi.Input[bool]] = None,
label_fingerprint: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
load_balancing_scheme: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
network_tier: Optional[pulumi.Input[str]] = None,
port_range: Optional[pulumi.Input[str]] = None,
ports: Optional[pulumi.Input[List[pulumi.Input[str]]]] = None,
project: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
service_label: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
subnetwork: Optional[pulumi.Input[str]] = None,
target: Optional[pulumi.Input[str]] = None) -> 'ForwardingRule':
"""
Get an existing ForwardingRule resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] all_ports: For internal TCP/UDP load balancing (i.e. load balancing scheme is
INTERNAL and protocol is TCP/UDP), set this to true to allow packets
addressed to any ports to be forwarded to the backends configured
with this forwarding rule. Used with backend service. Cannot be set
if port or portRange are set.
:param pulumi.Input[bool] allow_global_access: If true, clients can access ILB from all regions.
Otherwise, access is allowed only from the local region where the ILB is located.
:param pulumi.Input[str] backend_service: A BackendService to receive the matched traffic. This is used only
for INTERNAL load balancing.
:param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property when
you create the resource.
:param pulumi.Input[str] ip_address: The IP address that this forwarding rule is serving on behalf of.
Addresses are restricted based on the forwarding rule's load balancing
scheme (EXTERNAL or INTERNAL) and scope (global or regional).
When the load balancing scheme is EXTERNAL, for global forwarding
rules, the address must be a global IP, and for regional forwarding
rules, the address must live in the same region as the forwarding
rule. If this field is empty, an ephemeral IPv4 address from the same
scope (global or regional) will be assigned. A regional forwarding
rule supports IPv4 only. A global forwarding rule supports either IPv4
or IPv6.
When the load balancing scheme is INTERNAL, this can only be an RFC
1918 IP address belonging to the network/subnet configured for the
forwarding rule. By default, if this field is empty, an ephemeral
internal IP address will be automatically allocated from the IP range
of the subnet or network configured for this forwarding rule.
An address must be specified by a literal IP address. > **NOTE:** While
the API allows you to specify various resource paths for an address resource
instead, this provider requires this to specifically be an IP address to
avoid needing to fetch the IP address from resource paths on refresh
or unnecessary diffs.
:param pulumi.Input[str] ip_protocol: The IP protocol to which this rule applies.
When the load balancing scheme is INTERNAL, only TCP and UDP are
valid.
Possible values are `TCP`, `UDP`, `ESP`, `AH`, `SCTP`, and `ICMP`.
:param pulumi.Input[bool] is_mirroring_collector: Indicates whether or not this load balancer can be used
as a collector for packet mirroring. To prevent mirroring loops,
instances behind this load balancer will not have their traffic
mirrored even if a PacketMirroring rule applies to them. This
can only be set to true for load balancers that have their
loadBalancingScheme set to INTERNAL.
:param pulumi.Input[str] label_fingerprint: The fingerprint used for optimistic locking of this resource. Used internally during updates.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this forwarding rule. A list of key->value pairs.
:param pulumi.Input[str] load_balancing_scheme: This signifies what the ForwardingRule will be used for and can be
EXTERNAL, INTERNAL, or INTERNAL_MANAGED. EXTERNAL is used for Classic
Cloud VPN gateways, protocol forwarding to VMs from an external IP address,
and HTTP(S), SSL Proxy, TCP Proxy, and Network TCP/UDP load balancers.
INTERNAL is used for protocol forwarding to VMs from an internal IP address,
and internal TCP/UDP load balancers.
INTERNAL_MANAGED is used for internal HTTP(S) load balancers.
Default value is `EXTERNAL`.
Possible values are `EXTERNAL`, `INTERNAL`, and `INTERNAL_MANAGED`.
:param pulumi.Input[str] name: Name of the resource; provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
:param pulumi.Input[str] network: For internal load balancing, this field identifies the network that
the load balanced IP should belong to for this Forwarding Rule. If
this field is not specified, the default network will be used.
This field is only used for INTERNAL load balancing.
:param pulumi.Input[str] network_tier: The networking tier used for configuring this address. If this field is not
specified, it is assumed to be PREMIUM.
Possible values are `PREMIUM` and `STANDARD`.
:param pulumi.Input[str] port_range: This field is used along with the target field for TargetHttpProxy,
TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetVpnGateway,
TargetPool, TargetInstance.
Applicable only when IPProtocol is TCP, UDP, or SCTP; only packets
addressed to ports in the specified range will be forwarded to the target.
Forwarding rules with the same [IPAddress, IPProtocol] pair must have
disjoint port ranges.
Some types of forwarding target have constraints on the acceptable
ports:
* TargetHttpProxy: 80, 8080
* TargetHttpsProxy: 443
* TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetVpnGateway: 500, 4500
:param pulumi.Input[List[pulumi.Input[str]]] ports: This field is used along with the backend_service field for internal
load balancing.
When the load balancing scheme is INTERNAL, a single port or a comma
separated list of ports can be configured. Only packets addressed to
these ports will be forwarded to the backends configured with this
forwarding rule.
You may specify up to 5 ports.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] region: A reference to the region where the regional forwarding rule resides.
This field is not applicable to global forwarding rules.
:param pulumi.Input[str] self_link: The URI of the created resource.
:param pulumi.Input[str] service_label: An optional prefix to the service name for this Forwarding Rule.
If specified, will be the first label of the fully qualified service
name.
The label must be 1-63 characters long, and comply with RFC1035.
Specifically, the label must be 1-63 characters long and match the
regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
character must be a lowercase letter, and all following characters
must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
This field is only used for INTERNAL load balancing.
:param pulumi.Input[str] service_name: The internal fully qualified service name for this Forwarding Rule. This field is only used for INTERNAL load balancing.
:param pulumi.Input[str] subnetwork: The subnetwork that the load balanced IP should belong to for this
Forwarding Rule. This field is only used for INTERNAL load balancing.
If the network specified is in auto subnet mode, this field is
optional. However, if the network is in custom subnet mode, a
subnetwork must be specified.
:param pulumi.Input[str] target: The URL of the target resource to receive the matched traffic.
The target must live in the same region as the forwarding rule.
The forwarded traffic must be of a type appropriate to the target
object.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["all_ports"] = all_ports
__props__["allow_global_access"] = allow_global_access
__props__["backend_service"] = backend_service
__props__["creation_timestamp"] = creation_timestamp
__props__["description"] = description
__props__["ip_address"] = ip_address
__props__["ip_protocol"] = ip_protocol
__props__["is_mirroring_collector"] = is_mirroring_collector
__props__["label_fingerprint"] = label_fingerprint
__props__["labels"] = labels
__props__["load_balancing_scheme"] = load_balancing_scheme
__props__["name"] = name
__props__["network"] = network
__props__["network_tier"] = network_tier
__props__["port_range"] = port_range
__props__["ports"] = ports
__props__["project"] = project
__props__["region"] = region
__props__["self_link"] = self_link
__props__["service_label"] = service_label
__props__["service_name"] = service_name
__props__["subnetwork"] = subnetwork
__props__["target"] = target
return ForwardingRule(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="allPorts")
def all_ports(self) -> pulumi.Output[Optional[bool]]:
"""
For internal TCP/UDP load balancing (i.e. load balancing scheme is
INTERNAL and protocol is TCP/UDP), set this to true to allow packets
addressed to any ports to be forwarded to the backends configured
with this forwarding rule. Used with backend service. Cannot be set
if port or portRange are set.
"""
return pulumi.get(self, "all_ports")
@property
@pulumi.getter(name="allowGlobalAccess")
def allow_global_access(self) -> pulumi.Output[Optional[bool]]:
"""
If true, clients can access ILB from all regions.
Otherwise, access is allowed only from the local region where the ILB is located.
"""
return pulumi.get(self, "allow_global_access")
@property
@pulumi.getter(name="backendService")
def backend_service(self) -> pulumi.Output[Optional[str]]:
"""
A BackendService to receive the matched traffic. This is used only
for INTERNAL load balancing.
"""
return pulumi.get(self, "backend_service")
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> pulumi.Output[str]:
"""
Creation timestamp in RFC3339 text format.
"""
return pulumi.get(self, "creation_timestamp")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
An optional description of this resource. Provide this property when
you create the resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> pulumi.Output[str]:
"""
The IP address that this forwarding rule is serving on behalf of.
Addresses are restricted based on the forwarding rule's load balancing
scheme (EXTERNAL or INTERNAL) and scope (global or regional).
When the load balancing scheme is EXTERNAL, for global forwarding
rules, the address must be a global IP, and for regional forwarding
rules, the address must live in the same region as the forwarding
rule. If this field is empty, an ephemeral IPv4 address from the same
scope (global or regional) will be assigned. A regional forwarding
rule supports IPv4 only. A global forwarding rule supports either IPv4
or IPv6.
When the load balancing scheme is INTERNAL, this can only be an RFC
1918 IP address belonging to the network/subnet configured for the
forwarding rule. By default, if this field is empty, an ephemeral
internal IP address will be automatically allocated from the IP range
of the subnet or network configured for this forwarding rule.
An address must be specified by a literal IP address. > **NOTE:** While
the API allows you to specify various resource paths for an address resource
instead, this provider requires this to specifically be an IP address to
avoid needing to fetch the IP address from resource paths on refresh
or unnecessary diffs.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="ipProtocol")
def ip_protocol(self) -> pulumi.Output[str]:
"""
The IP protocol to which this rule applies.
When the load balancing scheme is INTERNAL, only TCP and UDP are
valid.
Possible values are `TCP`, `UDP`, `ESP`, `AH`, `SCTP`, and `ICMP`.
"""
return pulumi.get(self, "ip_protocol")
@property
@pulumi.getter(name="isMirroringCollector")
def is_mirroring_collector(self) -> pulumi.Output[Optional[bool]]:
"""
Indicates whether or not this load balancer can be used
as a collector for packet mirroring. To prevent mirroring loops,
instances behind this load balancer will not have their traffic
mirrored even if a PacketMirroring rule applies to them. This
can only be set to true for load balancers that have their
loadBalancingScheme set to INTERNAL.
"""
return pulumi.get(self, "is_mirroring_collector")
@property
@pulumi.getter(name="labelFingerprint")
def label_fingerprint(self) -> pulumi.Output[str]:
"""
The fingerprint used for optimistic locking of this resource. Used internally during updates.
"""
return pulumi.get(self, "label_fingerprint")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Labels to apply to this forwarding rule. A list of key->value pairs.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="loadBalancingScheme")
def load_balancing_scheme(self) -> pulumi.Output[Optional[str]]:
"""
This signifies what the ForwardingRule will be used for and can be
EXTERNAL, INTERNAL, or INTERNAL_MANAGED. EXTERNAL is used for Classic
Cloud VPN gateways, protocol forwarding to VMs from an external IP address,
and HTTP(S), SSL Proxy, TCP Proxy, and Network TCP/UDP load balancers.
INTERNAL is used for protocol forwarding to VMs from an internal IP address,
and internal TCP/UDP load balancers.
INTERNAL_MANAGED is used for internal HTTP(S) load balancers.
Default value is `EXTERNAL`.
Possible values are `EXTERNAL`, `INTERNAL`, and `INTERNAL_MANAGED`.
"""
return pulumi.get(self, "load_balancing_scheme")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the resource; provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def network(self) -> pulumi.Output[str]:
"""
For internal load balancing, this field identifies the network that
the load balanced IP should belong to for this Forwarding Rule. If
this field is not specified, the default network will be used.
This field is only used for INTERNAL load balancing.
"""
return pulumi.get(self, "network")
@property
@pulumi.getter(name="networkTier")
def network_tier(self) -> pulumi.Output[str]:
"""
The networking tier used for configuring this address. If this field is not
specified, it is assumed to be PREMIUM.
Possible values are `PREMIUM` and `STANDARD`.
"""
return pulumi.get(self, "network_tier")
@property
@pulumi.getter(name="portRange")
def port_range(self) -> pulumi.Output[Optional[str]]:
"""
This field is used along with the target field for TargetHttpProxy,
TargetHttpsProxy, TargetSslProxy, TargetTcpProxy, TargetVpnGateway,
TargetPool, TargetInstance.
Applicable only when IPProtocol is TCP, UDP, or SCTP; only packets
addressed to ports in the specified range will be forwarded to the target.
Forwarding rules with the same [IPAddress, IPProtocol] pair must have
disjoint port ranges.
Some types of forwarding target have constraints on the acceptable
ports:
* TargetHttpProxy: 80, 8080
* TargetHttpsProxy: 443
* TargetTcpProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetSslProxy: 25, 43, 110, 143, 195, 443, 465, 587, 700, 993, 995,
1883, 5222
* TargetVpnGateway: 500, 4500
"""
return pulumi.get(self, "port_range")
@property
@pulumi.getter
def ports(self) -> pulumi.Output[Optional[List[str]]]:
"""
This field is used along with the backend_service field for internal
load balancing.
When the load balancing scheme is INTERNAL, a single port or a comma
separated list of ports can be configured. Only packets addressed to
these ports will be forwarded to the backends configured with this
forwarding rule.
You may specify up to 5 ports.
"""
return pulumi.get(self, "ports")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
A reference to the region where the regional forwarding rule resides.
This field is not applicable to global forwarding rules.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
"""
The URI of the created resource.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter(name="serviceLabel")
def service_label(self) -> pulumi.Output[Optional[str]]:
"""
An optional prefix to the service name for this Forwarding Rule.
If specified, will be the first label of the fully qualified service
name.
The label must be 1-63 characters long, and comply with RFC1035.
Specifically, the label must be 1-63 characters long and match the
regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
character must be a lowercase letter, and all following characters
must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
This field is only used for INTERNAL load balancing.
"""
return pulumi.get(self, "service_label")
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Output[str]:
"""
The internal fully qualified service name for this Forwarding Rule. This field is only used for INTERNAL load balancing.
"""
return pulumi.get(self, "service_name")
@property
@pulumi.getter
def subnetwork(self) -> pulumi.Output[str]:
"""
The subnetwork that the load balanced IP should belong to for this
Forwarding Rule. This field is only used for INTERNAL load balancing.
If the network specified is in auto subnet mode, this field is
optional. However, if the network is in custom subnet mode, a
subnetwork must be specified.
"""
return pulumi.get(self, "subnetwork")
@property
@pulumi.getter
def target(self) -> pulumi.Output[Optional[str]]:
"""
The URL of the target resource to receive the matched traffic.
The target must live in the same region as the forwarding rule.
The forwarded traffic must be of a type appropriate to the target
object.
"""
return pulumi.get(self, "target")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
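# Hedged usage sketch (not part of the generated file): the "## Example Usage"
# heading in the constructor docstring above is empty, so this shows one
# plausible call. The resource name, region, and target pool URL are
# illustrative; a real program needs a configured GCP project and credentials.
import pulumi
import pulumi_gcp as gcp

rule = gcp.compute.ForwardingRule(
    "example-rule",
    region="us-central1",
    ip_protocol="TCP",
    port_range="80",
    target="https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1/targetPools/my-pool",
)
pulumi.export("rule_self_link", rule.self_link)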
| 55.194362
| 167
| 0.649956
| 4,688
| 37,201
| 5.058874
| 0.083831
| 0.043599
| 0.043684
| 0.027323
| 0.857691
| 0.830789
| 0.813501
| 0.807598
| 0.803677
| 0.803677
| 0
| 0.014256
| 0.285342
| 37,201
| 673
| 168
| 55.276374
| 0.877793
| 0.624231
| 0
| 0.32
| 1
| 0
| 0.119327
| 0.015811
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12
| false
| 0.004444
| 0.022222
| 0.008889
| 0.262222
| 0.026667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f1848c7963115c27a2a3fb1ec554ff488c6f317
| 14,989
|
py
|
Python
|
tweak/transformer_model.py
|
UKPLab/TWEAC-qa-agent-selection
|
ed4f0cafa87aefd4820cca0d7f4881d2de99a9f0
|
[
"MIT"
] | 9
|
2021-04-16T12:45:45.000Z
|
2022-01-29T10:52:52.000Z
|
tweak/transformer_model.py
|
UKPLab/TWEAC-qa-agent-selection
|
ed4f0cafa87aefd4820cca0d7f4881d2de99a9f0
|
[
"MIT"
] | 1
|
2021-11-25T04:16:25.000Z
|
2021-11-25T09:54:29.000Z
|
tweak/transformer_model.py
|
UKPLab/TWEAC-qa-agent-selection
|
ed4f0cafa87aefd4820cca0d7f4881d2de99a9f0
|
[
"MIT"
] | 3
|
2021-04-16T12:43:41.000Z
|
2021-11-25T04:21:43.000Z
|
import torch
from torch import nn
class TransformerModel(nn.Module):
def __init__(self, config, bert):
super(TransformerModel, self).__init__()
self.config = config
self.model_config = config["model"]
self.agents_extended = config.get("agents_extended", 0)
self.num_labels = len(config["all_agents"])-self.agents_extended
self.bert = bert
self.dropout = nn.Dropout(self.model_config.get("dropout", 0.1))
self.classifier = nn.Conv1d(1, self.num_labels, bert.config.hidden_size)
if self.agents_extended > 0:
self.extend_classifier = nn.Conv1d(1, self.agents_extended, bert.config.hidden_size)
self.sigmoid = nn.Sigmoid()
def forward(self, input_ids=None, attention_mask=None, labels=None, pos_weight=None):
bert_outputs = self.bert(input_ids, attention_mask=attention_mask)
pooled_output = bert_outputs[0][:,0]
pooled_output = self.dropout(pooled_output) # shape (batch_size, hidden_size)
pooled_output = torch.unsqueeze(pooled_output, 1) # shape (batch_size, 1, hidden_size) for convolution
logits = self.classifier(pooled_output).squeeze(dim=2) # shape (batch_size, num_labels)
if self.agents_extended > 0:
ext_logits = self.extend_classifier(pooled_output).squeeze(dim=2)
logits = torch.cat((logits, ext_logits), dim=1)
outputs = (self.sigmoid(logits),)
if labels is not None:
# against class imbalances
if pos_weight is None:
pos_weight = torch.ones(logits.size()[1]).float()
loss_fct = nn.BCEWithLogitsLoss(pos_weight=pos_weight, reduction="mean")
loss = loss_fct(logits, labels)
outputs = outputs + (loss,)
return outputs # sigmoid(logits), (loss)
class TransformerModelV2(nn.Module):
def __init__(self, config, bert):
super(TransformerModelV2, self).__init__()
self.config = config
self.model_config = config["model"]
self.agents_extended = config.get("agents_extended", 0)
self.num_labels = len(config["all_agents"])-self.agents_extended
self.bert = bert
self.dropout = nn.Dropout(self.model_config.get("dropout", 0.1))
class_dim = self.model_config.get("classification_dim", 756)
self.adapter = nn.Conv1d(1, self.num_labels*class_dim, bert.config.hidden_size)
self.classifier = nn.Conv1d(self.num_labels*class_dim, self.num_labels, 1, groups=self.num_labels)
if self.agents_extended > 0:
self.extend_adapter = nn.Conv1d(1, self.agents_extended*class_dim, bert.config.hidden_size)
self.extend_classifier = nn.Conv1d(self.agents_extended*class_dim, self.agents_extended, 1, groups=self.agents_extended)
self.sigmoid = nn.Sigmoid()
self.softmax = nn.Softmax(dim=1)
def forward(self, input_ids=None, attention_mask=None, labels=None, pos_weight=None, reduction="mean"):
bert_outputs = self.bert(input_ids, attention_mask=attention_mask)
pooled_output = bert_outputs[0][:,0]
pooled_output = self.dropout(pooled_output) # shape (batch_size, hidden_size)
if self.agents_extended > 0:
ext_output = nn.GELU()(self.extend_adapter(pooled_output.unsqueeze(1)))
ext_output = self.dropout(ext_output)
pooled_output = nn.GELU()(self.adapter(pooled_output.unsqueeze(1)))
pooled_output = self.dropout(pooled_output) # shape (batch_size, num_labels*class_dim, 1)
logits = self.classifier(pooled_output).squeeze(dim=2) # shape (batch_size, num_labels)
if self.agents_extended > 0:
ext_logits = self.extend_classifier(ext_output).squeeze(dim=2)
logits = torch.cat((logits, ext_logits), dim=1)
outputs = (self.sigmoid(logits),)
if labels is not None:
# against class imbalances
if pos_weight is None:
pos_weight = torch.ones(logits.size()[1]).float()
loss_fct = nn.BCEWithLogitsLoss(pos_weight=pos_weight, reduction=reduction)
loss = loss_fct(logits, labels)
outputs = outputs + (loss,)
return outputs # sigmoid(logits), (loss)
class TransformerModelSoftmax(nn.Module):
def __init__(self, config, bert):
super(TransformerModelSoftmax, self).__init__()
self.config = config
self.model_config = config["model"]
self.agents_extended = config.get("agents_extended", 0)
self.num_labels = len(config["all_agents"])-self.agents_extended
self.bert = bert
self.dropout = nn.Dropout(self.model_config.get("dropout", 0.1))
class_dim = self.model_config.get("classification_dim", 756)
self.adapter = nn.Conv1d(1, self.num_labels*class_dim, bert.config.hidden_size)
self.classifier = nn.Conv1d(self.num_labels*class_dim, self.num_labels, 1, groups=self.num_labels)
if self.agents_extended > 0:
self.extend_adapter = nn.Conv1d(1, self.agents_extended*class_dim, bert.config.hidden_size)
self.extend_classifier = nn.Conv1d(self.agents_extended*class_dim, self.agents_extended, 1, groups=self.agents_extended)
self.softmax = nn.Softmax(dim=1)
def forward(self, input_ids=None, attention_mask=None, labels=None, pos_weight=None):
bert_outputs = self.bert(input_ids, attention_mask=attention_mask)
pooled_output = bert_outputs[0][:,0]
pooled_output = self.dropout(pooled_output) # shape (batch_size, hidden_size)
if self.agents_extended > 0:
ext_output = nn.GELU()(self.extend_adapter(pooled_output.unsqueeze(1)))
ext_output = self.dropout(ext_output)
pooled_output = nn.GELU()(self.adapter(pooled_output.unsqueeze(1)))
pooled_output = self.dropout(pooled_output) # shape (batch_size, num_labels*class_dim, 1)
logits = self.classifier(pooled_output).squeeze(dim=2) # shape (batch_size, num_labels)
if self.agents_extended > 0:
ext_logits = self.extend_classifier(ext_output).squeeze(dim=2)
logits = torch.cat((logits, ext_logits), dim=1)
outputs = (logits,)
if labels is not None:
# against class imbalances
if pos_weight is None:
pos_weight = torch.ones(logits.size()[1]).float()
loss_fct = nn.CrossEntropyLoss(weight=pos_weight, reduction="mean")
loss = loss_fct(logits, labels)
outputs = outputs + (loss,)
return outputs # logits, (loss)
class TransformerModelMSE(nn.Module):
def __init__(self, config, bert):
super(TransformerModelMSE, self).__init__()
self.config = config
self.model_config = config["model"]
self.agents_extended = config.get("agents_extended", 0)
self.num_labels = len(config["all_agents"])-self.agents_extended
self.bert = bert
self.dropout = nn.Dropout(self.model_config.get("dropout", 0.1))
class_dim = self.model_config.get("classification_dim", 756)
self.adapter = nn.Conv1d(1, self.num_labels*class_dim, bert.config.hidden_size)
self.classifier = nn.Conv1d(self.num_labels*class_dim, self.num_labels, 1, groups=self.num_labels)
if self.agents_extended > 0:
self.extend_adapter = nn.Conv1d(1, self.agents_extended*class_dim, bert.config.hidden_size)
self.extend_classifier = nn.Conv1d(self.agents_extended*class_dim, self.agents_extended, 1, groups=self.agents_extended)
def forward(self, input_ids=None, attention_mask=None, labels=None, pos_weight=None):
bert_outputs = self.bert(input_ids, attention_mask=attention_mask)
pooled_output = bert_outputs[0][:,0]
pooled_output = self.dropout(pooled_output) # shape (batch_size, hidden_size)
if self.agents_extended > 0:
ext_output = nn.GELU()(self.extend_adapter(pooled_output.unsqueeze(1)))
ext_output = self.dropout(ext_output)
pooled_output = nn.GELU()(self.adapter(pooled_output.unsqueeze(1)))
pooled_output = self.dropout(pooled_output) # shape (batch_size, num_labels*class_dim, 1)
logits = self.classifier(pooled_output).squeeze(dim=2) # shape (batch_size, num_labels)
if self.agents_extended > 0:
ext_logits = self.extend_classifier(ext_output).squeeze(dim=2)
logits = torch.cat((logits, ext_logits), dim=1)
outputs = (logits,)
if labels is not None:
loss_fct = nn.MSELoss()
loss = loss_fct(logits, labels)
outputs = outputs + (loss,)
return outputs # logits, (loss)
class TransformerModelPairwise(nn.Module):
def __init__(self, config, bert):
super(TransformerModelPairwise, self).__init__()
self.config = config
self.model_config = config["model"]
self.agents_extended = config.get("agents_extended", 0)
self.num_labels = len(config["all_agents"])-self.agents_extended
self.bert = bert
self.dropout = nn.Dropout(self.model_config.get("dropout", 0.1))
class_dim = self.model_config.get("classification_dim", 756)
self.adapter = nn.Conv1d(1, self.num_labels*class_dim, bert.config.hidden_size)
self.classifier = nn.Conv1d(self.num_labels*class_dim, self.num_labels, 1, groups=self.num_labels)
if self.agents_extended > 0:
self.extend_adapter = nn.Conv1d(1, self.agents_extended*class_dim, bert.config.hidden_size)
self.extend_classifier = nn.Conv1d(self.agents_extended*class_dim, self.agents_extended, 1, groups=self.agents_extended)
def forward(self, input_ids=None, attention_mask=None, labels=None, pos_weight=None):
bert_outputs = self.bert(input_ids, attention_mask=attention_mask)
pooled_output = bert_outputs[0][:,0]
pooled_output = self.dropout(pooled_output) # shape (batch_size, hidden_size)
if self.agents_extended > 0:
ext_output = nn.GELU()(self.extend_adapter(pooled_output.unsqueeze(1)))
ext_output = self.dropout(ext_output)
pooled_output = nn.GELU()(self.adapter(pooled_output.unsqueeze(1)))
pooled_output = self.dropout(pooled_output) # shape (batch_size, num_labels*class_dim, 1)
logits = self.classifier(pooled_output).squeeze(dim=2) # shape (batch_size, num_labels)
if self.agents_extended > 0:
ext_logits = self.extend_classifier(ext_output).squeeze(dim=2)
logits = torch.cat((logits, ext_logits), dim=1)
outputs = (logits,)
if labels is not None:
loss_fct = nn.MultiLabelMarginLoss()
loss = loss_fct(logits, labels)
outputs = outputs + (loss,)
return outputs # logits, (loss)
class TransformerModelV3(nn.Module):
def __init__(self, config, bert):
super(TransformerModelV3, self).__init__()
self.config = config
self.model_config = config["model"]
self.agents_extended = config.get("agents_extended", 0)
self.num_labels = len(config["all_agents"])-self.agents_extended
self.bert = bert
self.dropout = nn.Dropout(self.model_config.get("dropout", 0.1))
class_dim = self.model_config.get("classification_dim", 756)
self.preclass1 = nn.Linear(bert.config.hidden_size, class_dim//2)
self.preclass2 = nn.Linear(class_dim//2, class_dim)
self.embedding = nn.Parameter(torch.FloatTensor(self.num_labels, class_dim).uniform_(-1, 1))
if self.agents_extended > 0:
self.extend_embedding = nn.Parameter(torch.FloatTensor(self.agents_extended, class_dim).uniform_(-1, 1))
self.cosine = nn.CosineSimilarity(dim=2)
def forward(self, input_ids=None, attention_mask=None, labels=None, pos_weight=None):
bert_outputs = self.bert(input_ids, attention_mask=attention_mask)
pooled_output = bert_outputs[0][:,0]
pooled_output = self.dropout(pooled_output) # shape (batch_size, hidden_size)
pooled_output = nn.GELU()(self.preclass1(pooled_output))
pooled_output = self.dropout(pooled_output) # shape (batch_size, class_dim // 2)
pooled_output = nn.GELU()(self.preclass2(pooled_output))
cosine = self.cosine(self.embedding.unsqueeze(dim=0).repeat(pooled_output.size()[0], 1, 1), pooled_output.unsqueeze(1).repeat(1, self.num_labels, 1))
if self.agents_extended > 0:
ext_cosine = self.cosine(self.extend_embedding.unsqueeze(dim=0).repeat(pooled_output.size()[0], 1, 1), pooled_output.unsqueeze(1).repeat(1, self.agents_extended, 1))
cosine = torch.cat((cosine, ext_cosine), dim=1)
outputs = (cosine,)
if labels is not None:
# against class imbalances
if pos_weight is None:
pos_weight = torch.ones(cosine.size()[1]).float()
pos_weight = torch.clamp(pos_weight.repeat(cosine.size()[0], 1) * labels, 1, 1000)
loss_fct = nn.HingeEmbeddingLoss(reduction="none")
cos_dist = 1-cosine
labels = labels*2 - 1 # transform to -1, 1 labels
hinges = torch.cat([loss_fct(cos_dist[:, i], labels[:, i]) for i in range(cos_dist.size()[1])]).reshape(cos_dist.size()[0], -1)
loss = torch.mean(pos_weight*hinges)
outputs = outputs + (loss,)
return outputs # cosine similarities, (loss)
class TransformerModelPretrainQC(nn.Module):
def __init__(self, config, bert):
super(TransformerModelPretrainQC, self).__init__()
self.config = config
self.model_config = config["model"]
self.num_labels = len(config["all_agents"])
self.bert = bert
self.dropout = nn.Dropout(self.model_config.get("dropout", 0.1))
class_dim = self.model_config.get("classification_dim", 756)
self.preclass = nn.Linear(bert.config.hidden_size, class_dim)
self.classifier = nn.Linear(class_dim, self.num_labels)
self.softmax = nn.Softmax(dim=1)
def forward(self, input_ids=None, attention_mask=None, labels=None, weights=None):
bert_outputs = self.bert(input_ids, attention_mask=attention_mask)
pooled_output = bert_outputs[0][:,0]
pooled_output = self.dropout(pooled_output) # shape (batch_size, hidden_size)
pooled_output = nn.Tanh()(self.preclass(pooled_output))
pooled_output = self.dropout(pooled_output) # shape (batch_size, class_size)
logits = self.classifier(pooled_output) # shape (batch_size, num_labels)
outputs = (self.softmax(logits),)
if labels is not None:
# against class imbalances
if weights is None:
weights = torch.ones(logits.size()[1]).float()
loss_fct = nn.CrossEntropyLoss(weight=weights, reduction="mean")
loss = loss_fct(logits, labels)
outputs = outputs + (loss,)
        return outputs  # softmax probabilities, (loss)
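# A minimal usage sketch (hypothetical config, inputs and encoder; `bert` is
# any HuggingFace-style model whose first output holds the token embeddings):
#
#   config = {"all_agents": ["agent_a", "agent_b", "agent_c"],
#             "model": {"dropout": 0.1, "classification_dim": 768}}
#   model = TransformerModelPretrainQC(config, bert)
#   probs, loss = model(input_ids, attention_mask=attention_mask,
#                       labels=torch.tensor([0, 2]))  # class indices per sample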
| 52.045139
| 177
| 0.667089
| 1,934
| 14,989
| 4.935884
| 0.054292
| 0.080453
| 0.088624
| 0.033522
| 0.885921
| 0.870836
| 0.85397
| 0.846428
| 0.811125
| 0.805573
| 0
| 0.015479
| 0.215558
| 14,989
| 287
| 178
| 52.226481
| 0.796394
| 0.064381
| 0
| 0.775934
| 0
| 0
| 0.02659
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058091
| false
| 0
| 0.008299
| 0
| 0.124481
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 6160d2e12fcb28bcd451e01996ffa0a3dc2a83f9
| 12,683
| py
| Python
| tests/controls/test_before_experiment_control.py
| chaostoolkit-incubator/chaostoolkit-reliably
| f7d7f1f262b9416f6caa66ade2082119d9718d50
| ["Apache-2.0"] | null | null | null
| tests/controls/test_before_experiment_control.py
| chaostoolkit-incubator/chaostoolkit-reliably
| f7d7f1f262b9416f6caa66ade2082119d9718d50
| ["Apache-2.0"] | 4
| 2021-07-22T14:07:36.000Z
| 2022-01-28T12:50:22.000Z
| tests/controls/test_before_experiment_control.py
| chaostoolkit-incubator/chaostoolkit-reliably
| f7d7f1f262b9416f6caa66ade2082119d9718d50
| ["Apache-2.0"] | null | null | null
from typing import Any, Dict, cast
from unittest.mock import MagicMock, patch
from chaosreliably.controls import experiment
from chaosreliably.types import (
EntityContext,
EntityContextExperimentEventLabels,
EntityContextExperimentLabels,
EntityContextExperimentRunLabels,
EntityContextExperimentVersionLabels,
EntityContextMetadata,
EventType,
)
@patch("chaosreliably.controls.experiment._create_experiment_event")
@patch("chaosreliably.controls.experiment._create_experiment_run")
@patch("chaosreliably.controls.experiment._create_experiment_version")
@patch("chaosreliably.controls.experiment._create_experiment")
def test_that_create_experiment_entities_for_before_experiment_control_creates_entities(
mock_create_experiment: MagicMock,
mock_create_experiment_version: MagicMock,
mock_create_experiment_run: MagicMock,
mock_create_experiment_event: MagicMock,
) -> None:
title = "A title"
commit_hash = "59f9f577e2d90719098f4d23d26329ce41f2d0bd"
source = "https://github.com/chaostoolkit-incubator/chaostoolkit-reliably/exp.json"
user = "TestUser"
name = f"Experiment: {title} - Started"
experiment_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentLabels(title=title),
)
)
experiment_version_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentVersionLabels(
commit_hash=commit_hash,
source=source,
),
related_to=[experiment_context.metadata.labels],
)
)
experiment_run_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentRunLabels(user=user),
related_to=[experiment_version_context.metadata.labels],
)
)
experiment_event_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentEventLabels(
event_type=EventType.EXPERIMENT_START.value,
name=name,
output=str(None),
),
related_to=[experiment_run_context.metadata.labels],
)
)
mock_create_experiment.return_value = experiment_context.metadata.labels
mock_create_experiment_version.return_value = (
experiment_version_context.metadata.labels
)
mock_create_experiment_run.return_value = experiment_run_context.metadata.labels
mock_create_experiment_event.return_value = experiment_event_context.metadata.labels
experiment_run_labels = (
experiment._create_experiment_entities_for_before_experiment_control(
experiment_title=title,
commit_hash=commit_hash,
source=source,
user=user,
configuration=None,
secrets=None,
)
)
assert experiment_run_labels == experiment_run_context.metadata.labels
mock_create_experiment.assert_called_once_with(
experiment_title=title, configuration=None, secrets=None, related_to_labels=[]
)
mock_create_experiment_version.assert_called_once_with(
commit_hash=commit_hash,
source=source,
experiment_labels=experiment_context.metadata.labels,
configuration=None,
secrets=None,
)
mock_create_experiment_run.assert_called_once_with(
user=user,
experiment_version_labels=experiment_version_context.metadata.labels,
configuration=None,
secrets=None,
)
mock_create_experiment_event.assert_called_once_with(
event_type=EventType.EXPERIMENT_START,
name=name,
output=None,
experiment_run_labels=experiment_run_context.metadata.labels,
configuration=None,
secrets=None,
)
@patch("chaosreliably.controls.experiment._create_experiment_event")
@patch("chaosreliably.controls.experiment._create_experiment_run")
@patch("chaosreliably.controls.experiment._create_experiment_version")
@patch("chaosreliably.controls.experiment._create_experiment")
def test_that_create_experiment_entities_for_before_experiment_control_creates_entities_when_experiment_has_relations( # Noqa
mock_create_experiment: MagicMock,
mock_create_experiment_version: MagicMock,
mock_create_experiment_run: MagicMock,
mock_create_experiment_event: MagicMock,
) -> None:
title = "A title"
commit_hash = "59f9f577e2d90719098f4d23d26329ce41f2d0bd"
source = "https://github.com/chaostoolkit-incubator/chaostoolkit-reliably/exp.json"
user = "TestUser"
name = f"Experiment: {title} - Started"
related_to_labels = [
{"name": "SLO Name 1", "service": "My services name"},
{"random_key": "A random value"},
]
experiment_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentLabels(title=title),
related_to=related_to_labels,
)
)
experiment_version_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentVersionLabels(
commit_hash=commit_hash,
source=source,
),
related_to=[experiment_context.metadata.labels],
)
)
experiment_run_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentRunLabels(user=user),
related_to=[experiment_version_context.metadata.labels],
)
)
experiment_event_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentEventLabels(
event_type=EventType.EXPERIMENT_START.value,
name=name,
output=str(None),
),
related_to=[experiment_run_context.metadata.labels],
)
)
mock_create_experiment.return_value = experiment_context.metadata.labels
mock_create_experiment_version.return_value = (
experiment_version_context.metadata.labels
)
mock_create_experiment_run.return_value = experiment_run_context.metadata.labels
mock_create_experiment_event.return_value = experiment_event_context.metadata.labels
experiment_run_labels = (
experiment._create_experiment_entities_for_before_experiment_control(
experiment_title=title,
commit_hash=commit_hash,
source=source,
user=user,
configuration=None,
secrets=None,
experiment_related_to_labels=related_to_labels,
)
)
assert experiment_run_labels == experiment_run_context.metadata.labels
mock_create_experiment.assert_called_once_with(
experiment_title=title,
configuration=None,
secrets=None,
related_to_labels=related_to_labels,
)
mock_create_experiment_version.assert_called_once_with(
commit_hash=commit_hash,
source=source,
experiment_labels=experiment_context.metadata.labels,
configuration=None,
secrets=None,
)
mock_create_experiment_run.assert_called_once_with(
user=user,
experiment_version_labels=experiment_version_context.metadata.labels,
configuration=None,
secrets=None,
)
mock_create_experiment_event.assert_called_once_with(
event_type=EventType.EXPERIMENT_START,
name=name,
output=None,
experiment_run_labels=experiment_run_context.metadata.labels,
configuration=None,
secrets=None,
)
@patch(
"chaosreliably.controls.experiment._create_experiment_entities_for_before_experiment_control" # Noqa
)
def test_before_experiment_control_calls_create_experiment_entities(
mock_create_experiment_entities: MagicMock,
) -> None:
configuration = {"random_config": {"hi": "hello"}, "thing": 123}
title = "A title"
commit_hash = "59f9f577e2d90719098f4d23d26329ce41f2d0bd"
source = "https://github.com/chaostoolkit-incubator/chaostoolkit-reliably/exp.json"
user = "TestUser"
experiment_run_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentRunLabels(user=user)
)
)
mock_create_experiment_entities.return_value = (
experiment_run_context.metadata.labels
)
experiment.before_experiment_control(
context={"title": title},
**{
"configuration": configuration,
"secrets": None,
"commit_hash": commit_hash,
"source": source,
"user": user,
},
)
mock_create_experiment_entities.assert_called_once_with(
experiment_title=title,
commit_hash=commit_hash,
source=source,
user=user,
configuration=configuration,
secrets=None,
experiment_related_to_labels=[],
)
assert "chaosreliably" in configuration
chaosreliably = cast(Dict[str, Any], configuration["chaosreliably"])
assert (
chaosreliably["experiment_run_labels"] == experiment_run_context.metadata.labels
)
@patch(
"chaosreliably.controls.experiment._create_experiment_entities_for_before_experiment_control" # Noqa
)
def test_before_experiment_control_calls_create_experiment_entities_when_experiment_has_relations( # Noqa
mock_create_experiment_entities: MagicMock,
) -> None:
configuration = {"random_config": {"hi": "hello"}, "thing": 123}
title = "A title"
commit_hash = "59f9f577e2d90719098f4d23d26329ce41f2d0bd"
source = "https://github.com/chaostoolkit-incubator/chaostoolkit-reliably/exp.json"
user = "TestUser"
related_to_labels = [
{"name": "SLO Name 1", "service": "My services name"},
{"random_key": "A random value"},
]
experiment_run_context = EntityContext(
metadata=EntityContextMetadata(
labels=EntityContextExperimentRunLabels(user=user),
related_to=related_to_labels,
)
)
mock_create_experiment_entities.return_value = (
experiment_run_context.metadata.labels
)
experiment.before_experiment_control(
context={"title": title},
**{
"configuration": configuration,
"secrets": None,
"commit_hash": commit_hash,
"source": source,
"user": user,
"experiment_related_to_labels": related_to_labels,
},
)
mock_create_experiment_entities.assert_called_once_with(
experiment_title=title,
commit_hash=commit_hash,
source=source,
user=user,
configuration=configuration,
secrets=None,
experiment_related_to_labels=related_to_labels,
)
assert "chaosreliably" in configuration
chaosreliably = cast(Dict[str, Any], configuration["chaosreliably"])
assert (
chaosreliably["experiment_run_labels"] == experiment_run_context.metadata.labels
)
@patch("chaosreliably.controls.experiment.logger")
@patch(
"chaosreliably.controls.experiment._create_experiment_entities_for_before_experiment_control" # Noqa
)
def test_that_before_experiment_control_does_nothing_if_kwargs_not_present(
mock_create_experiment_entities: MagicMock,
mock_logger: MagicMock,
) -> None:
experiment.before_experiment_control(
context={"title": "a-title"}, **{"configuration": None, "secrets": None}
)
mock_logger.debug.assert_called_once_with(
"The parameters: `commit_hash`, `source`, and `user` are required for the "
"chaosreliably controls, please provide them. This Experiment Run will not "
"be tracked with Reliably."
)
mock_create_experiment_entities.assert_not_called()
@patch("chaosreliably.controls.experiment.logger")
@patch(
"chaosreliably.controls.experiment._create_experiment_entities_for_before_experiment_control" # Noqa
)
def test_that_an_exception_does_not_get_raised_and_warning_logged(
mock_create_experiment_entities: MagicMock, mock_logger: MagicMock
) -> None:
mock_create_experiment_entities.side_effect = Exception("An exception happened")
experiment.before_experiment_control(
context={"title": "a-title"},
**{
"configuration": None,
"secrets": None,
"commit_hash": "blah",
"source": "blah",
"user": "blah",
},
)
mock_logger.debug.assert_called_once_with(
"An error occurred: An exception happened, whilst running the Before Experiment"
" control, no further entities will be created, the Experiment execution won't"
" be affected"
)
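# A minimal sketch (hypothetical values drawn from the tests above) of the
# kwargs contract: without commit_hash, source and user the control logs the
# debug message above and does nothing further.
#
#   experiment.before_experiment_control(
#       context={"title": "A title"},
#       configuration={}, secrets=None,
#       commit_hash="59f9f577e2d90719098f4d23d26329ce41f2d0bd",
#       source="https://github.com/chaostoolkit-incubator/chaostoolkit-reliably/exp.json",
#       user="TestUser",
#   )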
| 36.133903
| 126
| 0.703777
| 1,214
| 12,683
| 6.979407
| 0.10626
| 0.098194
| 0.080255
| 0.059483
| 0.907589
| 0.900744
| 0.900744
| 0.889531
| 0.889531
| 0.841379
| 0
| 0.011654
| 0.21517
| 12,683
| 350
| 127
| 36.237143
| 0.839562
| 0.002287
| 0
| 0.710769
| 0
| 0
| 0.182257
| 0.089033
| 0
| 0
| 0
| 0
| 0.058462
| 1
| 0.018462
| false
| 0
| 0.012308
| 0
| 0.030769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 61831a77b2543543f34026115ed1aead58ee501f
| 16,367
| py
| Python
| rti_python/Ensemble/RangeTracking.py
| JeromeJGuay/viking_ADCP_processing
| 24ea1ba6d7e72d956435811bcc5519807396d88f
| ["MIT"] | null | null | null
| rti_python/Ensemble/RangeTracking.py
| JeromeJGuay/viking_ADCP_processing
| 24ea1ba6d7e72d956435811bcc5519807396d88f
| ["MIT"] | 1
| 2021-11-25T20:13:06.000Z
| 2021-11-25T20:13:06.000Z
| rti_python/Ensemble/RangeTracking.py
| JeromeJGuay/viking_ADCP_processing
| 24ea1ba6d7e72d956435811bcc5519807396d88f
| ["MIT"] | null | null | null
from rti_python.Ensemble.Ensemble import Ensemble
import logging
class RangeTracking:
"""
Range Tracking DataSet.
Values that give details about the wave heights.
"""
def __init__(self, num_elements=8, element_multiplier=1):
self.ds_type = 10 # Float
self.num_elements = num_elements
self.element_multiplier = element_multiplier
self.image = 0
self.name_len = 8
self.Name = "E000015\0"
self.NumBeams = 0.0
self.SNR = []
self.Range = []
self.Pings = []
self.Amplitude = []
self.Correlation = []
self.BeamVelocity = []
self.InstrumentVelocity = []
self.EarthVelocity = []
def decode(self, data):
"""
Take the data bytearray. Decode the data to populate
the values.
:param data: Bytearray for the dataset.
"""
packet_pointer = Ensemble.GetBaseDataSize(self.name_len)
self.NumBeams = Ensemble.GetFloat(packet_pointer + Ensemble().BytesInFloat * 0, Ensemble().BytesInFloat, data)
self.num_elements = (8 * int(self.NumBeams)) + 1
        num_beams = int(self.NumBeams)
        bytes_in_float = Ensemble().BytesInFloat

        def get_float(index):
            # Read the float occupying the given slot of this dataset's payload.
            return Ensemble.GetFloat(packet_pointer + bytes_in_float * index, bytes_in_float, data)

        # The payload is laid out as consecutive blocks of num_beams floats per
        # field: SNR, Range and Pings are always present, while Amplitude,
        # Correlation and the three velocity fields only appear in longer
        # payloads. The per-beam length thresholds of the unrolled original
        # (44/56/68/80 bytes for 1-4 beams) collapse to 32 + 12 * num_beams.
        if 1 <= num_beams <= 4:
            index = 1
            for field in (self.SNR, self.Range, self.Pings):
                for _ in range(num_beams):
                    field.append(get_float(index))
                    index += 1
            if len(data) > 32 + 12 * num_beams:
                for field in (self.Amplitude, self.Correlation, self.BeamVelocity,
                              self.InstrumentVelocity, self.EarthVelocity):
                    for _ in range(num_beams):
                        field.append(get_float(index))
                        index += 1
logging.debug(self.NumBeams)
logging.debug(self.SNR)
logging.debug(self.Range)
logging.debug(self.Pings)
logging.debug(self.Amplitude)
logging.debug(self.Correlation)
logging.debug(self.BeamVelocity)
logging.debug(self.InstrumentVelocity)
logging.debug(self.EarthVelocity)
def encode(self):
"""
Encode the data into RTB format.
:return:
"""
result = []
        self.num_elements = (8 * int(self.NumBeams)) + 1  # 8 per-beam fields, plus 1 for NumBeams
# Generate header
result += Ensemble.generate_header(self.ds_type,
self.num_elements,
self.element_multiplier,
self.image,
self.name_len,
self.Name)
# Add the data
result += Ensemble.float_to_bytes(self.NumBeams)
for beam in range(len(self.SNR)):
result += Ensemble.float_to_bytes(self.SNR[beam])
for beam in range(len(self.Range)):
result += Ensemble.float_to_bytes(self.Range[beam])
for beam in range(len(self.Pings)):
result += Ensemble.float_to_bytes(self.Pings[beam])
for beam in range(len(self.Amplitude)):
result += Ensemble.float_to_bytes(self.Amplitude[beam])
for beam in range(len(self.Correlation)):
result += Ensemble.float_to_bytes(self.Correlation[beam])
for beam in range(len(self.BeamVelocity)):
result += Ensemble.float_to_bytes(self.BeamVelocity[beam])
for beam in range(len(self.InstrumentVelocity)):
result += Ensemble.float_to_bytes(self.InstrumentVelocity[beam])
for beam in range(len(self.EarthVelocity)):
result += Ensemble.float_to_bytes(self.EarthVelocity[beam])
return result
def encode_csv(self, dt, ss_code, ss_config, blank=0, bin_size=0):
"""
Encode into CSV format.
:param dt: Datetime object.
:param ss_code: Subsystem code.
:param ss_config: Subsystem Configuration
:param blank: Blank or first bin position in meters.
:param bin_size: Bin size in meters.
:return: List of CSV lines.
"""
str_result = []
# Create the CSV strings
for beams in range(len(self.Range)):
str_result.append(Ensemble.gen_csv_line(dt, Ensemble.CSV_RT_RANGE, ss_code, ss_config, 0, beams, blank, bin_size, self.Range[beams]))
for beams in range(len(self.Pings)):
str_result.append(Ensemble.gen_csv_line(dt, Ensemble.CSV_RT_PINGS, ss_code, ss_config, 0, beams, blank, bin_size, self.Pings[beams]))
for beams in range(len(self.BeamVelocity)):
str_result.append(Ensemble.gen_csv_line(dt, Ensemble.CSV_RT_BEAM_VEL, ss_code, ss_config, 0, beams, blank, bin_size, self.BeamVelocity[beams]))
for beams in range(len(self.InstrumentVelocity)):
str_result.append(Ensemble.gen_csv_line(dt, Ensemble.CSV_RT_INSTR_VEL, ss_code, ss_config, 0, beams, blank, bin_size, self.InstrumentVelocity[beams]))
for beams in range(len(self.EarthVelocity)):
str_result.append(Ensemble.gen_csv_line(dt, Ensemble.CSV_RT_EARTH_VEL, ss_code, ss_config, 0, beams, blank, bin_size, self.EarthVelocity[beams]))
return str_result
def avg_range(self):
"""
Average the range values.
Only accumulate the good values.
:return: Average of the range values.
"""
# Accumulate the data
avg = 0.0
cnt = 0
for rng in self.Range:
if rng > 0.0:
avg += rng
cnt += 1
# Average the data and return it
if cnt >= 1:
return avg / cnt
return 0.0
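# A quick sanity check of avg_range (hypothetical values): entries that are
# not strictly positive are treated as bad and excluded from the average.
#
#   rt = RangeTracking()
#   rt.Range = [10.0, 0.0, 12.0, -1.0]
#   rt.avg_range()  # -> 11.0, averaging only 10.0 and 12.0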
| 62.231939
| 163
| 0.658642
| 1,695
| 16,367
| 6.257227
| 0.087906
| 0.305487
| 0.162361
| 0.221478
| 0.839619
| 0.82359
| 0.779276
| 0.752593
| 0.746559
| 0.741656
| 0
| 0.01412
| 0.225454
| 16,367
| 262
| 164
| 62.469466
| 0.822513
| 0.043685
| 0
| 0.111765
| 0
| 0
| 0.000593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0
| 0.011765
| 0
| 0.070588
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 6189c9493a0e59b8174e8eab575f242b0b88c547
| 171
| py
| Python
| PyBugger/file_loader.py
| flabbet/Pybugger
| 1ecb81a89f484bd7570aec0955ceb32763196605
| ["MIT"] | null | null | null
| PyBugger/file_loader.py
| flabbet/Pybugger
| 1ecb81a89f484bd7570aec0955ceb32763196605
| ["MIT"] | null | null | null
| PyBugger/file_loader.py
| flabbet/Pybugger
| 1ecb81a89f484bd7570aec0955ceb32763196605
| ["MIT"] | null | null | null
import importlib
def load_py_file(file_path):
    # Strip a trailing ".py" so a file name can be passed as well as a module
    # name; a substring replace would also mangle names like "my.python_helper".
    # Note that importlib.import_module expects a dotted module path.
    if file_path.endswith(".py"):
        file_path = file_path[:-3]
    return importlib.import_module(file_path)
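# A usage sketch (hypothetical module name): both calls import and return the
# same module object, since the trailing ".py" is stripped before the import.
#
#   mod = load_py_file("my_plugin")
#   mod = load_py_file("my_plugin.py")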
| 19
| 48
| 0.690058
| 25
| 171
| 4.4
| 0.48
| 0.363636
| 0.218182
| 0.290909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192982
| 171
| 8
| 49
| 21.375
| 0.797101
| 0
| 0
| 0
| 0
| 0
| 0.035294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 61a8c58453f17302e22c8cd62294f97038c0346c
| 59,786
| py
| Python
| sharpy/structure/utils/lagrangeconstraints.py
| ostodieck/sharpy
| b85aa1c001a0ec851af4eb259cce7c01dfa68b9e
| ["BSD-3-Clause"] | 1
| 2020-07-27T05:15:35.000Z
| 2020-07-27T05:15:35.000Z
| sharpy/structure/utils/lagrangeconstraints.py
| briandesilva/sharpy
| aed86428ff88fd14d36cabd91cf7e04b5fc9a39a
| ["BSD-3-Clause"] | null | null | null
| sharpy/structure/utils/lagrangeconstraints.py
| briandesilva/sharpy
| aed86428ff88fd14d36cabd91cf7e04b5fc9a39a
| ["BSD-3-Clause"] | 1
| 2020-05-25T17:11:09.000Z
| 2020-05-25T17:11:09.000Z
"""
LagrangeConstraints library
Library used to create the matrices associated with boundary conditions through
the method of Lagrange multipliers.
Args:
Returns:
Examples:
To use this library: import sharpy.structure.utils.lagrangeconstraints as lagrangeconstraints
Notes:
"""
from abc import ABCMeta, abstractmethod
import sharpy.utils.cout_utils as cout
import os
import ctypes as ct
import numpy as np
import sharpy.utils.algebra as algebra
dict_of_lc = {}
lc = {}  # for internal use
# decorator
def lagrangeconstraint(arg):
# global available_solvers
global dict_of_lc
try:
arg._lc_id
except AttributeError:
raise AttributeError('Class defined as lagrange constraint has no _lc_id attribute')
dict_of_lc[arg._lc_id] = arg
return arg
def print_available_lc():
cout.cout_wrap('The available lagrange constraints on this session are:', 2)
for name, i_lc in dict_of_lc.items():
cout.cout_wrap('%s ' % i_lc._lc_id, 2)
def lc_from_string(string):
return dict_of_lc[string]
def lc_list_from_path(cwd):
onlyfiles = [f for f in os.listdir(cwd) if os.path.isfile(os.path.join(cwd, f))]
for i_file in range(len(onlyfiles)):
if ".py" in onlyfiles[i_file]:
if onlyfiles[i_file] == "__init__.py":
onlyfiles[i_file] = ""
continue
onlyfiles[i_file] = onlyfiles[i_file].replace('.py', '')
else:
onlyfiles[i_file] = ""
files = [file for file in onlyfiles if not file == ""]
return files
def initialise_lc(lc_name, print_info=True):
if print_info:
cout.cout_wrap('Generating an instance of %s' % lc_name, 2)
cls_type = lc_from_string(lc_name)
lc = cls_type()
return lc
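# A minimal sketch of the registry flow (the ids come from the
# @lagrangeconstraint classes defined below, e.g. 'spherical_FoR'
# with three equations):
#
#   print_available_lc()                 # lists every registered _lc_id
#   lc = initialise_lc('spherical_FoR')  # instantiate a constraint by its id
#   n_eq = lc.get_n_eq()                 # -> 3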
class BaseLagrangeConstraint(metaclass=ABCMeta):
def __init__(self):
self._n_eq = None
self._ieq = None
@abstractmethod
def get_n_eq(self):
pass
@abstractmethod
# def initialise(self, **kwargs):
def initialise(self, MBdict_entry, ieq):
pass
@abstractmethod
# def staticmat(self, **kwargs):
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
pass
@abstractmethod
# def dynamicmat(self, **kwargs):
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
pass
@abstractmethod
# def staticpost(self, **kwargs):
def staticpost(self, lc_list, MB_beam, MB_tstep):
pass
@abstractmethod
# def dynamicpost(self, **kwargs):
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
pass
################################################################################
# Auxiliary functions
################################################################################
def define_node_dof(MB_beam, node_body, num_node):
"""
define_node_dof
Define the position of the first degree of freedom associated to a certain node
Args:
MB_beam(list): list of 'Beam'
node_body(int): body to which the node belongs
        num_node(int): number of the node within the body
Returns:
node_dof(int): first degree of freedom associated to the node
Examples:
Notes:
"""
node_dof = 0
for ibody in range(node_body):
node_dof += MB_beam[ibody].num_dof.value
if MB_beam[ibody].FoR_movement == 'free':
node_dof += 10
node_dof += 6*MB_beam[node_body].vdof[num_node]
return node_dof
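# A worked example (hypothetical sizes): if body 0 carries 30 structural DoF
# and a 'free' FoR (10 extra DoF), a node of body 1 with vdof index 2 starts at
#
#   node_dof = 30 + 10 + 6 * 2 = 52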
def define_FoR_dof(MB_beam, FoR_body):
"""
define_FoR_dof
Define the position of the first degree of freedom associated to a certain frame of reference
Args:
MB_beam(list): list of 'Beam'
        FoR_body(int): body whose frame of reference is considered
    Returns:
        FoR_dof(int): first degree of freedom associated to the frame of reference
Examples:
Notes:
"""
FoR_dof = 0
for ibody in range(FoR_body):
FoR_dof += MB_beam[ibody].num_dof.value
if MB_beam[ibody].FoR_movement == 'free':
FoR_dof += 10
FoR_dof += MB_beam[FoR_body].num_dof.value
return FoR_dof
################################################################################
# Equations
################################################################################
def equal_lin_vel_node_FoR(MB_tstep, MB_beam, FoR_body, node_body, node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q):
    # Variable names. The naming of the variables can be quite confusing. The reader should bear in mind that
    # the BC relates one "node" and one "FoR" (written between quotes in these lines).
# If a variable is related to one of them starts with "node_" or "FoR_" respectively
# node_number: number of the "node" within its own body
# node_body: body number of the "node"
# node_FoR_dof: position of the first degree of freedom of the FoR to which the "node" belongs
# node_dof: position of the first degree of freedom associated to the "node"
# FoR_body: body number of the "FoR"
# FoR_dof: position of the first degree of freedom associated to the "FoR"
num_LM_eq_specific = 3
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
Bnh[:, FoR_dof:FoR_dof+3] = algebra.quat2rotation(MB_tstep[FoR_body].quat)
Bnh[:, node_dof:node_dof+3] = -1.0*algebra.quat2rotation(MB_tstep[node_body].quat)
if MB_beam[node_body].FoR_movement == 'free':
Bnh[:, node_FoR_dof:node_FoR_dof+3] = -1.0*algebra.quat2rotation(MB_tstep[node_body].quat)
Bnh[:, node_FoR_dof+3:node_FoR_dof+6] = 1.0*np.dot(algebra.quat2rotation(MB_tstep[node_body].quat),algebra.skew(MB_tstep[node_body].pos[node_number,:]))
LM_C[sys_size+ieq:sys_size+ieq+num_LM_eq_specific,:sys_size] += scalingFactor*Bnh
LM_C[:sys_size,sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += scalingFactor*np.transpose(Bnh)
LM_Q[:sys_size] += scalingFactor*np.dot(np.transpose(Bnh),Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_Q[sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += (np.dot(algebra.quat2rotation(MB_tstep[FoR_body].quat),MB_tstep[FoR_body].for_vel[0:3]) +
-1.0*np.dot(algebra.quat2rotation(MB_tstep[node_body].quat),
MB_tstep[node_body].pos_dot[node_number,:] +
MB_tstep[node_body].for_vel[0:3] +
-1.0*np.dot(algebra.skew(MB_tstep[node_body].pos[node_number,:]),MB_tstep[node_body].for_vel[3:6])))
LM_C[FoR_dof:FoR_dof+3,FoR_dof+6:FoR_dof+10] += algebra.der_CquatT_by_v(MB_tstep[FoR_body].quat,scalingFactor*Lambda_dot[ieq:ieq+num_LM_eq_specific])
if MB_beam[node_body].FoR_movement == 'free':
LM_C[node_dof:node_dof+3,node_FoR_dof+6:node_FoR_dof+10] -= algebra.der_CquatT_by_v(MB_tstep[node_body].quat,scalingFactor*Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_C[node_FoR_dof:node_FoR_dof+3,node_FoR_dof+6:node_FoR_dof+10] -= algebra.der_CquatT_by_v(MB_tstep[node_body].quat,scalingFactor*Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_C[node_FoR_dof+3:node_FoR_dof+6,node_FoR_dof+6:node_FoR_dof+10] -= np.dot(algebra.skew(MB_tstep[node_body].pos[node_number,:]),
algebra.der_CquatT_by_v(MB_tstep[node_body].quat,
scalingFactor*Lambda_dot[ieq:ieq+num_LM_eq_specific]))
LM_K[node_FoR_dof+3:node_FoR_dof+6,node_dof:node_dof+3] += algebra.skew(np.dot(algebra.quat2rotation(MB_tstep[node_body].quat).T,Lambda_dot[ieq:ieq+num_LM_eq_specific]))
ieq += 3
return ieq
def def_rot_axis_FoR_wrt_node(MB_tstep, MB_beam, FoR_body, node_body, node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, rot_axisB, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q, indep):
    # Variable names. The naming of the variables can be quite confusing. The reader should bear in mind that
    # the BC relates one "node" and one "FoR" (written between quotes in these lines).
# If a variable is related to one of them starts with "node_" or "FoR_" respectively
# node_number: number of the "node" within its own body
# node_body: body number of the "node"
# node_FoR_dof: position of the first degree of freedom of the FoR to which the "node" belongs
# node_dof: position of the first degree of freedom associated to the "node"
# FoR_body: body number of the "FoR"
# FoR_dof: position of the first degree of freedom associated to the "FoR"
ielem, inode_in_elem = MB_beam[node_body].node_master_elem[node_number]
if not indep:
aux_Bnh = algebra.multiply_matrices(algebra.skew(rot_axisB),
algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]).T,
algebra.quat2rotation(MB_tstep[node_body].quat).T,
algebra.quat2rotation(MB_tstep[FoR_body].quat))
# indep = None
n0 = np.linalg.norm(aux_Bnh[0,:])
n1 = np.linalg.norm(aux_Bnh[1,:])
n2 = np.linalg.norm(aux_Bnh[2,:])
if ((n0 < n1) and (n0 < n2)):
# indep = np.array([1,2], dtype = int)
indep[:] = [1, 2]
# new_Lambda_dot = np.array([0., Lambda_dot[ieq], Lambda_dot[ieq+1]])
elif ((n1 < n0) and (n1 < n2)):
# indep = np.array([0,2], dtype = int)
indep[:] = [0, 2]
# new_Lambda_dot = np.array([Lambda_dot[ieq], 0.0, Lambda_dot[ieq+1]])
elif ((n2 < n0) and (n2 < n1)):
# indep = np.array([0,1], dtype = int)
indep[:] = [0, 1]
# new_Lambda_dot = np.array([Lambda_dot[ieq], Lambda_dot[ieq+1], 0.0])
new_Lambda_dot = np.zeros(3)
new_Lambda_dot[indep[0]] = Lambda_dot[ieq]
new_Lambda_dot[indep[1]] = Lambda_dot[ieq+1]
num_LM_eq_specific = 2
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
# Lambda_dot[ieq:ieq+num_LM_eq_specific]
# np.concatenate((Lambda_dot[ieq:ieq+num_LM_eq_specific], np.array([0.])))
# print(indep)
Bnh[:, FoR_dof+3:FoR_dof+6] = algebra.multiply_matrices(algebra.skew(rot_axisB),
algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]).T,
algebra.quat2rotation(MB_tstep[node_body].quat).T,
algebra.quat2rotation(MB_tstep[FoR_body].quat))[indep,:]
# Constrain angular velocities
LM_Q[:sys_size] += scalingFactor*np.dot(np.transpose(Bnh),Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_Q[sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += algebra.multiply_matrices(algebra.skew(rot_axisB),
algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]).T,
algebra.quat2rotation(MB_tstep[node_body].quat).T,
algebra.quat2rotation(MB_tstep[FoR_body].quat),
MB_tstep[FoR_body].for_vel[3:6])[indep]
LM_C[sys_size+ieq:sys_size+ieq+num_LM_eq_specific,:sys_size] += scalingFactor*Bnh
LM_C[:sys_size,sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += scalingFactor*np.transpose(Bnh)
if MB_beam[node_body].FoR_movement == 'free':
LM_C[FoR_dof+3:FoR_dof+6,node_FoR_dof+6:node_FoR_dof+10] += np.dot(algebra.quat2rotation(MB_tstep[FoR_body].quat).T,
algebra.der_Cquat_by_v(MB_tstep[node_body].quat,
algebra.multiply_matrices(algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]),
algebra.skew(rot_axisB).T,
new_Lambda_dot)))
LM_C[FoR_dof+3:FoR_dof+6,FoR_dof+6:FoR_dof+10] += algebra.der_CquatT_by_v(MB_tstep[FoR_body].quat,
algebra.multiply_matrices(algebra.quat2rotation(MB_tstep[node_body].quat),
algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]).T,
algebra.skew(rot_axisB).T,
new_Lambda_dot))
LM_K[FoR_dof+3:FoR_dof+6,node_dof+3:node_dof+6] += algebra.multiply_matrices(algebra.quat2rotation(MB_tstep[FoR_body].quat).T,
algebra.quat2rotation(MB_tstep[node_body].quat),
algebra.der_Ccrv_by_v(MB_tstep[node_body].psi[ielem,inode_in_elem,:],
np.dot(algebra.skew(rot_axisB).T,
new_Lambda_dot)))
ieq += 2
return ieq
def def_rot_vel_FoR_wrt_node(MB_tstep, MB_beam, FoR_body, node_body, node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, rot_axisB, rot_vel, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q):
    # Variable names. The naming of the variables can be quite confusing. The reader should bear in mind that
    # the BC relates one "node" and one "FoR" (written between quotes in these lines).
# If a variable is related to one of them starts with "node_" or "FoR_" respectively
# node_number: number of the "node" within its own body
# node_body: body number of the "node"
# node_FoR_dof: position of the first degree of freedom of the FoR to which the "node" belongs
# node_dof: position of the first degree of freedom associated to the "node"
# FoR_body: body number of the "FoR"
# FoR_dof: position of the first degree of freedom associated to the "FoR"
num_LM_eq_specific = 1
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
# Lambda_dot[ieq:ieq+num_LM_eq_specific]
# np.concatenate((Lambda_dot[ieq:ieq+num_LM_eq_specific], np.array([0.])))
ielem, inode_in_elem = MB_beam[node_body].node_master_elem[node_number]
Bnh[:, FoR_dof+3:FoR_dof+6] = algebra.multiply_matrices(rot_axisB,
algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]).T,
algebra.quat2rotation(MB_tstep[node_body].quat).T,
algebra.quat2rotation(MB_tstep[FoR_body].quat))
# Constrain angular velocities
LM_Q[:sys_size] += scalingFactor*np.dot(np.transpose(Bnh),Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_Q[sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += algebra.multiply_matrices(rot_axisB,
algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]).T,
algebra.quat2rotation(MB_tstep[node_body].quat).T,
algebra.quat2rotation(MB_tstep[FoR_body].quat),
MB_tstep[FoR_body].for_vel[3:6]) - rot_vel
LM_C[sys_size+ieq:sys_size+ieq+num_LM_eq_specific,:sys_size] += scalingFactor*Bnh
LM_C[:sys_size,sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += scalingFactor*np.transpose(Bnh)
if MB_beam[node_body].FoR_movement == 'free':
LM_C[FoR_dof+3:FoR_dof+6,node_FoR_dof+6:node_FoR_dof+10] += np.dot(algebra.quat2rotation(MB_tstep[FoR_body].quat).T,
algebra.der_Cquat_by_v(MB_tstep[node_body].quat,
algebra.multiply_matrices(algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]),
# rot_axisB.T,
rot_axisB.T*Lambda_dot[ieq:ieq+num_LM_eq_specific])))
LM_C[FoR_dof+3:FoR_dof+6,FoR_dof+6:FoR_dof+10] += algebra.der_CquatT_by_v(MB_tstep[FoR_body].quat,
algebra.multiply_matrices(algebra.quat2rotation(MB_tstep[node_body].quat),
algebra.crv2rotation(MB_tstep[node_body].psi[ielem,inode_in_elem,:]).T,
rot_axisB.T*Lambda_dot[ieq:ieq+num_LM_eq_specific]))
LM_K[FoR_dof+3:FoR_dof+6,node_dof+3:node_dof+6] += algebra.multiply_matrices(algebra.quat2rotation(MB_tstep[FoR_body].quat).T,
algebra.quat2rotation(MB_tstep[node_body].quat),
algebra.der_Ccrv_by_v(MB_tstep[node_body].psi[ielem,inode_in_elem,:],
rot_axisB.T*Lambda_dot[ieq:ieq+num_LM_eq_specific]))
ieq += 1
return ieq
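# Bookkeeping note: the constraint classes below compose the equation blocks
# above, and their _n_eq must equal the equations consumed, e.g.
# hinge_node_FoR_constant_vel uses 3 (equal_lin_vel_node_FoR)
# + 2 (def_rot_axis_FoR_wrt_node) + 1 (def_rot_vel_FoR_wrt_node) = 6 = _n_eq.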
################################################################################
# Lagrange constraints
################################################################################
@lagrangeconstraint
class SampleLagrange(BaseLagrangeConstraint):
_lc_id = 'SampleLagrange'
def __init__(self):
self._n_eq = 3
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return np.zeros((6, 6))
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return np.zeros((10, 10))
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
@lagrangeconstraint
class hinge_node_FoR(BaseLagrangeConstraint):
_lc_id = 'hinge_node_FoR'
def __init__(self):
self.required_parameters = ['node_in_body', 'body', 'body_FoR', 'rot_axisB']
self._n_eq = 5
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.node_number = MBdict_entry['node_in_body']
self.node_body = MBdict_entry['body']
self.FoR_body = MBdict_entry['body_FoR']
self.rot_axisB = MBdict_entry['rot_axisB']
self._ieq = ieq
self.indep = []
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
# Define the position of the first degree of freedom associated to the node
node_dof = define_node_dof(MB_beam, self.node_body, self.node_number)
node_FoR_dof = define_FoR_dof(MB_beam, self.node_body)
FoR_dof = define_FoR_dof(MB_beam, self.FoR_body)
ieq = self._ieq
# Define the equations
ieq = equal_lin_vel_node_FoR(MB_tstep, MB_beam, self.FoR_body, self.node_body, self.node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q)
ieq = def_rot_axis_FoR_wrt_node(MB_tstep, MB_beam, self.FoR_body, self.node_body, self.node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, self.rot_axisB, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q, self.indep)
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
MB_tstep[self.FoR_body].for_pos[0:3] = np.dot(algebra.quat2rotation(MB_tstep[self.node_body].quat), MB_tstep[self.node_body].pos[self.node_number,:]) + MB_tstep[self.node_body].for_pos[0:3]
return
@lagrangeconstraint
class hinge_node_FoR_constant_vel(BaseLagrangeConstraint):
_lc_id = 'hinge_node_FoR_constant_vel'
def __init__(self):
self.required_parameters = ['node_in_body', 'body', 'body_FoR', 'rot_axisB', 'rot_vel']
self._n_eq = 6
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.node_number = MBdict_entry['node_in_body']
self.node_body = MBdict_entry['body']
self.FoR_body = MBdict_entry['body_FoR']
self.rot_axisB = MBdict_entry['rot_axisB']
self.rot_vel = MBdict_entry['rot_vel']
self._ieq = ieq
self.indep = []
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
# Define the position of the first degree of freedom associated to the node
node_dof = define_node_dof(MB_beam, self.node_body, self.node_number)
node_FoR_dof = define_FoR_dof(MB_beam, self.node_body)
FoR_dof = define_FoR_dof(MB_beam, self.FoR_body)
ieq = self._ieq
# Define the equations
ieq = equal_lin_vel_node_FoR(MB_tstep, MB_beam, self.FoR_body, self.node_body, self.node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q)
ieq = def_rot_axis_FoR_wrt_node(MB_tstep, MB_beam, self.FoR_body, self.node_body, self.node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, self.rot_axisB, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q, self.indep)
ieq = def_rot_vel_FoR_wrt_node(MB_tstep, MB_beam, self.FoR_body, self.node_body, self.node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, self.rot_axisB, self.rot_vel, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q)
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
MB_tstep[self.FoR_body].for_pos[0:3] = np.dot(algebra.quat2rotation(MB_tstep[self.node_body].quat), MB_tstep[self.node_body].pos[self.node_number,:]) + MB_tstep[self.node_body].for_pos[0:3]
return
@lagrangeconstraint
class spherical_node_FoR(BaseLagrangeConstraint):
_lc_id = 'spherical_node_FoR'
def __init__(self):
self.required_parameters = ['node_in_body', 'body', 'body_FoR']
self._n_eq = 3
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.node_number = MBdict_entry['node_in_body']
self.node_body = MBdict_entry['body']
self.FoR_body = MBdict_entry['body_FoR']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
# Define the position of the first degree of freedom associated to the node
node_dof = define_node_dof(MB_beam, self.node_body, self.node_number)
node_FoR_dof = define_FoR_dof(MB_beam, self.node_body)
FoR_dof = define_FoR_dof(MB_beam, self.FoR_body)
ieq = self._ieq
# Define the equations
ieq = equal_lin_vel_node_FoR(MB_tstep, MB_beam, self.FoR_body, self.node_body, self.node_number, node_FoR_dof, node_dof, FoR_dof, sys_size, Lambda_dot, scalingFactor, penaltyFactor, ieq, LM_K, LM_C, LM_Q)
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
MB_tstep[self.FoR_body].for_pos[0:3] = np.dot(algebra.quat2rotation(MB_tstep[self.node_body].quat), MB_tstep[self.node_body].pos[self.node_number,:]) + MB_tstep[self.node_body].for_pos[0:3]
return
@lagrangeconstraint
class free(BaseLagrangeConstraint):
_lc_id = 'free'
def __init__(self):
self.required_parameters = []
self._n_eq = 0
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
@lagrangeconstraint
class spherical_FoR(BaseLagrangeConstraint):
_lc_id = 'spherical_FoR'
def __init__(self):
self.required_parameters = ['body_FoR']
self._n_eq = 3
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.body_FoR = MBdict_entry['body_FoR']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
num_LM_eq_specific = self._n_eq
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
# Define the position of the first degree of freedom associated to the FoR
FoR_dof = define_FoR_dof(MB_beam, self.body_FoR)
ieq = self._ieq
Bnh[:3, FoR_dof:FoR_dof+3] = 1.0*np.eye(3)
LM_C[sys_size+ieq:sys_size+ieq+num_LM_eq_specific,:sys_size] += scalingFactor*Bnh
LM_C[:sys_size,sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += scalingFactor*np.transpose(Bnh)
LM_Q[:sys_size] += scalingFactor*np.dot(np.transpose(Bnh),Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_Q[sys_size+ieq:sys_size+ieq+3] += MB_tstep[self.body_FoR].for_vel[0:3].astype(dtype=ct.c_double, copy=True, order='F')
ieq += 3
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
@lagrangeconstraint
class hinge_FoR(BaseLagrangeConstraint):
_lc_id = 'hinge_FoR'
def __init__(self):
self.required_parameters = ['body_FoR', 'rot_axis_AFoR']
self._n_eq = 5
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.body_FoR = MBdict_entry['body_FoR']
self.rot_axis = MBdict_entry['rot_axis_AFoR']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
num_LM_eq_specific = self._n_eq
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
# Define the position of the first degree of freedom associated to the FoR
FoR_dof = define_FoR_dof(MB_beam, self.body_FoR)
ieq = self._ieq
Bnh[:3, FoR_dof:FoR_dof+3] = 1.0*np.eye(3)
# Only two of these equations are linearly independent
skew_rot_axis = algebra.skew(self.rot_axis)
n0 = np.linalg.norm(skew_rot_axis[0,:])
n1 = np.linalg.norm(skew_rot_axis[1,:])
n2 = np.linalg.norm(skew_rot_axis[2,:])
if ((n0 < n1) and (n0 < n2)):
row0 = 1
row1 = 2
elif ((n1 < n0) and (n1 < n2)):
row0 = 0
row1 = 2
elif ((n2 < n0) and (n2 < n1)):
row0 = 0
row1 = 1
Bnh[3:5, FoR_dof+3:FoR_dof+6] = skew_rot_axis[[row0,row1],:]
LM_C[sys_size+ieq:sys_size+ieq+num_LM_eq_specific,:sys_size] += scalingFactor*Bnh
LM_C[:sys_size,sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += scalingFactor*np.transpose(Bnh)
LM_Q[:sys_size] += scalingFactor*np.dot(np.transpose(Bnh),Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_Q[sys_size+ieq:sys_size+ieq+3] += MB_tstep[self.body_FoR].for_vel[0:3].astype(dtype=ct.c_double, copy=True, order='F')
LM_Q[sys_size+ieq+3:sys_size+ieq+5] += np.dot(skew_rot_axis[[row0,row1],:], MB_tstep[self.body_FoR].for_vel[3:6])
ieq += 5
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
@lagrangeconstraint
class hinge_FoR_wrtG(BaseLagrangeConstraint):
_lc_id = 'hinge_FoR_wrtG'
def __init__(self):
self.required_parameters = ['body_FoR', 'rot_axis_AFoR']
self._n_eq = 5
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.body_FoR = MBdict_entry['body_FoR']
self.rot_axis = MBdict_entry['rot_axis_AFoR']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
num_LM_eq_specific = self._n_eq
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
# Define the position of the first degree of freedom associated to the FoR
FoR_dof = define_FoR_dof(MB_beam, self.body_FoR)
ieq = self._ieq
Bnh[:3, FoR_dof:FoR_dof+3] = algebra.quat2rotation(MB_tstep[self.body_FoR].quat)
# Only two of these equations are linearly independent
skew_rot_axis = algebra.skew(self.rot_axis)
n0 = np.linalg.norm(skew_rot_axis[0,:])
n1 = np.linalg.norm(skew_rot_axis[1,:])
n2 = np.linalg.norm(skew_rot_axis[2,:])
# Drop the row of skew(rot_axis) with the smallest norm; the two remaining
# rows stay linearly independent. np.argmin also covers the tie cases that
# an if/elif chain would leave undefined.
rows = [0, 1, 2]
rows.remove(int(np.argmin([n0, n1, n2])))
row0, row1 = rows
Bnh[3:5, FoR_dof+3:FoR_dof+6] = skew_rot_axis[[row0,row1],:]
LM_C[sys_size+ieq:sys_size+ieq+num_LM_eq_specific,:sys_size] += scalingFactor*Bnh
LM_C[:sys_size,sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += scalingFactor*np.transpose(Bnh)
LM_C[FoR_dof:FoR_dof+3,FoR_dof+6:FoR_dof+10] += algebra.der_CquatT_by_v(MB_tstep[self.body_FoR].quat,Lambda_dot[ieq:ieq+3])
LM_Q[:sys_size] += scalingFactor*np.dot(np.transpose(Bnh),Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_Q[sys_size+ieq:sys_size+ieq+3] += np.dot(algebra.quat2rotation(MB_tstep[self.body_FoR].quat),MB_tstep[self.body_FoR].for_vel[0:3])
LM_Q[sys_size+ieq+3:sys_size+ieq+5] += np.dot(skew_rot_axis[[row0,row1],:], MB_tstep[self.body_FoR].for_vel[3:6])
ieq += 5
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
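# Hypothetical constraint entry for hinge_FoR_wrtG, assuming the MBdict layout
# consumed by initialize_constraints() at the end of this module (key names
# follow self.required_parameters):
#
#   MBdict['constraint_00'] = {'behaviour': 'hinge_FoR_wrtG',
#                              'body_FoR': 0,
#                              'rot_axis_AFoR': np.array([0.0, 1.0, 0.0])}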
@lagrangeconstraint
class fully_constrained_node_FoR(BaseLagrangeConstraint):
_lc_id = 'fully_constrained_node_FoR'
def __init__(self):
self.required_parameters = ['node_in_body', 'body', 'body_FoR']
self._n_eq = 6
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
cout.cout_wrap("WARNING: do not use fully_constrained_node_FoR. It is outdated", 3)
self.node_number = MBdict_entry['node_in_body']
self.node_body = MBdict_entry['body']
self.FoR_body = MBdict_entry['body_FoR']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
num_LM_eq_specific = self._n_eq
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
node_dof = define_node_dof(MB_beam, self.node_body, self.node_number)
FoR_dof = define_FoR_dof(MB_beam, self.FoR_body)
ieq = self._ieq
# Option with non holonomic constraints
# BC for linear velocities
Bnh[:3, node_dof:node_dof+3] = -1.0*np.eye(3)
quat = algebra.quat_bound(MB_tstep[self.FoR_body].quat)
Bnh[:3, FoR_dof:FoR_dof+3] = algebra.quat2rotation(quat)
# BC for angular velocities
Bnh[3:6,FoR_dof+3:FoR_dof+6] = -1.0*algebra.quat2rotation(quat)
# Use the stored body/node indices instead of the hardcoded 0/1/-1 of the
# original (one symptom of why this constraint is flagged as outdated above)
ielem, inode_in_elem = MB_beam[self.node_body].node_master_elem[self.node_number]
Bnh[3:6,node_dof+3:node_dof+6] = algebra.crv2tan(MB_tstep[self.node_body].psi[ielem, inode_in_elem, :])
LM_C[sys_size+ieq:sys_size+ieq+num_LM_eq_specific,:sys_size] += scalingFactor*Bnh
LM_C[:sys_size,sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += scalingFactor*np.transpose(Bnh)
LM_Q[:sys_size] += scalingFactor*np.dot(np.transpose(Bnh),Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_Q[sys_size+ieq:sys_size+ieq+3] += -MB_tstep[self.node_body].pos_dot[self.node_number,:] + np.dot(algebra.quat2rotation(quat),MB_tstep[self.FoR_body].for_vel[0:3])
LM_Q[sys_size+ieq+3:sys_size+ieq+6] += (np.dot(algebra.crv2tan(MB_tstep[self.node_body].psi[ielem, inode_in_elem, :]),MB_tstep[self.node_body].psi_dot[ielem, inode_in_elem, :]) -
np.dot(algebra.quat2rotation(quat), MB_tstep[self.FoR_body].for_vel[3:6]))
#LM_K[FoR_dof:FoR_dof+3,FoR_dof+6:FoR_dof+10] = algebra.der_CquatT_by_v(MB_tstep[body_FoR].quat,Lambda_dot)
LM_C[FoR_dof:FoR_dof+3,FoR_dof+6:FoR_dof+10] += algebra.der_CquatT_by_v(quat,scalingFactor*Lambda_dot[ieq:ieq+3])
LM_C[FoR_dof+3:FoR_dof+6,FoR_dof+6:FoR_dof+10] -= algebra.der_CquatT_by_v(quat,scalingFactor*Lambda_dot[ieq+3:ieq+6])
LM_K[node_dof+3:node_dof+6,node_dof+3:node_dof+6] += algebra.der_TanT_by_xv(MB_tstep[0].psi[ielem, inode_in_elem, :],scalingFactor*Lambda_dot[ieq+3:ieq+6])
ieq += 6
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
MB_tstep[self.FoR_body].for_pos[0:3] = np.dot(algebra.quat2rotation(MB_tstep[self.node_body].quat), MB_tstep[self.node_body].pos[self.node_number,:]) + MB_tstep[self.node_body].for_pos[0:3]
return
# @lagrangeconstraint
# class hinge_node_FoR_constant_rotation(BaseLagrangeConstraint):
# _lc_id = 'hinge_node_FoR_constant_rotation'
#
# def __init__(self):
# self._n_eq = 4
#
# def get_n_eq(self):
# return self._n_eq
#
# def initialise(self, MBdict_entry, ieq):
# print('Type of LC: ', self._lc_id)
# print('Arguments and values:')
# for k, v in MBdict_entry.items():
# print(k, v)
#
# self._ieq = ieq
# return self._ieq + self._n_eq
#
# def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
# sys_size, dt, Lambda, Lambda_dot,
# scalingFactor, penaltyFactor):
# return
#
# def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
# sys_size, dt, Lambda, Lambda_dot,
# scalingFactor, penaltyFactor):
# return
#
# def staticpost(self, lc_list, MB_beam, MB_tstep):
# return
#
# def dynamicpost(self, lc_list, MB_beam, MB_tstep):
# return
@lagrangeconstraint
class constant_rot_vel_FoR(BaseLagrangeConstraint):
_lc_id = 'constant_rot_vel_FoR'
def __init__(self):
self.required_parameters = ['FoR_body', 'rot_vel']
self._n_eq = 3
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.rot_vel = MBdict_entry['rot_vel']
self.FoR_body = MBdict_entry['FoR_body']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
num_LM_eq_specific = self._n_eq
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order = 'F')
# Define the position of the first degree of freedom associated to the FoR
FoR_dof = define_FoR_dof(MB_beam, self.FoR_body)
ieq = self._ieq
Bnh[:3,FoR_dof+3:FoR_dof+6] = np.eye(3)
LM_C[sys_size+ieq:sys_size+ieq+num_LM_eq_specific,:sys_size] += scalingFactor*Bnh
LM_C[:sys_size,sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += scalingFactor*np.transpose(Bnh)
LM_Q[:sys_size] += scalingFactor*np.dot(np.transpose(Bnh),Lambda_dot[ieq:ieq+num_LM_eq_specific])
LM_Q[sys_size+ieq:sys_size+ieq+num_LM_eq_specific] += MB_tstep[self.FoR_body].for_vel[3:6] - self.rot_vel
ieq += 3
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
@lagrangeconstraint
class constant_vel_FoR(BaseLagrangeConstraint):
_lc_id = 'constant_vel_FoR'
def __init__(self):
self.required_parameters = ['FoR_body', 'vel']
self._n_eq = 6
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.vel = MBdict_entry['vel']
self.FoR_body = MBdict_entry['FoR_body']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
num_LM_eq_specific = self._n_eq
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order='F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order='F')
# Define the position of the first degree of freedom associated to the FoR
FoR_dof = define_FoR_dof(MB_beam, self.FoR_body)
ieq = self._ieq
Bnh[:num_LM_eq_specific, FoR_dof:FoR_dof+6] = np.eye(6)
LM_C[sys_size + ieq:sys_size + ieq + num_LM_eq_specific, :sys_size] += scalingFactor * Bnh
LM_C[:sys_size, sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += scalingFactor * np.transpose(Bnh)
LM_Q[:sys_size] += scalingFactor * np.dot(np.transpose(Bnh), Lambda_dot[ieq:ieq + num_LM_eq_specific])
LM_Q[sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += MB_tstep[self.FoR_body].for_vel - self.vel
ieq += 6
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
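# Hypothetical entry for constant_vel_FoR; 'vel' is the prescribed 6-component
# FoR velocity (translational + rotational), matching the for_vel comparison
# in dynamicmat above:
#
#   MBdict['constraint_01'] = {'behaviour': 'constant_vel_FoR',
#                              'FoR_body': 1,
#                              'vel': np.zeros((6,))}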
@lagrangeconstraint
class lin_vel_node_wrtA(BaseLagrangeConstraint):
_lc_id = 'lin_vel_node_wrtA'
def __init__(self):
self.required_parameters = ['velocity', 'body_number', 'node_number']
self._n_eq = 3
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ', self._lc_id)
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(k, v)
self.vel = MBdict_entry['velocity']
self.body_number = MBdict_entry['body_number']
self.node_number = MBdict_entry['node_number']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
num_LM_eq_specific = self._n_eq
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order='F')
# Define the position of the first degree of freedom associated to the FoR
# FoR_dof = define_FoR_dof(MB_beam, self.body_number)
node_dof = define_node_dof(MB_beam, self.body_number, self.node_number)
ieq = self._ieq
B[:num_LM_eq_specific, node_dof:node_dof+3] = np.eye(3)
LM_K[sys_size + ieq:sys_size + ieq + num_LM_eq_specific, :sys_size] += scalingFactor * B
LM_K[:sys_size, sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += scalingFactor * np.transpose(B)
LM_Q[:sys_size] += scalingFactor * np.dot(np.transpose(B), Lambda[ieq:ieq + num_LM_eq_specific])
LM_Q[sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += MB_tstep[self.body_number].pos[self.node_number,:] - MB_beam[self.body_number].ini_info.pos[self.node_number,:]
ieq += 3
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
if len(self.vel.shape) > 1:
current_vel = self.vel[ts-1, :]
else:
current_vel = self.vel
num_LM_eq_specific = self._n_eq
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order='F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order='F')
# Define the position of the first degree of freedom associated to the FoR
# FoR_dof = define_FoR_dof(MB_beam, self.body_number)
node_dof = define_node_dof(MB_beam, self.body_number, self.node_number)
ieq = self._ieq
Bnh[:num_LM_eq_specific, node_dof:node_dof+3] = np.eye(3)
LM_C[sys_size + ieq:sys_size + ieq + num_LM_eq_specific, :sys_size] += scalingFactor * Bnh
LM_C[:sys_size, sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += scalingFactor * np.transpose(Bnh)
LM_Q[:sys_size] += scalingFactor * np.dot(np.transpose(Bnh), Lambda_dot[ieq:ieq + num_LM_eq_specific])
LM_Q[sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += MB_tstep[self.body_number].pos_dot[self.node_number,:] - current_vel
ieq += 3
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
@lagrangeconstraint
class lin_vel_node_wrtG(BaseLagrangeConstraint):
_lc_id = 'lin_vel_node_wrtG'
def __init__(self):
self.required_parameters = ['velocity', 'body_number', 'node_number']
self._n_eq = 3
def get_n_eq(self):
return self._n_eq
def initialise(self, MBdict_entry, ieq, print_info=True):
# if print_info:
# cout.cout_wrap('Type of LC: ' + str(self._lc_id))
# cout.cout_wrap('Arguments and values:')
# for k, v in MBdict_entry.items():
# cout.cout_wrap(str(k) + str(v))
self.vel = MBdict_entry['velocity']
self.body_number = MBdict_entry['body_number']
self.node_number = MBdict_entry['node_number']
self._ieq = ieq
return self._ieq + self._n_eq
def staticmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
num_LM_eq_specific = self._n_eq
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order='F')
# Define the position of the first degree of freedom associated to the FoR
# FoR_dof = define_FoR_dof(MB_beam, self.body_number)
node_dof = define_node_dof(MB_beam, self.body_number, self.node_number)
ieq = self._ieq
B[:num_LM_eq_specific, node_dof:node_dof+3] = algebra.quat2rotation(MB_tstep[self.body_number].quat)
LM_K[sys_size + ieq:sys_size + ieq + num_LM_eq_specific, :sys_size] += scalingFactor * B
LM_K[:sys_size, sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += scalingFactor * np.transpose(B)
LM_Q[:sys_size] += scalingFactor * np.dot(np.transpose(B), Lambda[ieq:ieq + num_LM_eq_specific])
LM_Q[sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += (np.dot(algebra.quat2rotation(MB_tstep[self.body_number].quat), MB_tstep[self.body_number].pos[self.node_number,:]) +
MB_tstep[self.body_number].for_pos)
LM_Q[sys_size + ieq:sys_size + ieq + num_LM_eq_specific] -= (np.dot(algebra.quat2rotation(MB_beam[self.body_number].ini_info.quat), MB_beam[self.body_number].ini_info.pos[self.node_number,:]) +
MB_beam[self.body_number].ini_info.for_pos)
ieq += 3
return
def dynamicmat(self, LM_C, LM_K, LM_Q, MB_beam, MB_tstep, ts, num_LM_eq,
sys_size, dt, Lambda, Lambda_dot,
scalingFactor, penaltyFactor):
if len(self.vel.shape) > 1:
current_vel = self.vel[ts-1, :]
else:
current_vel = self.vel
num_LM_eq_specific = self._n_eq
Bnh = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order='F')
B = np.zeros((num_LM_eq_specific, sys_size), dtype=ct.c_double, order='F')
# Define the position of the first degree of freedom associated to the FoR
FoR_dof = define_FoR_dof(MB_beam, self.body_number)
node_dof = define_node_dof(MB_beam, self.body_number, self.node_number)
ieq = self._ieq
if MB_beam[self.body_number].FoR_movement == 'free':
Bnh[:num_LM_eq_specific, FoR_dof:FoR_dof+3] = algebra.quat2rotation(MB_tstep[self.body_number].quat)
Bnh[:num_LM_eq_specific, FoR_dof+3:FoR_dof+6] = -np.dot(algebra.quat2rotation(MB_tstep[self.body_number].quat), algebra.skew(MB_tstep[self.body_number].pos[self.node_number,:]))
Bnh[:num_LM_eq_specific, node_dof:node_dof+3] = algebra.quat2rotation(MB_tstep[self.body_number].quat)
LM_C[sys_size + ieq:sys_size + ieq + num_LM_eq_specific, :sys_size] += scalingFactor * Bnh
LM_C[:sys_size, sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += scalingFactor * np.transpose(Bnh)
if MB_beam[self.body_number].FoR_movement == 'free':
LM_C[FoR_dof:FoR_dof+3, FoR_dof+6:FoR_dof+10] += algebra.der_CquatT_by_v(MB_tstep[self.body_number].quat,Lambda_dot[ieq:ieq + num_LM_eq_specific])
LM_C[node_dof:node_dof+3, FoR_dof+6:FoR_dof+10] += algebra.der_CquatT_by_v(MB_tstep[self.body_number].quat,Lambda_dot[ieq:ieq + num_LM_eq_specific])
LM_C[FoR_dof+3:FoR_dof+6, FoR_dof+6:FoR_dof+10] += np.dot(algebra.skew(MB_tstep[self.body_number].pos[self.node_number,:]), algebra.der_CquatT_by_v(MB_tstep[self.body_number].quat,Lambda_dot[ieq:ieq + num_LM_eq_specific]))
LM_K[FoR_dof+3:FoR_dof+6, node_dof:node_dof+3] -= algebra.skew(np.dot(algebra.quat2rotation(MB_tstep[self.body_number].quat).T, Lambda_dot[ieq:ieq + num_LM_eq_specific]))
LM_Q[:sys_size] += scalingFactor * np.dot(np.transpose(Bnh), Lambda_dot[ieq:ieq + num_LM_eq_specific])
LM_Q[sys_size + ieq:sys_size + ieq + num_LM_eq_specific] += (np.dot( algebra.quat2rotation(MB_tstep[self.body_number].quat), (
MB_tstep[self.body_number].for_vel[0:3] +
np.dot(algebra.skew(MB_tstep[self.body_number].for_vel[3:6]), MB_tstep[self.body_number].pos[self.node_number,:]) +
MB_tstep[self.body_number].pos_dot[self.node_number,:])) -
current_vel)
ieq += 3
return
def staticpost(self, lc_list, MB_beam, MB_tstep):
return
def dynamicpost(self, lc_list, MB_beam, MB_tstep):
return
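# Hypothetical entry for lin_vel_node_wrtG; 'velocity' may be a single
# 3-vector or a (n_tsteps, 3) time history, as handled at the top of
# dynamicmat above:
#
#   MBdict['constraint_02'] = {'behaviour': 'lin_vel_node_wrtG',
#                              'body_number': 0,
#                              'node_number': 0,
#                              'velocity': np.zeros((3,))}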
################################################################################
# Functions to interact with this library
################################################################################
def initialize_constraints(MBdict):
index_eq = 0
num_constraints = MBdict['num_constraints']
lc_list = list()
# Read the dictionary and create the constraints
for iconstraint in range(num_constraints):
lc_list.append(lc_from_string(MBdict["constraint_%02d" % iconstraint]['behaviour'])())
index_eq = lc_list[-1].initialise(MBdict["constraint_%02d" % iconstraint], index_eq)
return lc_list
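# Hedged sketch of a complete MBdict driving this function, reusing the
# hypothetical entries shown earlier in this module:
#
#   MBdict = {'num_constraints': 3,
#             'constraint_00': {...},
#             'constraint_01': {...},
#             'constraint_02': {...}}
#   lc_list = initialize_constraints(MBdict)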
def define_num_LM_eq(lc_list):
"""
define_num_LM_eq
Define the number of equations needed to define the boundary boundary conditions
Args:
lc_list(): list of all the defined contraints
Returns:
num_LM_eq(int): number of new equations needed to define the boundary boundary conditions
Examples:
num_LM_eq = lagrangeconstraints.define_num_LM_eq(lc_list)
Notes:
"""
num_LM_eq = 0
# Compute the number of equations
for lc in lc_list:
num_LM_eq += lc.get_n_eq()
return num_LM_eq
def generate_lagrange_matrix(lc_list, MB_beam, MB_tstep, ts, num_LM_eq, sys_size, dt, Lambda, Lambda_dot, dynamic_or_static):
"""
generate_lagrange_matrix
Generates the matrices associated to the Lagrange multipliers boundary conditions
Args:
lc_list(): list of all the defined contraints
MBdict(MBdict): dictionary with the MultiBody and LagrangeMultipliers information
MB_beam(list): list of 'beams' of each of the bodies that form the system
MB_tstep(list): list of 'StructTimeStepInfo' of each of the bodies that form the system
num_LM_eq(int): number of new equations needed to define the boundary boundary conditions
sys_size(int): total number of degrees of freedom of the multibody system
dt(float): time step
Lambda(numpy array): list of Lagrange multipliers values
Lambda_dot(numpy array): list of the first derivative of the Lagrange multipliers values
dynamic_or_static (str): string defining if the computation is dynamic or static
Returns:
LM_C (numpy array): Damping matrix associated to the Lagrange Multipliers equations
LM_K (numpy array): Stiffness matrix associated to the Lagrange Multipliers equations
LM_Q (numpy array): Vector of independent terms associated to the Lagrange Multipliers equations
Examples:
Notes:
"""
# Lagrange multipliers parameters
# TODO: set them as an input variable (at this point they should not be changed)
penaltyFactor = 0.0
scalingFactor = 1.0
# Initialize matrices
LM_C = np.zeros((sys_size + num_LM_eq,sys_size + num_LM_eq), dtype=ct.c_double, order = 'F')
LM_K = np.zeros((sys_size + num_LM_eq,sys_size + num_LM_eq), dtype=ct.c_double, order = 'F')
LM_Q = np.zeros((sys_size + num_LM_eq,),dtype=ct.c_double, order = 'F')
# Define the matrices associated to the constraints
# TODO: Is there a better way to deal with ieq?
# ieq = 0
for lc in lc_list:
if dynamic_or_static.lower() == "static":
lc.staticmat(LM_C=LM_C,
LM_K=LM_K,
LM_Q=LM_Q,
# MBdict=MBdict,
MB_beam=MB_beam,
MB_tstep=MB_tstep,
ts=ts,
num_LM_eq=num_LM_eq,
sys_size=sys_size,
dt=dt,
Lambda=Lambda,
Lambda_dot=Lambda_dot,
# ieq=ieq,
scalingFactor=scalingFactor,
penaltyFactor=penaltyFactor)
elif dynamic_or_static.lower() == "dynamic":
lc.dynamicmat(LM_C=LM_C,
LM_K=LM_K,
LM_Q=LM_Q,
# MBdict=MBdict,
MB_beam=MB_beam,
MB_tstep=MB_tstep,
ts=ts,
num_LM_eq=num_LM_eq,
sys_size=sys_size,
dt=dt,
Lambda=Lambda,
Lambda_dot=Lambda_dot,
# ieq=ieq,
scalingFactor=scalingFactor,
penaltyFactor=penaltyFactor)
return LM_C, LM_K, LM_Q
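# Typical call sequence within a time step (a sketch; argument names follow
# the docstring above):
#
#   num_LM_eq = define_num_LM_eq(lc_list)
#   LM_C, LM_K, LM_Q = generate_lagrange_matrix(lc_list, MB_beam, MB_tstep, ts,
#                                               num_LM_eq, sys_size, dt,
#                                               Lambda, Lambda_dot, 'dynamic')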
def postprocess(lc_list, MB_beam, MB_tstep, dynamic_or_static):
for lc in lc_list:
if dynamic_or_static.lower() == "static":
lc.staticpost(lc_list = lc_list,
MB_beam = MB_beam,
MB_tstep = MB_tstep)
# MBdict = MBdict)
elif dynamic_or_static.lower() == "dynamic":
lc.dynamicpost(lc_list = lc_list,
MB_beam = MB_beam,
MB_tstep = MB_tstep)
# MBdict = MBdict)
return
def remove_constraint(MBdict, constraint):
try:
del(MBdict[constraint])
MBdict['num_constraints'] -= 1
except KeyError:
# The entry did not exist in the dict; pass without subtracting 1 from
# num_constraints
pass
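# Example: remove_constraint(MBdict, 'constraint_00') deletes the entry and
# decrements MBdict['num_constraints'] only if the key existed.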
################################################################################
################################################################################
################################################################################
# Executed at import time (kept at the end of the file): report the available constraints
print_available_lc()
# test
# if __name__ == '__main__':
# lc_list = list()
# lc_list.append(lc_from_string('SampleLagrange')())
# lc_list.append(lc_from_string('SampleLagrange')())
# counter = -1
# for lc in lc_list:
# counter += 1
# lc.initialise(counter=counter)
file_validator/schema/__init__.py | sujavarghese/data-validator | MIT
from file_validator.schema.generator import *
from file_validator.schema.schema import *
src/IceRayPy/core/geometry/simple.py | dmilos/IceRay | MIT-0
import ctypes
print( '<' + __name__ + ' name=\'' + __file__ + '\''+ '>' )
import IceRayPy.type
import IceRayPy.type.math
import IceRayPy.type.math.coord
import IceRayPy.core.geometry
Pointer = ctypes.POINTER
AddresOf = ctypes.addressof
Scalar = IceRayPy.type.basic.Scalar
VoidPtr = IceRayPy.type.basic.VoidPtr
Integer = IceRayPy.type.basic.Integer
Coord3D = IceRayPy.type.math.coord.Scalar3D
class Sphere : #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll, P_center = None , P_radius = None ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Sphere0()
if None != P_center:
self.center( P_center )
if None != P_radius:
self.radius( P_radius )
return
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def center( self, P_center : Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Sphere_Center( self.m_cargo['this'], AddresOf( P_center ) )
def radius( self, P_radius ):
self.m_cargo['dll'].IceRayC_Geometry_Sphere_Radius( self.m_cargo['this'], Scalar( P_radius ) )
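# Hedged usage sketch for these wrappers. P_dll is assumed to be a ctypes.CDLL
# handle exposing the IceRayC_* entry points; the library name and the
# zero-initialisation of Coord3D are assumptions:
#
#   dll = ctypes.CDLL('libIceRayC.so')
#   sphere = Sphere(dll, P_center=Coord3D(), P_radius=1.0)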
class Box: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll, P_lo = None , P_hi = None ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Box0()
if None != P_lo:
self.lo( P_lo )
if None != P_hi:
self.hi( P_hi )
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def lo( self, P_lo: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Box_Lo( self.m_cargo['this'], AddresOf( P_lo ) )
def hi( self, P_hi: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Box_Hi( self.m_cargo['this'], AddresOf( P_hi ) )
def box( self, P_lo: Coord3D, P_hi: Coord3D ):
self.lo( P_lo )
self.hi( P_hi )
class Cone: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Cone0()
# TODO lo hi
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
class Cylinder: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Cylinder0()
# TODO lo hi, radius
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
class Disc: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Disc0()
# TODO lo hi
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def radius( self, P_radius ):
self.m_cargo['dll'].IceRayC_Geometry_Disc_Radius( self.m_cargo['this'], Scalar( P_radius ) )
def center( self, P_center: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Disc_Center( self.m_cargo['this'], AddresOf( P_center ) )
def normal( self, P_normal: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Disc_Normal( self.m_cargo['this'], AddresOf( P_normal ) )
class UDisc: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_UDisc0()
# TODO lo hi
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def radius( self, P_radius ):
self.m_cargo['dll'].IceRayC_Geometry_UDisc_Radius( self.m_cargo['this'], Scalar( P_radius ) )
class Ellipsoid: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Ellipsoid0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def center( self, P_center: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Ellipsoid_Center( self.m_cargo['this'], AddresOf( P_center ) )
def radiusS( self, P_radius ):
return self.m_cargo['dll'].IceRayC_Geometry_Ellipsoid_RadiusS( self.m_cargo['this'], Scalar( P_radius ) )
def radiusV( self, P_radius: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Ellipsoid_RadiusV( self.m_cargo['this'], AddresOf( P_radius ) )
def system( self, P_eX: Coord3D, P_eY: Coord3D, P_eZ: Coord3D ):
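# NOTE: this forwards to the RadiusV entry point with three axis vectors,
# which looks like a copy-paste slip; the intended C symbol (presumably a
# dedicated *_System entry point) is not known from this file alone.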
return self.m_cargo['dll'].IceRayC_Geometry_Ellipsoid_RadiusV( self.m_cargo['this'], AddresOf( P_eX ), AddresOf( P_eY ), AddresOf( P_eZ ) )
class Hyperboloid: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll, P_core = None ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Hyperboloid0()
if( None != P_core ):
self.core( P_core )
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def core( self, P_core ):
self.m_cargo['dll'].IceRayC_Geometry_Hyperboloid_Core( self.m_cargo['this'], Scalar( P_core ) )
class Paraboloid: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Paraboloid0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
# def radius( self, P_radius ):
# self.m_cargo['dll'].IceRayC_Geometry_Paraoloid_Radius( self.m_cargo['this'], Scalar( P_radius ) )
class Plane: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Plane0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def origin( self, P_origin: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Plane_Origin( self.m_cargo['this'], AddresOf( P_origin ) )
def normal( self, P_normal: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Plane_Normal( self.m_cargo['this'], AddresOf( P_normal ) )
class Quadric: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Quadric0()
# TODO lo hi
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
class Saddle: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Saddle0()
# TODO lo hi
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
class Torus: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Torus0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def minor( self, P_minor ):
self.m_cargo['dll'].IceRayC_Geometry_Torus_Minor( self.m_cargo['this'], Scalar( P_minor ) )
class Triangle: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_Triangle0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
def origin( self, P_origin: Coord3D ): #TODO
return self.m_cargo['dll'].IceRayC_Geometry_Triangle_Origin( self.m_cargo['this'], AddresOf( P_origin ) )
def eX( self, P_eX: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Triangle_eX( self.m_cargo['this'], AddresOf( P_eX ) )
def eY( self, P_eY: Coord3D ):
return self.m_cargo['dll'].IceRayC_Geometry_Triangle_eY( self.m_cargo['this'], AddresOf( P_eY ) )
class UTriangle: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_UTriangle0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
class UCylinder: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_UCylinder0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
class USphere: #( IceRayPy.core.geometry.Generic ):
def __init__( self, P_dll ):
self.m_cargo = {}
self.m_cargo['dll'] = P_dll
self.m_cargo['this'] = self.m_cargo['dll'].IceRayC_Geometry_USphere0()
def __del__( self ):
self.m_cargo['dll'].IceRayC_Geometry_Release( self.m_cargo['this'] )
print( '</' + __name__ + ' name=\'' + __file__ + '\''+ '>' )
tutorials/variable definition and best practices with pep8/documentation.py | Phelipe-Sempreboni/python | MIT
# Links for defining variables and the good practices around them (PEP8).
# https://www.python.org/dev/
# https://devguide.python.org/
# https://www.python.org/dev/peps/pep-0008/
tests/api/dag/test_requirements.py | entailor/pytailor | BSD-3-Clause
from pytailor import PythonTask, BranchTask, DAG, Inputs
def test_specify_requirements_at_dag_level():
inputs = Inputs()
with DAG(requirements=["asdf", "fdsa"]) as dag:
t1 = PythonTask(function=print,
args=["test t1"])
with BranchTask(branch_data=inputs.data, parents=t1) as branch:
with DAG() as sub_dag:
t2 = PythonTask(function=print,
args=["test t2"])
t3 = PythonTask(function=print,
args=["test t3"],
parents=t2)
target1 = ["pytailor"]
target2 = ["asdf", "fdsa", "pytailor"]
assert dag.requirements == target2
assert t1.requirements == target2
assert branch.requirements == target2
assert sub_dag.requirements == target2
assert t2.requirements == target2
assert t3.requirements == target2
assert dag.get_all_requirements() == target2
assert t1.get_all_requirements() == target2
assert branch.get_all_requirements() == target2
assert sub_dag.get_all_requirements() == target2
assert t2.get_all_requirements() == target2
assert t3.get_all_requirements() == target2
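# Taken together, these tests pin down the propagation rule: requirements
# declared on a DAG, branch or task flow down to everything nested inside it,
# while get_all_requirements() additionally aggregates upwards from
# descendants; the implicit "pytailor" requirement is always present.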
def test_specify_requirements_at_branch_level():
inputs = Inputs()
with DAG() as dag:
t1 = PythonTask(function=print,
args=["test t1"])
with BranchTask(branch_data=inputs.data, parents=t1,
requirements=["asdf", "fdsa"]) as branch:
with DAG() as sub_dag:
t2 = PythonTask(function=print,
args=["test t2"])
t3 = PythonTask(function=print,
args=["test t3"],
parents=t2)
target1 = ["pytailor"]
target2 = ["asdf", "fdsa", "pytailor"]
assert dag.requirements == target1
assert t1.requirements == target1
assert branch.requirements == target2
assert sub_dag.requirements == target2
assert t2.requirements == target2
assert t3.requirements == target2
assert dag.get_all_requirements() == target2
assert t1.get_all_requirements() == target1
assert branch.get_all_requirements() == target2
assert sub_dag.get_all_requirements() == target2
assert t2.get_all_requirements() == target2
assert t3.get_all_requirements() == target2
def test_specify_requirements_at_sub_dag_level():
inputs = Inputs()
with DAG() as dag:
t1 = PythonTask(function=print,
args=["test t1"])
with BranchTask(branch_data=inputs.data, parents=t1) as branch:
with DAG(requirements=["asdf", "fdsa"]) as sub_dag:
t2 = PythonTask(function=print,
args=["test t2"])
t3 = PythonTask(function=print,
args=["test t3"],
parents=t2)
target1 = ["pytailor"]
target2 = ["asdf", "fdsa", "pytailor"]
assert dag.requirements == target1
assert t1.requirements == target1
assert branch.requirements == target1
assert sub_dag.requirements == target2
assert t2.requirements == target2
assert t3.requirements == target2
assert dag.get_all_requirements() == target2
assert t1.get_all_requirements() == target1
assert branch.get_all_requirements() == target2
assert sub_dag.get_all_requirements() == target2
assert t2.get_all_requirements() == target2
assert t3.get_all_requirements() == target2
def test_specify_requirements_at_task_level_1():
inputs = Inputs()
with DAG() as dag:
t1 = PythonTask(function=print,
args=["test t1"],
requirements=["asdf", "fdsa"])
with BranchTask(branch_data=inputs.data, parents=t1) as branch:
with DAG() as sub_dag:
t2 = PythonTask(function=print,
args=["test t2"])
t3 = PythonTask(function=print,
args=["test t3"],
parents=t2)
target1 = ["pytailor"]
target2 = ["asdf", "fdsa", "pytailor"]
assert dag.requirements == target1
assert t1.requirements == target2
assert branch.requirements == target1
assert sub_dag.requirements == target1
assert t2.requirements == target1
assert t3.requirements == target1
assert dag.get_all_requirements() == target2
assert t1.get_all_requirements() == target2
assert branch.get_all_requirements() == target1
assert sub_dag.get_all_requirements() == target1
assert t2.get_all_requirements() == target1
assert t3.get_all_requirements() == target1
def test_specify_requirements_at_task_level_2():
inputs = Inputs()
with DAG() as dag:
t1 = PythonTask(function=print,
args=["test t1"])
with BranchTask(branch_data=inputs.data, parents=t1) as branch:
with DAG() as sub_dag:
t2 = PythonTask(function=print,
args=["test t2"],
requirements=["asdf", "fdsa"])
t3 = PythonTask(function=print,
args=["test t3"],
parents=t2)
target1 = ["pytailor"]
target2 = ["asdf", "fdsa", "pytailor"]
assert dag.requirements == target1
assert t1.requirements == target1
assert branch.requirements == target1
assert sub_dag.requirements == target1
assert t2.requirements == target2
assert t3.requirements == target1
assert dag.get_all_requirements() == target2
assert t1.get_all_requirements() == target1
assert branch.get_all_requirements() == target2
assert sub_dag.get_all_requirements() == target2
assert t2.get_all_requirements() == target2
assert t3.get_all_requirements() == target1
def test_specify_requirements_at_task_level_3():
inputs = Inputs()
with DAG() as dag:
t1 = PythonTask(function=print,
args=["test t1"])
with BranchTask(branch_data=inputs.data, parents=t1) as branch:
with DAG() as sub_dag:
t2 = PythonTask(function=print,
args=["test t2"])
t3 = PythonTask(function=print,
args=["test t3"],
parents=t2,
requirements=["asdf", "fdsa"])
target1 = ["pytailor"]
target2 = ["asdf", "fdsa", "pytailor"]
assert dag.requirements == target1
assert t1.requirements == target1
assert branch.requirements == target1
assert sub_dag.requirements == target1
assert t2.requirements == target1
assert t3.requirements == target2
assert dag.get_all_requirements() == target2
assert t1.get_all_requirements() == target1
assert branch.get_all_requirements() == target2
assert sub_dag.get_all_requirements() == target2
assert t2.get_all_requirements() == target1
assert t3.get_all_requirements() == target2
grid_db/dbhelper.py | topd333/Xlab | Unlicense
class RobustGridRouter(object):
"""
A router to control all database operations on models in the
grid_core application.
"""
def db_for_read(self, model, **hints):
"""
Attempts to read grid_core models go to robust_grid.
"""
if model._meta.app_label == 'grid_core':
return 'robust_grid'
return None
def db_for_write(self, model, **hints):
"""
Attempts to write grid_core models go to robust_grid.
"""
if model._meta.app_label == 'grid_core':
return 'robust_grid'
return None
def allow_relation(self, obj1, obj2, **hints):
"""
Allow relations if a model in the grid_core app is involved.
"""
if obj1._meta.app_label == 'grid_core' or obj2._meta.app_label == 'grid_core':
return True
return None
def allow_migrate(self, db, model):
"""
Make sure the grid_core app only appears in the 'grid_core'
database.
"""
if db == 'robust_grid':
return model._meta.app_label == 'grid_core'
elif model._meta.app_label == 'grid_core':
return False
return None
def allow_syncdb(self, db, model):
"""Make sure the grid_core apps only appears on the robust_grid db"""
if model._meta.app_label in ['south']:
return True
if db == 'robust_grid':
return model._meta.app_label == 'grid_core'
elif model._meta.app_label == 'grid_core':
return False
return None
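# Hedged wiring sketch (Django settings), assuming this module is importable
# as grid_db.dbhelper and that DATABASES defines aliases matching the strings
# the routers return ('robust_grid', 'staging', 'estates', 'grid_space'):
#
#   DATABASE_ROUTERS = ['grid_db.dbhelper.RobustGridRouter',
#                       'grid_db.dbhelper.StagingRouter',
#                       'grid_db.dbhelper.EstatesRouter',
#                       'grid_db.dbhelper.SpaceRouter']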
class StagingRouter(object):
"""
A router to control all database operations on models in the
grid_staging application.
"""
def db_for_read(self, model, **hints):
"""
Attempts to read grid_staging models go to staging.
"""
if model._meta.app_label == 'grid_staging':
return 'staging'
return None
def db_for_write(self, model, **hints):
"""
Attempts to write grid_staging models go to staging.
"""
if model._meta.app_label == 'grid_staging':
return 'staging'
return None
def allow_relation(self, obj1, obj2, **hints):
"""
Allow relations if a model in the grid_staging app is involved.
"""
if obj1._meta.app_label == 'grid_staging' or obj2._meta.app_label == 'grid_staging':
return True
return None
def allow_migrate(self, db, model):
"""
Make sure the grid_staging app only appears in the 'staging'
database.
"""
if db == 'staging':
return model._meta.app_label == 'grid_staging'
elif model._meta.app_label == 'grid_staging':
return False
return None
def allow_syncdb(self, db, model):
"""Make sure the grid_staging apps only appears on the staging db"""
if model._meta.app_label in ['south']:
return True
if db == 'staging':
return model._meta.app_label == 'grid_staging'
elif model._meta.app_label == 'grid_staging':
return False
return None
class EstatesRouter(object):
"""
A router to control all database operations on models in the
grid_estates application.
"""
def db_for_read(self, model, **hints):
"""
Attempts to read grid_estates models go to grid_estates.
"""
if model._meta.app_label == 'grid_estates':
return 'estates'
return None
def db_for_write(self, model, **hints):
"""
Attempts to write grid_estates models go to estates.
"""
if model._meta.app_label == 'grid_estates':
return 'estates'
return None
def allow_relation(self, obj1, obj2, **hints):
"""
Allow relations if a model in the grid_estates app is involved.
"""
if obj1._meta.app_label == 'grid_estates' or obj2._meta.app_label == 'grid_estates':
return True
return None
def allow_migrate(self, db, model):
"""
Make sure the grid_estates app only appears in the 'estates'
database.
"""
if db == 'estates':
return model._meta.app_label == 'grid_estates'
elif model._meta.app_label == 'grid_estates':
return False
return None
def allow_syncdb(self, db, model):
"""Make sure the grid_estates apps only appears on the estates db"""
if model._meta.app_label in ['south']:
return True
if db == 'estates':
return model._meta.app_label == 'grid_estates'
elif model._meta.app_label == 'grid_estates':
return False
return None
class SpaceRouter(object):
"""
A router to control all database operations on models in the
grid_space application.
"""
def db_for_read(self, model, **hints):
"""
Attempts to read grid_space models go to grid_space.
"""
if model._meta.app_label == 'grid_space':
return 'grid_space'
return None
def db_for_write(self, model, **hints):
"""
Attempts to write grid_space models go to grid_space.
"""
if model._meta.app_label == 'grid_space':
return 'grid_space'
return None
def allow_relation(self, obj1, obj2, **hints):
"""
Allow relations if a model in the grid_space app is involved.
"""
if obj1._meta.app_label == 'grid_space' or obj2._meta.app_label == 'grid_space':
return True
return None
def allow_migrate(self, db, model):
"""
Make sure the grid_space app only appears in the 'grid_space'
database.
"""
if db == 'grid_space':
return model._meta.app_label == 'grid_space'
elif model._meta.app_label == 'grid_space':
return False
return None
def allow_syncdb(self, db, model):
"""Make sure the grid_space apps only appears on the grid_space db"""
if model._meta.app_label in ['south']:
return True
if db == 'grid_space':
return model._meta.app_label == 'grid_space'
elif model._meta.app_label == 'grid_space':
return False
return None
pythonx/vim_pad/__init__.py | vim-scripts/vim-pad | MIT
import vim_pad.handler
import vim_pad.list_local
import vim_pad.pad_local
import vim_pad.vim_globals
vim_pad.vim_globals.set_vim_globals()
video/dataset.py | arash-safari/vp | MIT
from torch.utils.data import Dataset
import numpy as np
from collections import namedtuple
import lmdb
import pickle
import torch
import os
import cv2
import imageio
import h5py
from torchvision import transforms
CodeRowVideoMnist = namedtuple('CodeRowVideoMnist', ['ids', 'video_ind'])
class MnistVideoDataset(Dataset):
def __init__(self, path, frame_len):
self.frame_len = int(frame_len)
self.frames = np.load(path)
self.frames = self.frames.swapaxes(0, 1).astype(np.float32)
# self.frames[self.frames > 0] = 1.
frames_shape = self.frames.shape
videos_num = frames_shape[0]
video_len = frames_shape[1]
self.sample_per_video = video_len - frame_len + 1
self.length = videos_num * self.sample_per_video
def __len__(self):
return self.length
def __getitem__(self, index):
video_ind = int(index / self.sample_per_video)
frame_ind = index - video_ind * self.sample_per_video
return self.frames[video_ind, frame_ind: frame_ind + self.frame_len, :, :], video_ind, frame_ind
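# Hedged usage sketch, assuming a moving-MNIST style .npy dump shaped
# (time, videos, H, W) -- the constructor swaps the first two axes. The file
# name is hypothetical:
#
#   ds = MnistVideoDataset('mnist_test_seq.npy', frame_len=4)
#   clip, video_ind, frame_ind = ds[0]   # clip: (frame_len, H, W) float32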
class lmdb_ffhq(Dataset):
def __init__(self, env_path):
self.env = lmdb.open(
env_path,
max_readers=32,
readonly=True,
lock=False,
readahead=False,
meminit=False,
)
self.transform = transforms.Compose(
[
transforms.ToTensor(),
transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]),
]
)
if not self.env:
raise IOError('Cannot open lmdb dataset', env_path)
with self.env.begin(write=False) as txn:
self.totalIms = txn.stat()['entries']
def __len__(self):
return self.totalIms
def __getitem__(self, index):
with self.env.begin(write=False) as txn:
key = str(index).encode('utf-8')
image = pickle.loads(txn.get(key))
image = cv2.imdecode(image, 1)
image = self.transform(image)
# print(image.shape)
# image = image.transpose(0,2)
# print(image.shape)
return image
# class MnistVideoDataset2(Dataset):
# def __init__(self, path, frame_len):
# self.frame_len = int(frame_len)
# self.frames = np.load(path)
# self.frames = self.frames.swapaxes(0, 1).astype(np.float32)
# self.frames[self.frames > 0] = 1.
# frames_shape = self.frames.shape
# videos_num = frames_shape[0]
# video_len = frames_shape[1]
# self.sample_per_video = video_len - frame_len + 1
# self.length = (videos_num * self.sample_per_video * (self.sample_per_video -1) )/2
#
# def __len__(self):
# return self.length
#
# def __getitem__(self, index):
# video_ind = int(2 * index / (self.sample_per_video*(self.sample_per_video - 1)))
# frame_ind = index - video_ind * (self.sample_per_video*(self.sample_per_video - 1))
# return self.frames[video_ind, frame_ind: min(frame_ind + self.frame_len, self.frames.shape[1]), :, :], video_ind, frame_ind
class lmdb_video(Dataset):
def __init__(self, env_path, frames_len):
self.env = lmdb.open(
env_path,
max_readers=32,
readonly=True,
lock=False,
readahead=False,
meminit=False,
)
self.frames_len = int(frames_len)
if not self.env:
raise IOError('Cannot open lmdb dataset', env_path)
with self.env.begin(write=False) as txn:
self.videos_ind = pickle.loads(txn.get('videos_ind'.encode('utf-8')))
self.frames_ind = pickle.loads(txn.get('frames_ind'.encode('utf-8')))
self.sections = []
video_idx = 0
frame_idx = 0
while frame_idx < len(self.frames_ind):
if (video_idx + 1 < len(self.videos_ind)):
if frame_idx + self.frames_len < self.videos_ind[video_idx + 1]:
self.sections.append(frame_idx)
frame_idx += 1
else:
video_idx += 1
frame_idx = self.videos_ind[video_idx]
else:
self.sections.append(frame_idx)
frame_idx += 1
def __len__(self):
return len(self.sections)
def __getitem__(self, index):
frames = []
with self.env.begin(write=False) as txn:
frame_idx = self.sections[index]
for i in range(self.frames_len):
key = str(frame_idx + i).encode('utf-8')
frame = pickle.loads(txn.get(key))
frame = cv2.imdecode(frame, 1)
frames.append(frame)
if len(frames) == 1:
return torch.from_numpy(frames[0])
return torch.from_numpy(np.asarray(frames))
class lmdb_kth_running(Dataset):
def __init__(self, env_path, frames_len):
self.env = lmdb.open(
env_path,
max_readers=32,
readonly=True,
lock=False,
readahead=False,
meminit=False,
)
self.frames_len = int(frames_len)
if not self.env:
raise IOError('Cannot open lmdb dataset', env_path)
with self.env.begin(write=False) as txn:
self.videos_ind = pickle.loads(txn.get('videos_ind'.encode('utf-8')))
self.frames_ind = pickle.loads(txn.get('frames_ind'.encode('utf-8')))
self.sections = []
video_idx = 0
frame_idx = 0
while frame_idx < len(self.frames_ind):
if (video_idx + 1 < len(self.videos_ind)):
if frame_idx + self.frames_len < self.videos_ind[video_idx + 1]:
self.sections.append(frame_idx)
frame_idx += 1
else:
video_idx += 1
frame_idx = self.videos_ind[video_idx]
else:
self.sections.append(frame_idx)
frame_idx += 1
def __len__(self):
return len(self.sections)
def __getitem__(self, index):
frames = []
with self.env.begin(write=False) as txn:
frame_idx = self.sections[index]
for i in range(self.frames_len):
key = str(frame_idx + i).encode('utf-8')
frame = pickle.loads(txn.get(key))
frame = cv2.imdecode(frame, 1)
# print(frame.shape)
frame = frame[:, :, 0:1].astype(np.float16) / 256 - 0.5
frame = frame.transpose(2, 0, 1)
# print(frame.shape)
frames.append(frame)
if len(frames) == 1:
return torch.from_numpy(frames[0])
return torch.from_numpy(np.asarray(frames))
class MnistVideoCodeLMDBDataset(Dataset):
def __init__(self, path, frame_len):
self.env = lmdb.open(
path,
max_readers=32,
readonly=True,
lock=False,
readahead=False,
meminit=False,
)
self.frame_len = int(frame_len)
video_len = 20
self.sample_per_video = video_len - frame_len + 1
if not self.env:
raise IOError('Cannot open lmdb dataset', path)
with self.env.begin(write=False) as txn:
self.length = int(txn.get('length'.encode('utf-8')).decode('utf-8')) * self.sample_per_video
def __len__(self):
return self.length
def __getitem__(self, index):
with self.env.begin(write=False) as txn:
video_ind = int(index / self.sample_per_video)
frame_ind = index - video_ind * self.sample_per_video
key = str(video_ind).encode('utf-8')
row = pickle.loads(txn.get(key))
return torch.from_numpy(row.ids[frame_ind: frame_ind + self.frame_len]), row.video_ind, frame_ind
class MnistVideoCodeLMDBDataset2(Dataset):
def __init__(self, path, frame_len):
self.env = lmdb.open(
path,
max_readers=32,
readonly=True,
lock=False,
readahead=False,
meminit=False,
)
self.frame_len = int(frame_len)
video_len = 20
self.sample_per_video = video_len - frame_len + 1
if not self.env:
raise IOError('Cannot open lmdb dataset', path)
with self.env.begin(write=False) as txn:
self.length = int(txn.get('length'.encode('utf-8')).decode('utf-8')) * int(
(self.sample_per_video * (self.sample_per_video + 1)) / 2)
def __len__(self):
return self.length
def __getitem__(self, index):
with self.env.begin(write=False) as txn:
video_ind = int(index / self.sample_per_video)
frame_ind = index - video_ind * int((self.sample_per_video * (self.sample_per_video + 1)) / 2)
key = str(video_ind).encode('utf-8')
row = pickle.loads(txn.get(key))
return torch.from_numpy(
row.ids[frame_ind: min(frame_ind + self.frame_len, row.ids.shape[0])]), row.video_ind, frame_ind
test/test_dayuPath.py | DangoWang/dayu_path | MIT
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
__author__ = 'andyguo'
import unittest
from unittest import TestCase
from dayu_path import DayuPath
class TestDayuPath(TestCase):
def test___new__(self):
self.assertEqual(DayuPath(''), None)
self.assertEqual(DayuPath([]), None)
self.assertEqual(DayuPath(tuple()), None)
self.assertEqual(DayuPath(set()), None)
self.assertEqual(DayuPath(dict()), None)
self.assertEqual(DayuPath('any_string'), 'any_string')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/111111111.jpg'), '/Users/andyguo/Desktop/111111111.jpg')
self.assertEqual(DayuPath(u'/Users/andyguo/Desktop/中文路径 测试.jpg'), u'/Users/andyguo/Desktop/中文路径 测试.jpg')
self.assertEqual(DayuPath('D:/data/test.jpg'), 'd:/data/test.jpg')
self.assertEqual(DayuPath('d:\\data\\test.jpg'), 'd:/data/test.jpg')
self.assertEqual(DayuPath('D:\\data\\test.jpg'), 'd:/data/test.jpg')
obj = DayuPath('/Users/andyguo/Desktop/111111111.jpg')
self.assertIs(DayuPath(obj), obj)
def test_os_functions(self):
path = DayuPath(self.mock_path).child('cam_test', 'A001C001_180212_RG8C.9876521.exr')
self.assertIsNotNone(path.state())
self.assertIsNotNone(path.lstate())
self.assertIsNotNone(path.exists())
self.assertIsNotNone(path.lexists())
self.assertIsNotNone(path.isfile())
self.assertIsNotNone(path.isdir())
self.assertIsNotNone(path.islink())
self.assertIsNotNone(path.ismount())
self.assertIsNotNone(path.atime())
self.assertIsNotNone(path.ctime())
self.assertIsNotNone(path.mtime())
self.assertIsNotNone(path.size())
def test_frame(self):
self.assertEqual(DayuPath('/Users/andyguo/Desktop/1.jpg').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/12.jpg').frame, 12)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/1001.jpg').frame, 1001)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/0024.jpg').frame, 24)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/1.mov').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/14.mov').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/123.mp4').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/v001.jpg').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/v002_999.jpg').frame, 999)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/aaa_test.1.jpg').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/aaa_test.12.jpg').frame, 12)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/aaa_test.123.jpg').frame, 123)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/aa_v001.jpg').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_v0023.012.jpg').frame, 12)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_v0023_1234.jpg').frame, 1234)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_v0023.jpg').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_v0023.mov').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/MVI1023.jpg').frame, 1023)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/MVI1023.MP4').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/test576bb.mov').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/test576bb.jpg').frame, -1)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/576_hkke.jpg').frame, -1)
self.assertEqual(DayuPath(u'/Users/andyguo/Desktop/中文_1001.jpg').frame, 1001)
self.assertEqual(DayuPath(u'/Users/andyguo/Desktop/中文 1001.jpg').frame, 1001)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/ttt/asdfasdf/pl_0010.1012.tiff').frame, 1012)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/ttt/asdfasdf/pl_0010.1012.mov').frame, -1)
def test_pattern(self):
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_v0023.jpg').pattern, None)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_%d.jpg').pattern, '%d')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_%02d.jpg').pattern, '%02d')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_%03d.jpg').pattern, '%03d')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_%04d.jpg').pattern, '%04d')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_#.jpg').pattern, '#')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_##.jpg').pattern, '##')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_###.jpg').pattern, '###')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_####.jpg').pattern, '####')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_$F.jpg').pattern, '$F')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_$F2.jpg').pattern, '$F2')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_$F3.jpg').pattern, '$F3')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_0010_plt_$F4.jpg').pattern, '$F4')
self.assertEqual(DayuPath(u'/Users/andyguo/Desktop/中文的测试$F4.jpg').pattern, '$F4')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/pl_%04d_ani_$F4.jpg').pattern, '%04d')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/ani_$F4.mov').pattern, '$F4')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/abc.mov').pattern, None)
def test_to_pattern(self):
self.assertEqual(DayuPath('/Users/andyguo/Desktop/1.jpg').to_pattern(), '/Users/andyguo/Desktop/1.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/11.jpg').to_pattern(), '/Users/andyguo/Desktop/%02d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/11.jpg').to_pattern('#'), '/Users/andyguo/Desktop/##.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/11.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F2.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/123.jpg').to_pattern(), '/Users/andyguo/Desktop/%03d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/123.jpg').to_pattern('#'), '/Users/andyguo/Desktop/###.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/123.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F3.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/1234.jpg').to_pattern(), '/Users/andyguo/Desktop/%04d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/1234.jpg').to_pattern('#'), '/Users/andyguo/Desktop/####.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/1234.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F4.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/1234.jpg').to_pattern('ss'),
'/Users/andyguo/Desktop/%04d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%02d.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%02d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%02d.jpg').to_pattern('#'), '/Users/andyguo/Desktop/##.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%02d.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F2.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%03d.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%03d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%03d.jpg').to_pattern('#'), '/Users/andyguo/Desktop/###.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%03d.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F3.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%04d.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%04d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%04d.jpg').to_pattern('#'), '/Users/andyguo/Desktop/####.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%04d.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F4.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/%04d.jpg').to_pattern('1'), '/Users/andyguo/Desktop/%04d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/##.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%02d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/##.jpg').to_pattern('#'), '/Users/andyguo/Desktop/##.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/##.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F2.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/###.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%03d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/###.jpg').to_pattern('#'), '/Users/andyguo/Desktop/###.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/###.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F3.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/####.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%04d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/####.jpg').to_pattern('#'), '/Users/andyguo/Desktop/####.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/####.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F4.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/####.jpg').to_pattern('f'), '/Users/andyguo/Desktop/%04d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F2.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%02d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F2.jpg').to_pattern('#'), '/Users/andyguo/Desktop/##.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F2.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F2.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F3.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%03d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F3.jpg').to_pattern('#'), '/Users/andyguo/Desktop/###.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F3.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F3.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F4.jpg').to_pattern('%'), '/Users/andyguo/Desktop/%04d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F4.jpg').to_pattern('#'), '/Users/andyguo/Desktop/####.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F4.jpg').to_pattern('$'), '/Users/andyguo/Desktop/$F4.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/$F4.jpg').to_pattern('dd'), '/Users/andyguo/Desktop/%04d.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/MVI1001.mov').to_pattern('%'),
'/Users/andyguo/Desktop/MVI1001.mov')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/MVI1001.mov').to_pattern('#'),
'/Users/andyguo/Desktop/MVI1001.mov')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/MVI1001.mov').to_pattern('$'),
'/Users/andyguo/Desktop/MVI1001.mov')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/MVI1001.MP4').to_pattern(),
'/Users/andyguo/Desktop/MVI1001.MP4')
def test_restore_pattern(self):
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.$F.jpg').restore_pattern(12),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.12.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.$F2.jpg').restore_pattern(12),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.12.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.$F3.jpg').restore_pattern(12),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.012.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.$F4.jpg').restore_pattern(12),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.0012.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.%d.jpg').restore_pattern(1920),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.1920.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.%0d.jpg').restore_pattern(192),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.192.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.%02d.jpg').restore_pattern(1920),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.1920.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.%03d.jpg').restore_pattern(1001),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.1001.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.%04d.jpg').restore_pattern(364),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.0364.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.#.jpg').restore_pattern(364),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.364.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.##.jpg').restore_pattern(364),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.364.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.###.jpg').restore_pattern(364),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.364.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.####.jpg').restore_pattern(364),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.0364.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.%04d.jpg').restore_pattern(0),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.0000.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.1234.jpg').restore_pattern(-1),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.1234.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.1234.jpg').restore_pattern(2345),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.1234.jpg')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/sd_0010_plt_v0002.1234.jpg').restore_pattern(None),
'/Users/andyguo/Desktop/sd_0010_plt_v0002.1234.jpg')
def setUp(self):
super(TestDayuPath, self).setUp()
from uuid import uuid4
self.mock_path = DayuPath('~').expand_user().child(uuid4().hex)
self.mock_path2 = DayuPath('~').expand_user().child(uuid4().hex)
content_list = ['first_depth_0010.1001.dpx',
'first_depth_0010.1002.dpx',
'cam_test/A001C001_180212_RG8C.9876521.exr',
'cam_test/A001C001_180212_RG8C.9876522.exr',
'cam_test/A001C001_180212_RG8C.9876523.exr',
'vfx_test/pl_0010_plt_v0001.1001.exr',
'vfx_test/pl_0010_plt_v0001.1002.exr',
'vfx_test/pl_0010_plt_v0001.1003.exr',
'not_a_sequence/abc.exr',
'single_media_test/pl_0010_plt_v0001.1003.mov',
'single_media_test/MVI1022.MP4',
u'single_media_test/测试中文.MP4',
'missing_test/dd_0090_ani_1001.jpg',
'missing_test/dd_0090_ani_1003.jpg',
'missing_test/dd_0090_ani_1005.jpg',
'ignore_test/._DS_store',
'ignore_test/..sdf',
'recursive_test/a_001.exr',
'recursive_test/a_002.exr',
'recursive_test/inside/b_100.exr',
'recursive_test/inside/b_101.exr',
'recursive_test/inside/b_102.exr',
]
for x in content_list:
file_path = DayuPath(u'{}/{}'.format(self.mock_path, x))
file_path.parent.mkdir(parents=True)
with open(file_path, 'w') as f:
f.write('1')
# test_scan also walks self.mock_path2, so mirror the recursive_test files
# there (the original setUp never created them, leaving that branch of the
# test to pass vacuously).
for x in ('recursive_test/inside/b_100.exr',
'recursive_test/inside/b_101.exr',
'recursive_test/inside/b_102.exr'):
file_path = DayuPath(u'{}/{}'.format(self.mock_path2, x))
file_path.parent.mkdir(parents=True)
with open(file_path, 'w') as f:
f.write('1')
self.mock_path.child('empty_folder', 'inside').mkdir(parents=True)
def tearDown(self):
super(TestDayuPath, self).tearDown()
self.mock_path.rmtree()
self.mock_path2.rmtree()
def test_scan(self):
path = self.mock_path
result = list(path.scan())
self.assertEqual(result[0], path.child('first_depth_0010.%04d.dpx'))
self.assertEqual(result[0].frames, [1001, 1002])
self.assertEqual(result[0].missing, [])
ground_truth_result = {path.child('first_depth_0010.%04d.dpx') : [[1001, 1002], []],
path.child('cam_test', 'A001C001_180212_RG8C.%07d.exr') : [
[9876521, 9876522, 9876523], []],
path.child('vfx_test', 'pl_0010_plt_v0001.%04d.exr') : [[1001, 1002, 1003], []],
path.child('not_a_sequence', 'abc.exr') : [[], []],
path.child('single_media_test', 'pl_0010_plt_v0001.1003.mov'): [[], []],
path.child('single_media_test', 'MVI1022.MP4') : [[], []],
path.child(u'single_media_test', u'测试中文.MP4') : [[], []],
path.child('missing_test', 'dd_0090_ani_%04d.jpg') : [[1001, 1003, 1005],
[1002, 1004]],
path.child('recursive_test', 'a_%03d.exr') : [[1, 2], []],
path.child('recursive_test', 'inside', 'b_%03d.exr') : [[100, 101, 102], []],
}
ground_truth_result.update({
self.mock_path2.child('recursive_test', 'inside', 'b_%03d.exr'): [[100, 101, 102], []]
})
print(ground_truth_result.keys())
for x in path.scan(recursive=True):
if x:
print(x)
self.assertTrue(x in ground_truth_result.keys())
self.assertListEqual([x.frames, x.missing], ground_truth_result[x])
for x in self.mock_path2.scan(recursive=True):
self.assertTrue(x in ground_truth_result.keys())
self.assertListEqual([x.frames, x.missing], ground_truth_result[x])
for x in path.child('vfx_test', 'pl_0010_plt_v0001.1001.exr').scan():
self.assertEqual(x, path.child('vfx_test', 'pl_0010_plt_v0001.%04d.exr'))
self.assertEqual(x.frames, [1001, 1002, 1003])
self.assertEqual(x.missing, [])
for x in path.child('missing_test').scan():
if x:
self.assertEqual(x, path.child('missing_test', 'dd_0090_ani_%04d.jpg'))
self.assertListEqual([x.frames, x.missing], ground_truth_result[x])
for x in path.child(u'single_media_test', u'测试中文.MP4').scan():
self.assertEqual(x, path.child(u'single_media_test', u'测试中文.MP4'))
self.assertEqual(x.frames, [])
self.assertEqual(x.missing, [])
for x in path.child('not_a_sequence', 'abc.exr').scan():
self.assertEqual(x, path.child('not_a_sequence', 'abc.exr'))
self.assertEqual(x.frames, [])
self.assertEqual(x.missing, [])
self.assertFalse(list(path.child('vfx_test', 'pl_0010_plt_v0002.1001.exr').scan()))
self.assertFalse(list(path.child('vfx_test', 'pl_0010_plt_v0002.1001.exr').scan(recursive=True)))
self.assertFalse(list(path.child('empty_folder').scan(recursive=True)))
self.assertNotIn(path.child('ignore_test', '._DS_store'), [x for x in path.scan(recursive=True)])
self.assertNotIn(path.child('ignore_test', '..sdf'), [x for x in path.scan(recursive=True)])
self.assertNotIn(path.child('ignore_test', 'Thumbnail'), [x for x in path.scan(recursive=True)])
self.assertNotIn(path.child('ignore_test', 'temp.tmp'), [x for x in path.scan(recursive=True)])
def test_escape(self):
legal_path = DayuPath('/Users/andyguo/Desktop/111.mov')
self.assertEqual(legal_path.escape(), '/Users/andyguo/Desktop/111.mov')
whitespace_path = DayuPath('/Users/andyguo/Desktop/some words with space.mov')
self.assertEqual(whitespace_path.escape(), '/Users/andyguo/Desktop/some\ words\ with\ space.mov')
bash_string = DayuPath('The$!cat#&ran\"\'up()a|<>tree`;')
self.assertEqual(bash_string.escape(), r'The\$\!cat\#\&ran\"\'up\(\)a\|\<\>tree\`\;')
unicode_string = DayuPath(u'/Users/andyguo/Desktop/中文 和 空格12234 rer.jpg')
self.assertEqual(unicode_string.escape(), u'/Users/andyguo/Desktop/中文\ 和\ 空格12234\ rer.jpg')
def test_version(self):
self.assertEqual(DayuPath('/Users/andyguo/Desktop/v001/111.mov').version, 'v001')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/V001/111.mov').version, 'V001')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/v001/A001C001_180212_DF3X.mov').version, 'v001')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/v003/pl_0010_plt_bga_v0002.1001.mov').version, 'v0002')
self.assertEqual(DayuPath('/Users/andyguo/Desktop/dd/pl_0010_plt_bga.1001.mov').version, None)
self.assertEqual(DayuPath('/Users/andyguo/Desktop/vv/pl_0010_plt_bga.1001.mov').version, None)
self.assertEqual(DayuPath('not a path').version, None)
@unittest.skip('only for mac local test')
def test_root(self):
self.assertEqual(DayuPath('/Users/andyguo/Desktop/abc.jpg').root, '/')
self.assertEqual(DayuPath('/Volumes/filedata/td/finder.lnk').root, '/Volumes/filedata')
self.assertIsInstance(DayuPath('/Volumes/filedata/td/finder.lnk').root, DayuPath)
@unittest.skip('only for mac local test')
def test_is_network(self):
self.assertTrue(DayuPath('/Volumes/filedata/td/finder.lnk').is_network)
self.assertFalse(DayuPath('/Users/andyguo/Desktop/log.txt').is_network)
@unittest.skip('only for mac local test')
def test_is_local(self):
self.assertFalse(DayuPath('/Volumes/filedata/td/finder.lnk').is_local)
self.assertTrue(DayuPath('/Users/andyguo/Desktop/log.txt').is_local)
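# Illustrative note (not from the original file): the pattern helpers tested
# above round-trip between concrete frame numbers and printf/Houdini-style
# frame tokens, e.g. ('/tmp/shot' is a hypothetical path for this example):
#
#     DayuPath('/tmp/shot.1001.exr').to_pattern()           # '/tmp/shot.%04d.exr'
#     DayuPath('/tmp/shot.%04d.exr').restore_pattern(1001)  # '/tmp/shot.1001.exr'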
| 69.624224 | 120 | 0.640707 | 2,740 | 22,419 | 5.079197 | 0.086861 | 0.161242 | 0.255299 | 0.223108 | 0.827046 | 0.802328 | 0.765754 | 0.68851 | 0.657972 | 0.552274 | 0 | 0.072773 | 0.184799 | 22,419 | 321 | 121 | 69.841122 | 0.688717 | 0.001963 | 0 | 0.105263 | 0 | 0 | 0.392482 | 0.347785 | 0 | 0 | 0 | 0 | 0.603509 | 0 | null | null | 0 | 0.014035 | null | null | 0.007018 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1ade610ff793023e3fccc14b332836d0cc40d9a6 | 481 | py | Python | src/openbiolink/graph_creation/file_reader/__init__.py | jerryhluo/OpenBioLink | 6fc073af978daec0b0db5938b73beed37f57f495 | ["MIT"] | 97 | 2019-11-26T09:53:18.000Z | 2022-03-19T10:33:10.000Z | src/openbiolink/graph_creation/file_reader/__init__.py | jerryhluo/OpenBioLink | 6fc073af978daec0b0db5938b73beed37f57f495 | ["MIT"] | 67 | 2019-12-09T21:01:52.000Z | 2021-12-21T15:19:41.000Z | src/openbiolink/graph_creation/file_reader/__init__.py | jerryhluo/OpenBioLink | 6fc073af978daec0b0db5938b73beed37f57f495 | ["MIT"] | 20 | 2020-01-13T23:02:25.000Z | 2022-03-16T21:43:31.000Z |
from openbiolink.graph_creation.file_reader.csvReader import CsvReader
from openbiolink.graph_creation.file_reader.edge import *
from openbiolink.graph_creation.file_reader.fileReader import FileReader
from openbiolink.graph_creation.file_reader.mapping import *
from openbiolink.graph_creation.file_reader.oboReader import OboReader
from openbiolink.graph_creation.file_reader.onto import *
from openbiolink.graph_creation.file_reader.postgresDumpReader import PostgresDumpReader
| 60.125 | 88 | 0.891892 | 60 | 481 | 6.916667 | 0.233333 | 0.253012 | 0.337349 | 0.472289 | 0.684337 | 0.684337 | 0.318072 | 0 | 0 | 0 | 0 | 0 | 0.058212 | 481 | 7 | 89 | 68.714286 | 0.916115 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1ade6dcfc500be4ef12c5172795906719d7a5bbc | 92 | py | Python | wk1/function.py | lokijota/datadrivenastronomymooc | 175655e5c6450c091534299da6bce6f10a1a3627 | ["MIT"] | 8 | 2018-12-09T18:10:16.000Z | 2021-03-21T16:38:58.000Z | wk1/function.py | lokijota/datadrivenastronomymooc | 175655e5c6450c091534299da6bce6f10a1a3627 | ["MIT"] | null | null | null | wk1/function.py | lokijota/datadrivenastronomymooc | 175655e5c6450c091534299da6bce6f10a1a3627 | ["MIT"] | 5 | 2018-11-09T16:57:17.000Z | 2020-04-15T09:11:33.000Z |
def double(val):
return val + val
print(double(3))
print(double(3.3))
print(double('3'))
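# Note: `+` concatenates strings, so double('3') returns '33', while
# double(3) and double(3.3) return 6 and 6.6.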
| 11.5 | 18 | 0.663043 | 16 | 92 | 3.8125 | 0.375 | 0.540984 | 0.590164 | 0.42623 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.05 | 0.130435 | 92 | 7 | 19 | 13.142857 | 0.7125 | 0 | 0 | 0 | 0 | 0 | 0.010989 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0.2 | 0.4 | 0.6 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 7 |
212c99d5fafb1cd09d4cdfeed6552f9c086f5663 | 7,626 | py | Python | test/unit/test_http_receive.py | bischjer/auxiliary | e42d8a4af43c9bd4d816c03edc2465640635b46b | ["BSD-3-Clause"] | null | null | null | test/unit/test_http_receive.py | bischjer/auxiliary | e42d8a4af43c9bd4d816c03edc2465640635b46b | ["BSD-3-Clause"] | null | null | null | test/unit/test_http_receive.py | bischjer/auxiliary | e42d8a4af43c9bd4d816c03edc2465640635b46b | ["BSD-3-Clause"] | null | null | null |
from unittest2 import TestCase
from aux.protocol.http.http import HTTP
import struct
import os
class FakeTransport(object):
def __init__(self, message):
self.fake_message = message
self.bytes_read = 0
def recv(self, nofchar=1200):
buffer = ""
for n in xrange(0, nofchar):
if self.bytes_read >= len(self.fake_message):
break
else:
buffer += self.fake_message[self.bytes_read]
self.bytes_read += 1
return buffer
def close(self):
pass
class HTTP_RECEIVE_TEST(TestCase):
def test_receive_200_startline_only(self):
message = "HTTP/1.1 200 OK\r\n"
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(response.status, 200)
def test_receive_200_only_headers(self):
message = """HTTP/1.1 200 OK\r\nServer: nginx/1.5.13\r\nDate: Sat, 02 Aug 2014 19:40:38 GMT\r\nContent-Type: text/html\r\nContent-Length: 0\r\nLast-Modified: Mon, 14 Apr 2014 08:38:26 GMT\r\nConnection: keep-alive\r\nExpires: Sat, 02 Aug 2014 20:40:38 GMT\r\nCache-Control: max-age=3600\r\nAccept-Ranges: bytes\r\n\r\n"""
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), 0)
self.assertEqual(len(response.headers), 9)
def xtest_receive_200_with_json_body(self):
message = """HTTP/1.1 200 OK\r\nContent-Type: application/json\r\nContent-Length: 15\r\n\r\n{{Hello:world}}"""
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), 15)
def test_receive_200_with_long_body(self):
data_length = 1664
data = "".join(['ABCDEFGHIJKLMNOPQRSTUVWXYZ'[i%26] for i in xrange(0, data_length)])
message = """HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nContent-Length: %i\r\n\r\n%s""" % (data_length, data)
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), data_length)
def test_receive_200_with_chunked_no_body(self):
message = """HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nTransfer-Encoding : chunked\r\n\r\n0\r\n\r\n0"""
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), 0)
def test_receive_200_with_chunked_no_body_one_terminating_zero(self):
message = """HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nTransfer-Encoding : chunked\r\n\r\n0"""
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), 0)
def test_receive_200_with_chunked_body_one_terminating_zero(self):
message = '''HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nTransfer-Encoding : chunked\r\n\r\nbf\r\n{"status":400,"code":"Client.UserInputException","message":"No content to map due to end-of-input\n at [Source: org.apache.catalina.connector.CoyoteInputStream@3d820d7f; line: 1, column: 1]"}\r\n
0'''
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), 191)
def test_receive_200_with_chunked_body(self):
message = """HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nTransfer-Encoding : chunked\r\n\r\n1a\r\nABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n0\r\n\r\n0"""
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), 26)
def test_receive_200_with_chunked_multi_body(self):
message = """HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nTransfer-Encoding : chunked\r\n\r\n1a\r\nABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n0\r\n\r\n0"""
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), 234)
def test_receive_200_with_chunked_long_body(self):
message = """HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nTransfer-Encoding : chunked\r\n\r\n1a\r\nABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n34\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\nd0\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\nd0\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\nd0\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\nd0\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\nd0\r\nABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ\r\n0\r\n\r\n0"""
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), 1742)
# Two-chunk variant of the long-body test (the original duplicate name
# silently shadowed the test above).
def test_receive_200_with_chunked_long_body_two_chunks(self):
data_length = 4096
data = "".join(['ABCDEFGHIJKLMNOPQRSTUVWXYZ'[i%26] for i in xrange(0, data_length)])
message = """HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nTransfer-Encoding : chunked\r\n\r\n%s\r\n%s\r\n%s\r\n%s\r\n0\r\n\r\n0""" % (hex(data_length)[2:] ,data, hex(data_length)[2:] ,data)
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(len(response.body), data_length*2)
def test_receive_200_with_chunked_binary_body(self):
byte_range = 256
data = "".join([struct.pack('B', i) for i in xrange(0,byte_range)])
message = """HTTP/1.1 200 OK\r\nContent-Type: application/zip;charset=UTF-8\r\nTransfer-Encoding : chunked\r\nContent-Disposition : attachment; filename="test_chunkbin.zip"\r\n\r\n100\r\n%s\r\n0\r\n\r\n0""" % data
http = HTTP()
response = http.receive(FakeTransport(message))
self.assertEqual(response.body, "/tmp/aux/test_chunkbin.zip")
self.assertTrue(os.path.exists(response.body))
self.assertEqual(os.path.getsize(response.body), byte_range)
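# Illustrative helper (not from the original file): the chunked fixtures above
# follow RFC 7230 framing -- a hex chunk size, CRLF, the chunk data, CRLF,
# terminated by a zero-size chunk.  A sketch of a builder for such messages
# (make_chunked_message is a hypothetical name; the trailing "0" mirrors the
# fixtures in this file):
#
#     def make_chunked_message(chunks):
#         headers = "Content-Type: text/html\r\nTransfer-Encoding : chunked\r\n"
#         body = "".join("%x\r\n%s\r\n" % (len(c), c) for c in chunks)
#         return "HTTP/1.1 200 OK\r\n" + headers + "\r\n" + body + "0\r\n\r\n0"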
| 66.313043 | 2,047 | 0.741542 | 879 | 7,626 | 6.323094 | 0.186576 | 0.010076 | 0.009176 | 0.177402 | 0.792371 | 0.769162 | 0.759086 | 0.754048 | 0.731198 | 0.715005 | 0 | 0.040356 | 0.145424 | 7,626 | 114 | 2,048 | 66.894737 | 0.81249 | 0 | 0 | 0.32967 | 0 | 0.120879 | 0.505967 | 0.416787 | 0 | 0 | 0 | 0 | 0.164835 | 1 | 0.164835 | false | 0.010989 | 0.043956 | 0 | 0.241758 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2137fee3050ecdab30554391f84e6fa5737fdca5 | 7,880 | py | Python | company/migrations/0004_eshop.py | vavshop/VavXml | 2d87af8dbad5889b6f809423bf71f9d5e8393cce | ["CC0-1.0"] | null | null | null | company/migrations/0004_eshop.py | vavshop/VavXml | 2d87af8dbad5889b6f809423bf71f9d5e8393cce | ["CC0-1.0"] | null | null | null | company/migrations/0004_eshop.py | vavshop/VavXml | 2d87af8dbad5889b6f809423bf71f9d5e8393cce | ["CC0-1.0"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2018-01-04 13:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('company', '0003_auto_20180103_1607'),
]
operations = [
migrations.CreateModel(
name='Eshop',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('col1', models.CharField(blank=True, max_length=250, null=True)),
('col2', models.CharField(blank=True, max_length=250, null=True)),
('col3', models.CharField(blank=True, max_length=250, null=True)),
('col4', models.TextField(blank=True, null=True)),
('col5', models.CharField(blank=True, max_length=250, null=True)),
('col6', models.CharField(blank=True, max_length=250, null=True)),
('col7', models.CharField(blank=True, max_length=250, null=True)),
('col8', models.CharField(blank=True, max_length=250, null=True)),
('col9', models.CharField(blank=True, max_length=250, null=True)),
('col10', models.CharField(blank=True, max_length=250, null=True)),
('col11', models.CharField(blank=True, max_length=250, null=True)),
('col12', models.CharField(blank=True, max_length=250, null=True)),
('col13', models.CharField(blank=True, max_length=250, null=True)),
('col14', models.CharField(blank=True, max_length=250, null=True)),
('col15', models.CharField(blank=True, max_length=250, null=True)),
('col16', models.CharField(blank=True, max_length=250, null=True)),
('col17', models.CharField(blank=True, max_length=250, null=True)),
('col18', models.CharField(blank=True, max_length=250, null=True)),
('col19', models.CharField(blank=True, max_length=250, null=True)),
('col20', models.CharField(blank=True, max_length=250, null=True)),
('col21', models.CharField(blank=True, max_length=250, null=True)),
('col22', models.CharField(blank=True, max_length=250, null=True)),
('col23', models.CharField(blank=True, max_length=250, null=True)),
('col24', models.CharField(blank=True, max_length=250, null=True)),
('col25', models.CharField(blank=True, max_length=250, null=True)),
('col26', models.CharField(blank=True, max_length=250, null=True)),
('col27', models.CharField(blank=True, max_length=250, null=True)),
('col28', models.CharField(blank=True, max_length=250, null=True)),
('col29', models.CharField(blank=True, max_length=250, null=True)),
('col30', models.CharField(blank=True, max_length=250, null=True)),
('col31', models.CharField(blank=True, max_length=250, null=True)),
('col32', models.CharField(blank=True, max_length=250, null=True)),
('col33', models.CharField(blank=True, max_length=250, null=True)),
('col34', models.CharField(blank=True, max_length=250, null=True)),
('col35', models.CharField(blank=True, max_length=250, null=True)),
('col36', models.CharField(blank=True, max_length=250, null=True)),
('col37', models.CharField(blank=True, max_length=250, null=True)),
('col38', models.CharField(blank=True, max_length=250, null=True)),
('col39', models.CharField(blank=True, max_length=250, null=True)),
('col40', models.CharField(blank=True, max_length=250, null=True)),
('col41', models.CharField(blank=True, max_length=250, null=True)),
('col42', models.CharField(blank=True, max_length=250, null=True)),
('col43', models.CharField(blank=True, max_length=250, null=True)),
('col44', models.CharField(blank=True, max_length=250, null=True)),
('col45', models.CharField(blank=True, max_length=250, null=True)),
('col46', models.CharField(blank=True, max_length=250, null=True)),
('col47', models.CharField(blank=True, max_length=250, null=True)),
('col48', models.CharField(blank=True, max_length=250, null=True)),
('col49', models.CharField(blank=True, max_length=250, null=True)),
('col50', models.CharField(blank=True, max_length=250, null=True)),
('col51', models.CharField(blank=True, max_length=250, null=True)),
('col52', models.CharField(blank=True, max_length=250, null=True)),
('col53', models.CharField(blank=True, max_length=250, null=True)),
('col54', models.CharField(blank=True, max_length=250, null=True)),
('col55', models.CharField(blank=True, max_length=250, null=True)),
('col56', models.CharField(blank=True, max_length=250, null=True)),
('col57', models.CharField(blank=True, max_length=250, null=True)),
('col58', models.CharField(blank=True, max_length=250, null=True)),
('col59', models.CharField(blank=True, max_length=250, null=True)),
('col60', models.CharField(blank=True, max_length=250, null=True)),
('col61', models.CharField(blank=True, max_length=250, null=True)),
('col62', models.CharField(blank=True, max_length=250, null=True)),
('col63', models.CharField(blank=True, max_length=250, null=True)),
('col64', models.CharField(blank=True, max_length=250, null=True)),
('col65', models.CharField(blank=True, max_length=250, null=True)),
('col66', models.CharField(blank=True, max_length=250, null=True)),
('col67', models.CharField(blank=True, max_length=250, null=True)),
('col68', models.CharField(blank=True, max_length=250, null=True)),
('col69', models.CharField(blank=True, max_length=250, null=True)),
('col70', models.CharField(blank=True, max_length=250, null=True)),
('col71', models.CharField(blank=True, max_length=250, null=True)),
('col72', models.CharField(blank=True, max_length=250, null=True)),
('col73', models.CharField(blank=True, max_length=250, null=True)),
('col74', models.CharField(blank=True, max_length=250, null=True)),
('col75', models.CharField(blank=True, max_length=250, null=True)),
('col76', models.CharField(blank=True, max_length=250, null=True)),
('col77', models.CharField(blank=True, max_length=250, null=True)),
('col78', models.CharField(blank=True, max_length=250, null=True)),
('col79', models.CharField(blank=True, max_length=250, null=True)),
('col80', models.CharField(blank=True, max_length=250, null=True)),
('col81', models.CharField(blank=True, max_length=250, null=True)),
('col82', models.CharField(blank=True, max_length=250, null=True)),
('col83', models.CharField(blank=True, max_length=250, null=True)),
('col84', models.CharField(blank=True, max_length=250, null=True)),
('col85', models.CharField(blank=True, max_length=250, null=True)),
('col86', models.CharField(blank=True, max_length=250, null=True)),
('col87', models.CharField(blank=True, max_length=250, null=True)),
],
options={
'db_table': 'parce_eshop',
},
),
]
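# Illustrative note (not part of the generated migration): the 86 identical
# CharField columns above could be produced programmatically, e.g.:
#
#     char_cols = [
#         ('col%d' % i, models.CharField(blank=True, max_length=250, null=True))
#         for i in range(1, 88) if i != 4  # col4 is a TextField
#     ]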
| 70.357143 | 114 | 0.592005 | 928 | 7,880 | 4.920259 | 0.153017 | 0.171485 | 0.376697 | 0.452037 | 0.828734 | 0.828734 | 0.828734 | 0.828734 | 0.828734 | 0 | 0 | 0.076394 | 0.244162 | 7,880 | 111 | 115 | 70.990991 | 0.690228 | 0.008503 | 0 | 0 | 1 | 0 | 0.061972 | 0.002945 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.019231 | 0 | 0.048077 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2168e0bc667e07663ab9c303588fdb7f39c448ce | 2,079 | py | Python | encrypt_decrypt_app/nato_code_tests.py | Chika-Jinanwa/chikas-cipher | 91e7139f11132c885bd8872d705ffe3d0ca51b7d | ["MIT"] | null | null | null | encrypt_decrypt_app/nato_code_tests.py | Chika-Jinanwa/chikas-cipher | 91e7139f11132c885bd8872d705ffe3d0ca51b7d | ["MIT"] | 9 | 2021-03-30T14:05:43.000Z | 2022-03-12T00:44:58.000Z | encrypt_decrypt_app/nato_code_tests.py | Chika-Jinanwa/chikas-cipher | 91e7139f11132c885bd8872d705ffe3d0ca51b7d | ["MIT"] | null | null | null |
import unittest
from nato_code import NatoCode
test = NatoCode()
class NatoCodeEncryptTests(unittest.TestCase):
def test_empty_string(self):
self.assertMultiLineEqual(test.encrypt(''), '')
def test_string_with_only_spaces(self):
self.assertMultiLineEqual(test.encrypt(' '), ' ')
def test_string_lower_case(self):
self.assertMultiLineEqual(test.encrypt('abct'), 'alfa bravo charlie tango ')
def test_string_upper_case(self):
self.assertMultiLineEqual(
test.encrypt('ABC'),
'alfa bravo charlie ')
def test_multi_word_lower(self):
self.assertMultiLineEqual(test.encrypt('abc wvu'), 'alfa bravo charlie whiskey victor uniform ')
def test_multi_word_upper(self):
self.assertMultiLineEqual(test.encrypt('ABC WVU'), 'alfa bravo charlie whiskey victor uniform ')
def test_alphanumeric(self):
self.assertMultiLineEqual(test.encrypt('ABC WVU123'), 'alfa bravo charlie whiskey victor uniform one two three ')
class NatoCodeDecryptTests(unittest.TestCase):
def test_empty_string(self):
self.assertMultiLineEqual(test.decrypt(' '), ' ')
def test_string_with_only_spaces(self):
self.assertMultiLineEqual(test.decrypt(' '), ' ')
def test_string_lower_case(self):
self.assertMultiLineEqual(test.decrypt('alfa bravo charlie tango '), 'abct ')
def test_string_upper_case(self):
self.assertMultiLineEqual(
test.decrypt('alfa bravo charlie ').upper(),
'ABC ')
def test_multi_word_lower(self):
self.assertMultiLineEqual(test.decrypt('alfa bravo charlie whiskey victor uniform '), 'abc wvu ')
def test_multi_word_upper(self):
self.assertMultiLineEqual(test.decrypt('alfa bravo charlie whiskey victor uniform ').upper(), 'ABC WVU ')
def test_alphanumeric(self):
self.assertMultiLineEqual(test.decrypt('alfa bravo charlie whiskey victor uniform one two three '), 'abc wvu123 ')
if __name__ == '__main__':
unittest.main()
| 35.237288 | 123 | 0.69216 | 234 | 2,079 | 5.948718 | 0.188034 | 0.070402 | 0.281609 | 0.321839 | 0.837644 | 0.837644 | 0.818247 | 0.772989 | 0.752874 | 0.467672 | 0 | 0.003608 | 0.200096 | 2,079 | 59 | 124 | 35.237288 | 0.833434 | 0 | 0 | 0.55 | 0 | 0 | 0.225481 | 0 | 0 | 0 | 0 | 0 | 0.35 | 1 | 0.35 | false | 0 | 0.05 | 0 | 0.45 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
216bce58ff921609471083f634948291cf35b2e9 | 18,598 | py | Python | test/test_docker_compose.py | kudulab/dojo | 926d16f754d1221f8e015b95fdb49bf4e951fdba | ["Apache-2.0"] | 256 | 2019-09-13T13:33:09.000Z | 2022-03-22T12:55:10.000Z | test/test_docker_compose.py | kudulab/dojo | 926d16f754d1221f8e015b95fdb49bf4e951fdba | ["Apache-2.0"] | 16 | 2019-09-13T12:37:14.000Z | 2022-02-20T11:47:24.000Z | test/test_docker_compose.py | kudulab/dojo | 926d16f754d1221f8e015b95fdb49bf4e951fdba | ["Apache-2.0"] | 12 | 2019-09-13T09:09:38.000Z | 2021-10-03T21:21:36.000Z |
import os
from .support.common import *
def clean_up_dc_dojofile():
try:
os.remove(os.path.join(project_root, 'test/test-files/itest-dc.yaml.dojo'))
except FileNotFoundError:
pass
def test_dc_dojofile_is_removed():
assert not os.path.exists(os.path.join(project_root, 'test/test-files/itest-dc.yaml.dojo'))
def clean_up_dc_containers():
run_command('docker', ['stop', 'testdojorunid_default_run_1'])
run_command('docker', ['stop', 'testdojorunid_abc_1'])
run_command('docker', ['rm', 'testdojorunid_default_run_1'])
run_command('docker', ['rm', 'testdojorunid_abc_1'])
def test_dc_containers_are_removed():
result = run_command('docker', ['ps', '-a', '--filter', 'name=testdojorunid'])
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert not "testdojorunid" in result.stdout, dojo_combined_output_str
assert result.returncode == 0
def clean_up_dc_network():
run_command('docker', ['network', 'rm', 'testdojorunid_default'])
def test_dc_network_is_removed():
result = run_command('docker', ['network', 'ls', '--filter', "name=testdojorunid"])
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert not "testdojorunid" in result.stdout, dojo_combined_output_str
assert result.returncode == 0
def test_docker_compose_run_when_exit_zero():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
result = run_dojo("--driver=docker-compose --dcf=./test/test-files/itest-dc.yaml --debug=true --test=true --image=alpine:3.8 whoami".split(' '))
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert result.returncode == 0
assert 'root' in result.stdout, dojo_combined_output_str
assert 'whoami' in result.stderr, dojo_combined_output_str
assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_run_command_output_capture():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
result = run_dojo(['--driver=docker-compose', '--dcf=./test/test-files/itest-dc.yaml', '--debug=true', '--test=true', '--image=alpine:3.8', 'sh', '-c', "printenv HOME"])
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert result.stdout == '/root\n', dojo_combined_output_str
assert "Exit status from run command: 0" in result.stderr, dojo_combined_output_str
assert "Exit status from cleaning: 0" in result.stderr, dojo_combined_output_str
assert "Exit status from signals: 0" in result.stderr, dojo_combined_output_str
assert "Dojo version" in result.stderr
def test_docker_compose_run_when_exit_non_zero():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
result = run_dojo("--driver=docker-compose --dcf=./test/test-files/itest-dc.yaml --debug=true --test=true --image=alpine:3.8 notexistentcommand".split(' '))
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert "Current shell is interactive: false" in result.stderr, dojo_combined_output_str
assert "exec notexistentcommand failed: No such file or directory" in result.stderr, dojo_combined_output_str
assert "Exit status from run command: 127" in result.stderr, dojo_combined_output_str
assert 127 == result.returncode
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_run_when_double_dash_command_split():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
result = run_dojo("--driver=docker-compose --dcf=./test/test-files/itest-dc.yaml --debug=true --test=true --image=alpine:3.8 -- whoami".split())
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert result.returncode == 0
assert 'root' in result.stdout, dojo_combined_output_str
assert 'whoami' in result.stderr, dojo_combined_output_str
assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_run_when_shell_command():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
result = run_dojo(['--driver=docker-compose', '--dcf=./test/test-files/itest-dc.yaml', '--debug=true', '--test=true', '--image=alpine:3.8', 'sh', '-c', 'echo hello'])
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
assert 'hello' in result.stdout, dojo_combined_output_str
assert result.returncode == 0
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_run_preserves_env_vars():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
envs = dict(os.environ)
envs['ABC'] = 'custom_value'
result = run_dojo(['--driver=docker-compose', '--dcf=./test/test-files/itest-dc.yaml', '--debug=true', '--test=true', '--image=alpine:3.8', 'sh', '-c', 'env | grep ABC'],
env=envs)
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert 'custom_value' in result.stdout, dojo_combined_output_str
assert '1234' in result.stdout, dojo_combined_output_str
assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
assert result.returncode == 0
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_run_preserves_multiline_env_vars():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
envs = dict(os.environ)
envs['ABC'] = """first line
second line"""
result = run_dojo(['--driver=docker-compose', '--dcf=./test/test-files/itest-dc.yaml', '--debug=true', '--test=true',
'--image=alpine:3.8', 'sh', '-c', '"source /etc/dojo.d/variables/00-multiline-vars.sh && env | grep -A 1 ABC"'],
env=envs)
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert '/etc/dojo.d/variables/00-multiline-vars.sh' in result.stderr, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
assert result.returncode == 0
assert 'Exit status from run command:' in result.stderr, dojo_combined_output_str
assert """first line
second line""" in result.stdout
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
# see also: test_docker_preserves_bash_functions_from_env_vars for more comments
def test_docker_compose_run_preserves_bash_functions():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
envs = dict(os.environ)
proc = run_dojo_and_set_bash_func(
['--driver=docker-compose', '--dcf=./test/test-files/itest-dc.yaml', '--debug=true', '--test=true',
'--image=alpine:3.8', 'sh', '-c',
'"apk add -U bash && bash -c \'source /etc/dojo.d/variables/01-bash-functions.sh && my_bash_func\'"'],
env=envs)
stdout_value_bytes, stderr_value_bytes = proc.communicate()
stdout = str(stdout_value_bytes)
stderr = str(stderr_value_bytes)
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(stdout, stderr)
assert 'Dojo version' in stderr, dojo_combined_output_str
assert 'Written file /tmp/test-dojo-environment-bash-functions-testdojorunid, contents:' in stderr, dojo_combined_output_str
assert 'my_bash_func() { echo "hello"' in stderr, dojo_combined_output_str
assert '/etc/dojo.d/variables/01-bash-functions.sh' in stderr, dojo_combined_output_str
assert_no_warnings_or_errors(stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(stdout, dojo_combined_output_str)
# the bash function was invoked
assert 'hello' in stdout, dojo_combined_output_str
assert 'Exit status from run command: 0' in stderr, dojo_combined_output_str
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_pull():
result = run_dojo('--driver=docker-compose --dcf=./test/test-files/itest-dc.yaml --debug=true --action=pull --image=alpine:3.8'.split(' '))
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert 'pulling' in result.stderr, dojo_combined_output_str
assert "Exit status from pull command: 0" in result.stderr, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
def test_docker_compose_pull_when_no_such_image_exists():
result = run_dojo('--driver=docker-compose --dcf=./test/test-files/itest-dc.yaml --debug=true --action=pull --image=no_such_image91291925129q783187314218194:abc111aaa.9981412'.split(' '))
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert 'repository does not exist or may require \'docker login\'' in result.stderr, dojo_combined_output_str
assert "Exit status from pull command: 1" in result.stderr, dojo_combined_output_str
assert "" == result.stdout, dojo_combined_output_str
assert result.returncode == 1
def test_docker_compose_dojo_work_variables():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
os.makedirs(os.path.join(project_root, 'test/test-files/custom-dir-env-var'), exist_ok=True)
with open(os.path.join(project_root, 'test/test-files/custom-dir-env-var/file1.txt'), 'w') as f:
f.write('123')
result = run_dojo(['--driver=docker-compose', '--dcf=./test/test-files/itest-dc-env-var.yaml',
'--debug=true', '--test=true', '--image=alpine:3.8', '--', 'sh',
'-c', "cat /dojo/work/custom-dir/file1.txt"])
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert "Dojo version" in result.stderr, dojo_combined_output_str
assert not "DOJO_WORK_OUTER variable is not set" in result.stderr, dojo_combined_output_str
assert not "DOJO_WORK_INNER variable is not set" in result.stderr, dojo_combined_output_str
assert '123' in result.stdout, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
assert result.returncode == 0
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_run_shows_nondefault_containers_logs_when_all_containers_succeeded():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
# make the command of the default container last long enough so that the other
# container is started and managed to produce some output
result = run_dojo(['--driver=docker-compose', '--dcf=./test/test-files/itest-dc-verbose.yaml',
'--print-logs=always',
'--debug=true', '--test=true', '--image=alpine:3.8', '--', 'sh',
'-c', "echo 1; sleep 1; echo 2; sleep 1;"])
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert result.returncode == 0
assert 'echo 1; sleep 1; echo 2; sleep 1;' in result.stderr, dojo_combined_output_str
assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
assert 'Here are logs of container: testdojorunid_abc_1' in result.stderr, dojo_combined_output_str
assert 'which status is: running' in result.stderr, dojo_combined_output_str
assert 'iteration: 1' in result.stderr, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_run_shows_nondefault_containers_logs_when_nondefault_container_failed():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
# make the command of the default container last long enough so that the other
# container is started and managed to produce some output
result = run_dojo(['--driver=docker-compose', '--dcf=./test/test-files/itest-dc-verbose-fail.yaml',
'--print-logs=always',
'--debug=true', '--test=true', '--image=alpine:3.8', '--', 'sh',
'-c', "echo 1; sleep 1; echo 2; sleep 1;"])
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert result.returncode == 0
assert 'echo 1; sleep 1; echo 2; sleep 1;' in result.stderr, dojo_combined_output_str
assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
assert 'Here are logs of container: testdojorunid_abc_1' in result.stderr, dojo_combined_output_str
assert 'which exited with exitcode: 127' in result.stderr, dojo_combined_output_str
assert 'some-non-existent-command: not found' in result.stderr, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def test_docker_compose_run_shows_nondefault_containers_logs_when_default_container_failed():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
# make the command of the default container last long enough so that the other
# container is started and managed to produce some output
result = run_dojo("--driver=docker-compose --dcf=./test/test-files/itest-dc-verbose.yaml --print-logs=failure --debug=true --test=true --image=alpine:3.8 -- some-non-existent-command".split())
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert result.returncode == 127
assert 'Exit status from run command: 127' in result.stderr, dojo_combined_output_str
assert 'Here are logs of container: testdojorunid_abc_1' in result.stderr, dojo_combined_output_str
assert 'which status is: running' in result.stderr, dojo_combined_output_str
assert 'iteration: 1' in result.stderr, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
def clean_up_dojo_logs_file(logs_file):
try:
os.remove(os.path.join(project_root, logs_file))
except FileNotFoundError:
pass
def test_docker_compose_run_shows_nondefault_containers_logs_when_all_containers_succeeded_print_logs_to_file():
clean_up_dc_containers()
clean_up_dc_network()
clean_up_dc_dojofile()
logs_file = "dojo-logs-testdojorunid_abc_1-testdojorunid.txt"
clean_up_dojo_logs_file(logs_file)
# make the command of the default container last long enough so that the other
# container is started and managed to produce some output
result = run_dojo(['--driver=docker-compose', '--dcf=./test/test-files/itest-dc-verbose.yaml',
'--print-logs=always', '--print-logs-target=file',
'--debug=false', '--test=true', '--image=alpine:3.8', '--', 'sh',
'-c', "echo 1; sleep 1; echo 2; sleep 1;"])
dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
assert 'Dojo version' in result.stderr, dojo_combined_output_str
assert result.returncode == 0
assert 'echo 1; sleep 1; echo 2; sleep 1;' in result.stderr, dojo_combined_output_str
assert 'The logs of container: testdojorunid_abc_1, which status is: running, were saved to file: dojo-logs-testdojorunid_abc_1-testdojorunid.txt' in result.stderr, dojo_combined_output_str
with open(logs_file, "r") as file:
contents = file.readlines()
assert 'iteration: 1\n' in contents
assert 'stdout:\n' in contents
assert 'stderr:\n' in contents
assert 'iteration: 1' not in result.stderr, dojo_combined_output_str
assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
test_dc_dojofile_is_removed()
test_dc_containers_are_removed()
test_dc_network_is_removed()
clean_up_dojo_logs_file(logs_file)
| 53.751445 | 196 | 0.740994 | 2,701 | 18,598 | 4.768604 | 0.07997 | 0.097826 | 0.146739 | 0.171196 | 0.885947 | 0.863665 | 0.850776 | 0.825233 | 0.793401 | 0.779193 | 0 | 0.012334 | 0.145553 | 18,598 | 345 | 197 | 53.907246 | 0.798188 | 0.034412 | 0 | 0.62069 | 0 | 0.027586 | 0.267008 | 0.116454 | 0 | 0 | 0 | 0 | 0.372414 | 1 | 0.075862 | false | 0.006897 | 0.006897 | 0 | 0.082759 | 0.02069 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
dcbc39bc63dbc1304dd98cc503678e50288174cf | 19,266 | py | Python | tests/test_init.py | bieniu/pygios | 23b3d51cbdaf5a4e2e33fef6538d1adf319a52c6 | ["Apache-2.0"] | null | null | null | tests/test_init.py | bieniu/pygios | 23b3d51cbdaf5a4e2e33fef6538d1adf319a52c6 | ["Apache-2.0"] | 2 | 2020-06-12T13:37:23.000Z | 2021-09-29T18:47:03.000Z | tests/test_init.py | bieniu/pygios | 23b3d51cbdaf5a4e2e33fef6538d1adf319a52c6 | ["Apache-2.0"] | 3 | 2020-11-13T11:56:37.000Z | 2021-04-22T13:49:50.000Z |
"""Tests for gios package."""
import json
import aiohttp
import pytest
from aioresponses import aioresponses
from gios import ApiError, Gios, InvalidSensorsData, NoStationError
INVALID_STATION_ID = 0
VALID_STATION_ID = 552
VALID_STATION_NAME = "Test Name"
VALID_LATITUDE = 99.99
VALID_LONGITUDE = 88.88
@pytest.mark.asyncio
async def test_valid_data_first_value(): # pylint:disable=too-many-statements
"""Test with valid data and valid first sensor's value."""
with open("tests/fixtures/stations.json", encoding="utf-8") as file:
stations = json.load(file)
with open("tests/fixtures/station.json", encoding="utf-8") as file:
station = json.load(file)
with open("tests/fixtures/sensor_658.json", encoding="utf-8") as file:
sensor_658 = json.load(file)
with open("tests/fixtures/sensor_660.json", encoding="utf-8") as file:
sensor_660 = json.load(file)
with open("tests/fixtures/sensor_665.json", encoding="utf-8") as file:
sensor_665 = json.load(file)
with open("tests/fixtures/sensor_667.json", encoding="utf-8") as file:
sensor_667 = json.load(file)
with open("tests/fixtures/sensor_670.json", encoding="utf-8") as file:
sensor_670 = json.load(file)
with open("tests/fixtures/sensor_672.json", encoding="utf-8") as file:
sensor_672 = json.load(file)
with open("tests/fixtures/sensor_14395.json", encoding="utf-8") as file:
sensor_14395 = json.load(file)
with open("tests/fixtures/indexes.json", encoding="utf-8") as file:
indexes = json.load(file)
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
payload=stations,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/station/sensors/{VALID_STATION_ID}",
payload=station,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/672",
payload=sensor_672,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/658",
payload=sensor_658,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/660",
payload=sensor_660,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/665",
payload=sensor_665,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/667",
payload=sensor_667,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/670",
payload=sensor_670,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/14395",
payload=sensor_14395,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/aqindex/getIndex/{VALID_STATION_ID}",
payload=indexes,
)
gios = Gios(VALID_STATION_ID, session)
data = await gios.async_update()
await session.close()
assert gios.station_name == VALID_STATION_NAME
assert gios.station_id == VALID_STATION_ID
assert gios.latitude == VALID_LATITUDE
assert gios.longitude == VALID_LONGITUDE
assert data.so2.value == 11.6502
assert data.so2.index == "very good"
assert data.c6h6.value == 2.57148
assert data.c6h6.index == "very good"
assert data.co.value == 786.702
assert data.co.index == "very good"
assert data.no2.value == 59.9545
assert data.no2.index == "very good"
assert data.o3.value == 8.63111
assert data.o3.index == "good"
assert data.pm25.value == 59.9428
assert data.pm25.index == "very good"
assert data.pm10.value == 123.879
assert data.pm10.index == "very good"
assert data.aqi.value == "good"
@pytest.mark.asyncio
async def test_api_error():
"""Test GIOS API error."""
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
status=404,
)
gios = Gios(VALID_STATION_ID, session)
        # pytest.raises fails the test when no exception is raised at all,
        # unlike the original bare try/except, which would pass silently
        with pytest.raises(ApiError) as excinfo:
            await gios.async_update()
        assert str(excinfo.value.status) == "404"
await session.close()
@pytest.mark.asyncio
async def test_valid_data_second_value(): # pylint:disable=too-many-statements
"""Test with valid data and valid second sensor's value."""
with open("tests/fixtures/stations.json", encoding="utf-8") as file:
stations = json.load(file)
with open("tests/fixtures/station.json", encoding="utf-8") as file:
station = json.load(file)
with open("tests/fixtures/sensor_658.json", encoding="utf-8") as file:
sensor_658 = json.load(file)
with open("tests/fixtures/sensor_660.json", encoding="utf-8") as file:
sensor_660 = json.load(file)
with open("tests/fixtures/sensor_665.json", encoding="utf-8") as file:
sensor_665 = json.load(file)
with open("tests/fixtures/sensor_667.json", encoding="utf-8") as file:
sensor_667 = json.load(file)
with open("tests/fixtures/sensor_670.json", encoding="utf-8") as file:
sensor_670 = json.load(file)
with open("tests/fixtures/sensor_672.json", encoding="utf-8") as file:
sensor_672 = json.load(file)
with open("tests/fixtures/sensor_14395.json", encoding="utf-8") as file:
sensor_14395 = json.load(file)
with open("tests/fixtures/indexes.json", encoding="utf-8") as file:
indexes = json.load(file)
sensor_658["values"][0]["value"] = None
sensor_660["values"][0]["value"] = None
sensor_665["values"][0]["value"] = None
sensor_667["values"][0]["value"] = None
sensor_670["values"][0]["value"] = None
sensor_672["values"][0]["value"] = None
sensor_14395["values"][0]["value"] = None
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
payload=stations,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/station/sensors/{VALID_STATION_ID}",
payload=station,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/672",
payload=sensor_672,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/658",
payload=sensor_658,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/660",
payload=sensor_660,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/665",
payload=sensor_665,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/667",
payload=sensor_667,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/670",
payload=sensor_670,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/14395",
payload=sensor_14395,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/aqindex/getIndex/{VALID_STATION_ID}",
payload=indexes,
)
gios = Gios(VALID_STATION_ID, session)
data = await gios.async_update()
await session.close()
assert gios.station_name == VALID_STATION_NAME
assert gios.station_id == VALID_STATION_ID
assert gios.latitude == VALID_LATITUDE
assert gios.longitude == VALID_LONGITUDE
assert data.so2.value == 11.501
assert data.so2.index == "very good"
assert data.c6h6.value == 3.24432
assert data.c6h6.index == "very good"
assert data.co.value == 1041.74
assert data.co.index == "very good"
assert data.no2.value == 52.6198
assert data.no2.index == "very good"
assert data.o3.value == 4.93778
assert data.o3.index == "good"
assert data.pm25.value == 72.0243
assert data.pm25.index == "very good"
assert data.pm10.value == 115.559
assert data.pm10.index == "very good"
assert data.aqi.value == "good"
@pytest.mark.asyncio
async def test_no_indexes_data(): # pylint: disable=too-many-statements
"""Test with valid data."""
with open("tests/fixtures/stations.json", encoding="utf-8") as file:
stations = json.load(file)
with open("tests/fixtures/station.json", encoding="utf-8") as file:
station = json.load(file)
with open("tests/fixtures/sensor_658.json", encoding="utf-8") as file:
sensor_658 = json.load(file)
with open("tests/fixtures/sensor_660.json", encoding="utf-8") as file:
sensor_660 = json.load(file)
with open("tests/fixtures/sensor_665.json", encoding="utf-8") as file:
sensor_665 = json.load(file)
with open("tests/fixtures/sensor_667.json", encoding="utf-8") as file:
sensor_667 = json.load(file)
with open("tests/fixtures/sensor_670.json", encoding="utf-8") as file:
sensor_670 = json.load(file)
with open("tests/fixtures/sensor_672.json", encoding="utf-8") as file:
sensor_672 = json.load(file)
with open("tests/fixtures/sensor_14395.json", encoding="utf-8") as file:
sensor_14395 = json.load(file)
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
payload=stations,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/station/sensors/{VALID_STATION_ID}",
payload=station,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/672",
payload=sensor_672,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/658",
payload=sensor_658,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/660",
payload=sensor_660,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/665",
payload=sensor_665,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/667",
payload=sensor_667,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/670",
payload=sensor_670,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/14395",
payload=sensor_14395,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/aqindex/getIndex/{VALID_STATION_ID}",
payload={},
)
gios = Gios(VALID_STATION_ID, session)
data = await gios.async_update()
await session.close()
assert gios.station_name == VALID_STATION_NAME
assert gios.station_id == VALID_STATION_ID
assert gios.latitude == VALID_LATITUDE
assert gios.longitude == VALID_LONGITUDE
assert data.so2.value == 11.6502
assert data.so2.index is None
assert data.c6h6.value == 2.57148
assert data.c6h6.index is None
assert data.co.value == 786.702
assert data.co.index is None
assert data.no2.value == 59.9545
assert data.no2.index is None
assert data.o3.value == 8.63111
assert data.o3.index is None
assert data.pm25.value == 59.9428
assert data.pm25.index is None
assert data.pm10.value == 123.879
assert data.pm10.index is None
assert data.aqi is None
@pytest.mark.asyncio
async def test_no_sensor_data_1(): # pylint:disable=too-many-statements
"""Test with no sensor data."""
with open("tests/fixtures/stations.json", encoding="utf-8") as file:
stations = json.load(file)
with open("tests/fixtures/station.json", encoding="utf-8") as file:
station = json.load(file)
with open("tests/fixtures/sensor_658.json", encoding="utf-8") as file:
sensor_658 = json.load(file)
with open("tests/fixtures/sensor_660.json", encoding="utf-8") as file:
sensor_660 = json.load(file)
with open("tests/fixtures/sensor_665.json", encoding="utf-8") as file:
sensor_665 = json.load(file)
with open("tests/fixtures/sensor_667.json", encoding="utf-8") as file:
sensor_667 = json.load(file)
with open("tests/fixtures/sensor_670.json", encoding="utf-8") as file:
sensor_670 = json.load(file)
with open("tests/fixtures/sensor_672.json", encoding="utf-8") as file:
sensor_672 = json.load(file)
with open("tests/fixtures/sensor_14395.json", encoding="utf-8") as file:
sensor_14395 = json.load(file)
with open("tests/fixtures/indexes.json", encoding="utf-8") as file:
indexes = json.load(file)
sensor_658["values"][0]["value"] = None
sensor_658["values"][1]["value"] = None
sensor_660["values"][0]["value"] = None
sensor_660["values"][1]["value"] = None
sensor_665["values"][0]["value"] = None
sensor_665["values"][1]["value"] = None
sensor_667["values"][0]["value"] = None
sensor_667["values"][1]["value"] = None
sensor_670["values"][0]["value"] = None
sensor_670["values"][1]["value"] = None
sensor_672["values"][0]["value"] = None
sensor_672["values"][1]["value"] = None
sensor_14395["values"][0]["value"] = None
sensor_14395["values"][1]["value"] = None
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
payload=stations,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/station/sensors/{VALID_STATION_ID}",
payload=station,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/672",
payload=sensor_672,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/658",
payload=sensor_658,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/660",
payload=sensor_660,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/665",
payload=sensor_665,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/667",
payload=sensor_667,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/670",
payload=sensor_670,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/14395",
payload=sensor_14395,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/aqindex/getIndex/{VALID_STATION_ID}",
payload=indexes,
)
gios = Gios(VALID_STATION_ID, session)
        with pytest.raises(InvalidSensorsData) as excinfo:
            await gios.async_update()
        assert str(excinfo.value.status) == "Invalid sensor data from GIOS API"
await session.close()
@pytest.mark.asyncio
async def test_invalid_sensor_data_2():
"""Test with invalid sensor data."""
with open("tests/fixtures/stations.json", encoding="utf-8") as file:
stations = json.load(file)
with open("tests/fixtures/station.json", encoding="utf-8") as file:
station = json.load(file)
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
payload=stations,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/station/sensors/{VALID_STATION_ID}",
payload=station,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/672",
payload=None,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/658",
payload=None,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/660",
payload=None,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/665",
payload=None,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/667",
payload=None,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/670",
payload=None,
)
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/data/getData/14395",
payload=None,
)
gios = Gios(VALID_STATION_ID, session)
        with pytest.raises(InvalidSensorsData) as excinfo:
            await gios.async_update()
        assert str(excinfo.value.status) == "Invalid sensor data from GIOS API"
await session.close()
@pytest.mark.asyncio
async def test_no_station_data():
"""Test with no station data."""
with open("tests/fixtures/stations.json", encoding="utf-8") as file:
stations = json.load(file)
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
payload=stations,
)
session_mock.get(
f"http://api.gios.gov.pl/pjp-api/rest/station/sensors/{VALID_STATION_ID}",
payload={},
)
gios = Gios(VALID_STATION_ID, session)
        with pytest.raises(InvalidSensorsData) as excinfo:
            await gios.async_update()
        assert str(excinfo.value.status) == "Invalid measuring station data from GIOS API"
await session.close()
@pytest.mark.asyncio
async def test_no_stations_data():
"""Test with no stations data."""
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
payload={},
)
gios = Gios(VALID_STATION_ID, session)
        with pytest.raises(ApiError) as excinfo:
            await gios.async_update()
        assert str(excinfo.value.status) == "Invalid measuring stations list from GIOS API"
await session.close()
@pytest.mark.asyncio
async def test_invalid_station_id():
"""Test with invalid station_id."""
with open("tests/fixtures/stations.json", encoding="utf-8") as file:
stations = json.load(file)
session = aiohttp.ClientSession()
with aioresponses() as session_mock:
session_mock.get(
"http://api.gios.gov.pl/pjp-api/rest/station/findAll",
payload=stations,
)
gios = Gios(INVALID_STATION_ID, session)
        with pytest.raises(NoStationError) as excinfo:
            await gios.async_update()
        assert str(excinfo.value.status) == "0 is not a valid measuring station ID"
await session.close()
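
# --- Sketch, not part of the original suite: every test above repeats the same
# --- fixture-loading boilerplate, which could be collapsed into one helper.
# --- The name "load_fixture" is hypothetical.
def load_fixture(name):
    """Load a JSON fixture from tests/fixtures by base name."""
    with open(f"tests/fixtures/{name}.json", encoding="utf-8") as file:
        return json.load(file)

# e.g.: stations = load_fixture("stations"); sensor_658 = load_fixture("sensor_658")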
| 35.677778
| 87
| 0.61445
| 2,554
| 19,266
| 4.526233
| 0.051292
| 0.059948
| 0.065398
| 0.065398
| 0.948702
| 0.943166
| 0.94083
| 0.929325
| 0.918945
| 0.875433
| 0
| 0.049873
| 0.246496
| 19,266
| 539
| 88
| 35.74397
| 0.746435
| 0.008564
| 0
| 0.783439
| 0
| 0.095541
| 0.268035
| 0.067182
| 0
| 0
| 0
| 0
| 0.133758
| 1
| 0
| false
| 0
| 0.010616
| 0
| 0.010616
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dcf7b27ea9b8ffae10299c628ad89789498495d8
| 2,103
|
py
|
Python
|
PYTHON/WebBackup/flask_harrier_backup/request_common.py
|
YizheZhang-Ervin/BackupPrograms
|
343cd9f0591b750e5b5b1cca80b806ad76286697
|
[
"MIT"
] | null | null | null |
PYTHON/WebBackup/flask_harrier_backup/request_common.py
|
YizheZhang-Ervin/BackupPrograms
|
343cd9f0591b750e5b5b1cca80b806ad76286697
|
[
"MIT"
] | null | null | null |
PYTHON/WebBackup/flask_harrier_backup/request_common.py
|
YizheZhang-Ervin/BackupPrograms
|
343cd9f0591b750e5b5b1cca80b806ad76286697
|
[
"MIT"
] | null | null | null |
import requests
# GET helper: fetch a URL and return the requested part of the response
def hunt_get(url, x=None, concat_str=None, return_content=None):
try:
if not str(url).startswith('http'):
url = 'https://' + url
if concat_str is None and x is not None:
r = requests.get(url, x)
elif concat_str is not None and x is None:
r = requests.get(url+concat_str)
elif concat_str is not None and x is not None:
r = requests.get(url+concat_str, x)
else:
r = requests.get(url)
r.raise_for_status()
r.encoding = r.apparent_encoding
if return_content == 'header':
return r.headers
elif return_content == 'json':
return r.json()
elif return_content == 'content':
return r.content
else:
return r.text
except Exception:
        return 'something went wrong with the request'
# POST helper: same as hunt_get, but sends a POST request
def hunt_post(url, x=None, concat_str=None, return_content=None):
try:
if not str(url).startswith('http'):
url = 'https://' + url
if concat_str is None and x is not None:
r = requests.post(url, x)
elif concat_str is not None and x is None:
r = requests.post(url+concat_str)
elif concat_str is not None and x is not None:
r = requests.post(url+concat_str, x)
else:
r = requests.post(url)
r.raise_for_status()
r.encoding = r.apparent_encoding
if return_content == 'header':
return r.headers
elif return_content == 'json':
return r.json()
elif return_content == 'content':
return r.content
else:
return r.text
except Exception:
        return 'something went wrong with the request'
if __name__ == '__main__':
url1 = 'http://item.jd.com/2967929.html'
url2 = 'http://www.so.com/s'
url3 = 'https://www.baidu'
    # r1 = hunt_get(url1, return_content='header')
    # r2 = hunt_get(url2, x={'q': 'xxx'})
    # r3 = hunt_get(url3, concat_str='.com')
    # print(r1, r2, r3)
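
# --- Sketch, not part of the original module: hunt_get and hunt_post differ
# --- only in the HTTP verb, so a single hypothetical helper "hunt" could cover
# --- both via requests.request, which dispatches on the verb name.
def hunt(url, x=None, concat_str=None, return_content=None, method='get'):
    try:
        if not str(url).startswith('http'):
            url = 'https://' + url
        if concat_str is not None:
            url = url + concat_str
        # GET sends x as query parameters, anything else as the request body
        r = requests.request(method, url,
                             params=x if method == 'get' else None,
                             data=x if method != 'get' else None)
        r.raise_for_status()
        r.encoding = r.apparent_encoding
        if return_content == 'header':
            return r.headers
        elif return_content == 'json':
            return r.json()
        elif return_content == 'content':
            return r.content
        return r.text
    except Exception:
        return 'something went wrong with the request'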
| 30.478261
| 65
| 0.565383
| 287
| 2,103
| 4.010453
| 0.216028
| 0.101651
| 0.062554
| 0.052129
| 0.836664
| 0.796699
| 0.796699
| 0.750652
| 0.750652
| 0.750652
| 0
| 0.01338
| 0.324774
| 2,103
| 68
| 66
| 30.926471
| 0.797183
| 0.084641
| 0
| 0.716981
| 0
| 0
| 0.093326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.018868
| 0
| 0.245283
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d27bbd55fe3c3ea001bdaf1b57e9d54027396f6
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_fiora/na_fiora_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_fiora/na_fiora_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_fiora/na_fiora_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Fiora_Jng_Aatrox(Ratings):
pass
class NA_Fiora_Jng_Ahri(Ratings):
pass
class NA_Fiora_Jng_Akali(Ratings):
pass
class NA_Fiora_Jng_Alistar(Ratings):
pass
class NA_Fiora_Jng_Amumu(Ratings):
pass
class NA_Fiora_Jng_Anivia(Ratings):
pass
class NA_Fiora_Jng_Annie(Ratings):
pass
class NA_Fiora_Jng_Ashe(Ratings):
pass
class NA_Fiora_Jng_AurelionSol(Ratings):
pass
class NA_Fiora_Jng_Azir(Ratings):
pass
class NA_Fiora_Jng_Bard(Ratings):
pass
class NA_Fiora_Jng_Blitzcrank(Ratings):
pass
class NA_Fiora_Jng_Brand(Ratings):
pass
class NA_Fiora_Jng_Braum(Ratings):
pass
class NA_Fiora_Jng_Caitlyn(Ratings):
pass
class NA_Fiora_Jng_Camille(Ratings):
pass
class NA_Fiora_Jng_Cassiopeia(Ratings):
pass
class NA_Fiora_Jng_Chogath(Ratings):
pass
class NA_Fiora_Jng_Corki(Ratings):
pass
class NA_Fiora_Jng_Darius(Ratings):
pass
class NA_Fiora_Jng_Diana(Ratings):
pass
class NA_Fiora_Jng_Draven(Ratings):
pass
class NA_Fiora_Jng_DrMundo(Ratings):
pass
class NA_Fiora_Jng_Ekko(Ratings):
pass
class NA_Fiora_Jng_Elise(Ratings):
pass
class NA_Fiora_Jng_Evelynn(Ratings):
pass
class NA_Fiora_Jng_Ezreal(Ratings):
pass
class NA_Fiora_Jng_Fiddlesticks(Ratings):
pass
class NA_Fiora_Jng_Fiora(Ratings):
pass
class NA_Fiora_Jng_Fizz(Ratings):
pass
class NA_Fiora_Jng_Galio(Ratings):
pass
class NA_Fiora_Jng_Gangplank(Ratings):
pass
class NA_Fiora_Jng_Garen(Ratings):
pass
class NA_Fiora_Jng_Gnar(Ratings):
pass
class NA_Fiora_Jng_Gragas(Ratings):
pass
class NA_Fiora_Jng_Graves(Ratings):
pass
class NA_Fiora_Jng_Hecarim(Ratings):
pass
class NA_Fiora_Jng_Heimerdinger(Ratings):
pass
class NA_Fiora_Jng_Illaoi(Ratings):
pass
class NA_Fiora_Jng_Irelia(Ratings):
pass
class NA_Fiora_Jng_Ivern(Ratings):
pass
class NA_Fiora_Jng_Janna(Ratings):
pass
class NA_Fiora_Jng_JarvanIV(Ratings):
pass
class NA_Fiora_Jng_Jax(Ratings):
pass
class NA_Fiora_Jng_Jayce(Ratings):
pass
class NA_Fiora_Jng_Jhin(Ratings):
pass
class NA_Fiora_Jng_Jinx(Ratings):
pass
class NA_Fiora_Jng_Kalista(Ratings):
pass
class NA_Fiora_Jng_Karma(Ratings):
pass
class NA_Fiora_Jng_Karthus(Ratings):
pass
class NA_Fiora_Jng_Kassadin(Ratings):
pass
class NA_Fiora_Jng_Katarina(Ratings):
pass
class NA_Fiora_Jng_Kayle(Ratings):
pass
class NA_Fiora_Jng_Kayn(Ratings):
pass
class NA_Fiora_Jng_Kennen(Ratings):
pass
class NA_Fiora_Jng_Khazix(Ratings):
pass
class NA_Fiora_Jng_Kindred(Ratings):
pass
class NA_Fiora_Jng_Kled(Ratings):
pass
class NA_Fiora_Jng_KogMaw(Ratings):
pass
class NA_Fiora_Jng_Leblanc(Ratings):
pass
class NA_Fiora_Jng_LeeSin(Ratings):
pass
class NA_Fiora_Jng_Leona(Ratings):
pass
class NA_Fiora_Jng_Lissandra(Ratings):
pass
class NA_Fiora_Jng_Lucian(Ratings):
pass
class NA_Fiora_Jng_Lulu(Ratings):
pass
class NA_Fiora_Jng_Lux(Ratings):
pass
class NA_Fiora_Jng_Malphite(Ratings):
pass
class NA_Fiora_Jng_Malzahar(Ratings):
pass
class NA_Fiora_Jng_Maokai(Ratings):
pass
class NA_Fiora_Jng_MasterYi(Ratings):
pass
class NA_Fiora_Jng_MissFortune(Ratings):
pass
class NA_Fiora_Jng_MonkeyKing(Ratings):
pass
class NA_Fiora_Jng_Mordekaiser(Ratings):
pass
class NA_Fiora_Jng_Morgana(Ratings):
pass
class NA_Fiora_Jng_Nami(Ratings):
pass
class NA_Fiora_Jng_Nasus(Ratings):
pass
class NA_Fiora_Jng_Nautilus(Ratings):
pass
class NA_Fiora_Jng_Nidalee(Ratings):
pass
class NA_Fiora_Jng_Nocturne(Ratings):
pass
class NA_Fiora_Jng_Nunu(Ratings):
pass
class NA_Fiora_Jng_Olaf(Ratings):
pass
class NA_Fiora_Jng_Orianna(Ratings):
pass
class NA_Fiora_Jng_Ornn(Ratings):
pass
class NA_Fiora_Jng_Pantheon(Ratings):
pass
class NA_Fiora_Jng_Poppy(Ratings):
pass
class NA_Fiora_Jng_Quinn(Ratings):
pass
class NA_Fiora_Jng_Rakan(Ratings):
pass
class NA_Fiora_Jng_Rammus(Ratings):
pass
class NA_Fiora_Jng_RekSai(Ratings):
pass
class NA_Fiora_Jng_Renekton(Ratings):
pass
class NA_Fiora_Jng_Rengar(Ratings):
pass
class NA_Fiora_Jng_Riven(Ratings):
pass
class NA_Fiora_Jng_Rumble(Ratings):
pass
class NA_Fiora_Jng_Ryze(Ratings):
pass
class NA_Fiora_Jng_Sejuani(Ratings):
pass
class NA_Fiora_Jng_Shaco(Ratings):
pass
class NA_Fiora_Jng_Shen(Ratings):
pass
class NA_Fiora_Jng_Shyvana(Ratings):
pass
class NA_Fiora_Jng_Singed(Ratings):
pass
class NA_Fiora_Jng_Sion(Ratings):
pass
class NA_Fiora_Jng_Sivir(Ratings):
pass
class NA_Fiora_Jng_Skarner(Ratings):
pass
class NA_Fiora_Jng_Sona(Ratings):
pass
class NA_Fiora_Jng_Soraka(Ratings):
pass
class NA_Fiora_Jng_Swain(Ratings):
pass
class NA_Fiora_Jng_Syndra(Ratings):
pass
class NA_Fiora_Jng_TahmKench(Ratings):
pass
class NA_Fiora_Jng_Taliyah(Ratings):
pass
class NA_Fiora_Jng_Talon(Ratings):
pass
class NA_Fiora_Jng_Taric(Ratings):
pass
class NA_Fiora_Jng_Teemo(Ratings):
pass
class NA_Fiora_Jng_Thresh(Ratings):
pass
class NA_Fiora_Jng_Tristana(Ratings):
pass
class NA_Fiora_Jng_Trundle(Ratings):
pass
class NA_Fiora_Jng_Tryndamere(Ratings):
pass
class NA_Fiora_Jng_TwistedFate(Ratings):
pass
class NA_Fiora_Jng_Twitch(Ratings):
pass
class NA_Fiora_Jng_Udyr(Ratings):
pass
class NA_Fiora_Jng_Urgot(Ratings):
pass
class NA_Fiora_Jng_Varus(Ratings):
pass
class NA_Fiora_Jng_Vayne(Ratings):
pass
class NA_Fiora_Jng_Veigar(Ratings):
pass
class NA_Fiora_Jng_Velkoz(Ratings):
pass
class NA_Fiora_Jng_Vi(Ratings):
pass
class NA_Fiora_Jng_Viktor(Ratings):
pass
class NA_Fiora_Jng_Vladimir(Ratings):
pass
class NA_Fiora_Jng_Volibear(Ratings):
pass
class NA_Fiora_Jng_Warwick(Ratings):
pass
class NA_Fiora_Jng_Xayah(Ratings):
pass
class NA_Fiora_Jng_Xerath(Ratings):
pass
class NA_Fiora_Jng_XinZhao(Ratings):
pass
class NA_Fiora_Jng_Yasuo(Ratings):
pass
class NA_Fiora_Jng_Yorick(Ratings):
pass
class NA_Fiora_Jng_Zac(Ratings):
pass
class NA_Fiora_Jng_Zed(Ratings):
pass
class NA_Fiora_Jng_Ziggs(Ratings):
pass
class NA_Fiora_Jng_Zilean(Ratings):
pass
class NA_Fiora_Jng_Zyra(Ratings):
pass
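
# --- Sketch, not part of the original module: boilerplate files like this one
# --- declare one empty Ratings subclass per matchup, which could instead be
# --- generated at import time. CHAMPIONS below is a hypothetical, truncated
# --- list; the generated names mirror the classes above.
CHAMPIONS = ["Aatrox", "Ahri", "Akali"]  # ...one entry per champion

for _name in CHAMPIONS:
    globals()["NA_Fiora_Jng_" + _name] = type(
        "NA_Fiora_Jng_" + _name, (Ratings,), {}
    )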
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
b4a0b64aa8ae0413832c0911cbe5f3fd5d89f7ea
| 5,595
|
py
|
Python
|
moai/nn/residual/standard.py
|
tzole1155/moai
|
d1afb3aaf8ddcd7a1c98b84d6365afb846ae3180
|
[
"Apache-2.0"
] | 10
|
2021-04-02T11:21:33.000Z
|
2022-01-18T18:32:32.000Z
|
moai/nn/residual/standard.py
|
tzole1155/moai
|
d1afb3aaf8ddcd7a1c98b84d6365afb846ae3180
|
[
"Apache-2.0"
] | 1
|
2022-03-22T20:10:55.000Z
|
2022-03-24T13:11:02.000Z
|
moai/nn/residual/standard.py
|
tzole1155/moai
|
d1afb3aaf8ddcd7a1c98b84d6365afb846ae3180
|
[
"Apache-2.0"
] | 3
|
2021-05-16T20:47:40.000Z
|
2021-12-01T21:15:36.000Z
|
import moai.nn.convolution as mic
import moai.nn.activation as mia
import torch
__all__ = [
"Standard",
"PreResidual",
"PreActivation",
]
'''
Slightly adapted version of
Deep Residual Learning for Image Recognition (https://arxiv.org/pdf/1512.03385.pdf)
(adaptation on activation ordering as denoted in the factory below)
'''
class Standard(torch.nn.Module): # (b) in https://towardsdatascience.com/an-overview-of-resnet-and-its-variants-5281e2f56035
def __init__(self,
convolution_type: str,
activation_type: str,
in_features: int,
out_features: int,
convolution_params: dict,
activation_params: dict,
strided: bool,
):
super(Standard, self).__init__()
self.W1 = mic.make_conv_3x3(
convolution_type=convolution_type,
in_channels=in_features,
out_channels=out_features,
stride=2 if strided else 1,
**convolution_params
)
self.A1 = mia.make_activation(
features=out_features,
activation_type=activation_type,
**activation_params
)
self.W2 = mic.make_conv_3x3(
convolution_type=convolution_type,
in_channels=out_features,
out_channels=out_features,
**convolution_params
)
self.A2 = mia.make_activation(
features=out_features,
activation_type=activation_type,
**activation_params
)
self.S = torch.nn.Identity() if in_features == out_features\
else mic.make_conv_1x1(
convolution_type=convolution_type,
in_channels=in_features,
out_channels=out_features
# using a 3x3 conv for shortcut downscaling instead of a 1x1 (used in detectron2 for example)
) if not strided else mic.make_conv_3x3(
convolution_type=convolution_type,
in_channels=in_features,
out_channels=out_features,
stride=2
)
def forward(self, x: torch.Tensor) -> torch.Tensor:
y = self.W2(self.A1(self.W1(x))) # y = W2 * A1(W1 * x)
return self.A2(self.S(x) + y) # out = A2(S(x) + y)
'''
Slightly adapted version of
Identity Mappings in Deep Residual Networks (https://arxiv.org/pdf/1603.05027.pdf)
(adaptation on activation ordering as denoted in the factory below)
'''
class PreResidual(Standard): # (c) in https://towardsdatascience.com/an-overview-of-resnet-and-its-variants-5281e2f56035
def __init__(self,
convolution_type: str,
activation_type: str,
in_features: int,
out_features: int,
convolution_params: dict,
activation_params: dict,
strided: bool,
):
super(PreResidual, self).__init__(
convolution_type=convolution_type,
activation_type=activation_type,
in_features=in_features,
out_features=out_features,
convolution_params=convolution_params,
activation_params=activation_params,
strided=strided,
)
def forward(self, x: torch.Tensor) -> torch.Tensor:
y = self.A2(self.W2(self.A1(self.W1(x)))) # y = A2(W2 * A1(W1 * x))
return self.S(x) + y # out = x + y
'''
Slightly adapted version of
Identity Mappings in Deep Residual Networks (https://arxiv.org/pdf/1603.05027.pdf)
(adaptation on activation ordering as denoted in the factory below)
'''
class PreActivation(torch.nn.Module): # (e) in https://towardsdatascience.com/an-overview-of-resnet-and-its-variants-5281e2f56035
def __init__(self,
convolution_type: str,
activation_type: str,
in_features: int,
out_features: int,
convolution_params: dict,
activation_params: dict,
strided: bool,
):
super(PreActivation, self).__init__()
self.W1 = mic.make_conv_3x3(
convolution_type=convolution_type,
in_channels=in_features,
out_channels=out_features,
stride=2 if strided else 1,
**convolution_params
)
self.A1 = mia.make_activation(
features=in_features,
activation_type=activation_type,
**activation_params
)
self.W2 = mic.make_conv_3x3(
convolution_type=convolution_type,
in_channels=out_features,
out_channels=out_features,
**convolution_params
)
self.A2 = mia.make_activation(
features=out_features,
activation_type=activation_type,
**activation_params
)
self.S = torch.nn.Identity() if in_features == out_features\
else mic.make_conv_1x1(
convolution_type=convolution_type,
in_channels=in_features,
out_channels=out_features
# using a 3x3 conv for shortcut downscaling instead of a 1x1 (used in detectron2 for example)
) if not strided else mic.make_conv_3x3(
convolution_type=convolution_type,
in_channels=in_features,
out_channels=out_features,
stride=2
)
def forward(self, x: torch.Tensor) -> torch.Tensor:
y = self.W2(self.A2(self.W1(self.A1(x)))) # y = W2 * A2(W1 * A1(x))
return self.S(x) + y # out = x + y
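
# --- Sketch in plain torch.nn, independent of moai's conv/activation
# --- factories: the three residual orderings above written out for an identity
# --- shortcut, two 3x3 convs W1/W2 and ReLU activations A1/A2. The channel
# --- count c is a hypothetical example value.
import torch

c = 16
W1 = torch.nn.Conv2d(c, c, kernel_size=3, padding=1)
W2 = torch.nn.Conv2d(c, c, kernel_size=3, padding=1)
A1, A2 = torch.nn.ReLU(), torch.nn.ReLU()
x = torch.randn(1, c, 8, 8)

standard = A2(x + W2(A1(W1(x))))        # (b): activation applied after the addition
pre_residual = x + A2(W2(A1(W1(x))))    # (c): addition after the final activation
pre_activation = x + W2(A2(W1(A1(x))))  # (e): each conv preceded by its activation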
| 37.05298
| 129
| 0.597855
| 633
| 5,595
| 5.047393
| 0.154818
| 0.098592
| 0.059468
| 0.084507
| 0.839437
| 0.836307
| 0.829108
| 0.829108
| 0.819718
| 0.808451
| 0
| 0.031576
| 0.315103
| 5,595
| 151
| 130
| 37.05298
| 0.802192
| 0.100804
| 0
| 0.722222
| 0
| 0
| 0.007233
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.02381
| 0
| 0.119048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
370d93ac11edd1b512609e256db3e514dbda65b4
| 1,097
|
py
|
Python
|
Leetcode/Python/_1742.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | 1
|
2021-11-28T15:03:32.000Z
|
2021-11-28T15:03:32.000Z
|
Leetcode/Python/_1742.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
Leetcode/Python/_1742.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
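# First approach: update the running maximum while counting balls into buckets.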
class Solution:
def countBalls(self, lowLimit: int, highLimit: int) -> int:
hashMap = {}
max_value = 0
for box_id in range(lowLimit, highLimit+1):
runner = box_id
box_num = 0
while runner > 0:
box_num += (runner%10)
runner //= 10
if box_num not in hashMap:
hashMap[box_num] = 1
else:
hashMap[box_num] += 1
if hashMap[box_num] > max_value:
max_value = hashMap[box_num]
return max_value
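# Alternative approach: count every box first, then take the max of the bucket counts.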
class Solution:
def countBalls(self, lowLimit: int, highLimit: int) -> int:
hashMap = {}
max_value = 0
for box_id in range(lowLimit, highLimit+1):
runner = box_id
box_num = 0
while runner > 0:
box_num += (runner%10)
runner //= 10
if box_num not in hashMap:
hashMap[box_num] = 1
else:
hashMap[box_num] += 1
array = hashMap.values()
return max(array)
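
# --- Sketch, not part of the original file: the same bucket counting written
# --- with collections.Counter; divmod keeps the digit-sum loop explicit.
from collections import Counter

def count_balls(lowLimit: int, highLimit: int) -> int:
    counts = Counter()
    for box_id in range(lowLimit, highLimit + 1):
        digit_sum, runner = 0, box_id
        while runner > 0:
            runner, digit = divmod(runner, 10)
            digit_sum += digit
        counts[digit_sum] += 1
    return max(counts.values())

assert count_balls(1, 10) == 2  # boxes 1 and 10 both land in bucket 1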
| 31.342857
| 63
| 0.482224
| 124
| 1,097
| 4.096774
| 0.225806
| 0.141732
| 0.153543
| 0.110236
| 0.822835
| 0.822835
| 0.822835
| 0.822835
| 0.822835
| 0.822835
| 0
| 0.032154
| 0.432999
| 1,097
| 34
| 64
| 32.264706
| 0.784566
| 0
| 0
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2eebceb82a53f233907da693b1221fb532dabe89
| 7,945
|
py
|
Python
|
prepare_training/scene_scaler.py
|
flclain/master
|
a40a2a5bf4e1017da3a5cf3456eb0b28de6f86e8
|
[
"MIT"
] | null | null | null |
prepare_training/scene_scaler.py
|
flclain/master
|
a40a2a5bf4e1017da3a5cf3456eb0b28de6f86e8
|
[
"MIT"
] | null | null | null |
prepare_training/scene_scaler.py
|
flclain/master
|
a40a2a5bf4e1017da3a5cf3456eb0b28de6f86e8
|
[
"MIT"
] | 1
|
2020-12-15T07:34:39.000Z
|
2020-12-15T07:34:39.000Z
|
import os
import csv
from sklearn.preprocessing import MinMaxScaler
import numpy as np
import helpers
import json
import sys
import joblib
class SceneScaler():
def __init__(self,data,center):
data = json.load(open(data))
self.center = center
self.temp = data["temp"] + "temp.csv"
self.original_file = data["preprocessed_datasets"] + "{}.csv"
self.scaler_dest = data["scalers"] + "{}.joblib"
def min_max_scale(self,scene):
# self.original_file = self.original_file.format(scene)
helpers.remove_file(self.temp)
os.rename(self.original_file.format(scene),self.temp)
helpers.remove_file(self.original_file.format(scene))
with open(self.original_file.format(scene),"a+") as data_csv:
data_writer = csv.writer(data_csv)
mms = MinMaxScaler()
            min_x,max_x,min_y,max_y = self.__get_boundaries(self.temp)
print(min_x,max_x,min_y,max_y)
x_mean = (min_x + max_x)/2.0
y_mean = (min_y + max_y)/2.0
min_ = min(min_x - x_mean,min_y - y_mean)
max_ = max(max_x - x_mean,max_y - y_mean)
# print(min_,max_)
mms = mms.fit([[min_],[max_]])
with open(self.temp) as scene_csv:
data_reader = csv.reader(scene_csv)
for row in data_reader:
if self.center:
row = self.__center_scene(row,x_mean,y_mean)
new_row = row
ps_untransformed = [[float(row[i])] for i in range(4,10)]
ps = mms.transform(ps_untransformed)
for i in range(len(ps)):
if ps_untransformed[i][0] == -10000:
new_row[4 + i] = -1
else:
new_row[4 + i] = ps[i][0]
data_writer.writerow(new_row)
helpers.remove_file(self.temp)
helpers.remove_file(self.scaler_dest.format(scene))
joblib.dump(mms, self.scaler_dest.format(scene))
    def __get_boundaries(self,file_path):
with open(file_path) as scene_csv:
data_reader = csv.reader(scene_csv)
            min_x,min_y = float("inf"),float("inf")
            # the running maxima start at -inf; the original 10e-30 is a tiny
            # positive number and would miss scenes with all-negative coordinates
            max_x,max_y = float("-inf"),float("-inf")
for row in data_reader:
# x = np.min([[float(row[4])],[float(row[6])],[float(row[8])]])
# y = np.min([[float(row[5])],[float(row[7])],[float(row[9])]])
# print([[float(row[i])] for i in range(4,10,2) ])
# print([[float(row[i])] for i in range(5,11,2) ])
x = np.min([[float(row[i])] for i in range(4,10,2) if float(row[i]) != -10000])
y = np.min([[float(row[i])] for i in range(5,11,2) if float(row[i]) != -10000])
if x < min_x and x != -1:
min_x = x
if y < min_y and y != -1:
min_y = y
if x > max_x and x != -1:
max_x = x
if y > max_y and y != -1:
max_y = y
return min_x,max_x,min_y,max_y
def __center_scene(self,row,x_mean,y_mean):
new_row = row
for i in range(4,10,2):
if float(row[i]) != -10000:
new_row[i] = float(row[i]) - x_mean
for i in range(5,11,2):
if float(row[i]) != -10000:
new_row[i] = float(row[i]) - y_mean
return new_row
class SceneScalerMultiScene():
def __init__(self,data,center,scene_list):
data = json.load(open(data))
self.center = center
self.temp = data["temp"] + "temp.csv"
self.original_file = data["preprocessed_datasets"] + "{}.csv"
self.scaler_dest = data["scalers"]
self.scene_list = scene_list
self.scaler = None
def __get_scaler(self):
mms = MinMaxScaler()
min_ = 1e30
max_ = -1e30
for scene in self.scene_list:
            min_x,max_x,min_y,max_y = self.__get_boundaries(self.original_file.format(scene))
# print(min_x,max_x,min_y,max_y)
x_mean = (min_x + max_x)/2.0
y_mean = (min_y + max_y)/2.0
min_scene = min(min_x - x_mean,min_y - y_mean)
max_scene = max(max_x - x_mean,max_y - y_mean)
print(min_scene,max_scene)
min_ = min(min_scene,min_)
max_ = max(max_scene,max_)
print(min_,max_)
mms = mms.fit([[min_],[max_]])
print(mms.data_min_,mms.data_max_)
self.scaler = mms
helpers.remove_file(self.scaler_dest)
joblib.dump(self.scaler, self.scaler_dest)
def min_max_scale(self,scene):
        if self.scaler is None:
self.__get_scaler()
helpers.remove_file(self.temp)
os.rename(self.original_file.format(scene),self.temp)
helpers.remove_file(self.original_file.format(scene))
with open(self.original_file.format(scene),"a+") as data_csv:
data_writer = csv.writer(data_csv)
            min_x,max_x,min_y,max_y = self.__get_boundaries(self.temp)
x_mean = (min_x + max_x)/2.0
y_mean = (min_y + max_y)/2.0
with open(self.temp) as scene_csv:
data_reader = csv.reader(scene_csv)
for row in data_reader:
if self.center:
row = self.__center_scene(row,x_mean,y_mean)
new_row = row
ps_untransformed = [[float(row[i])] for i in range(4,10)]
ps = self.scaler.transform(ps_untransformed)
for i in range(len(ps)):
if ps_untransformed[i][0] == -10000:
new_row[4 + i] = -1
else:
new_row[4 + i] = ps[i][0]
data_writer.writerow(new_row)
helpers.remove_file(self.temp)
    def __get_boundaries(self,file_path):
with open(file_path) as scene_csv:
data_reader = csv.reader(scene_csv)
            min_x,min_y = float("inf"),float("inf")
            # the running maxima start at -inf; the original 10e-30 is a tiny
            # positive number and would miss scenes with all-negative coordinates
            max_x,max_y = float("-inf"),float("-inf")
for row in data_reader:
# x = np.min([[float(row[4])],[float(row[6])],[float(row[8])]])
# y = np.min([[float(row[5])],[float(row[7])],[float(row[9])]])
# print([[float(row[i])] for i in range(4,10,2) ])
# print([[float(row[i])] for i in range(5,11,2) ])
x = np.min([[float(row[i])] for i in range(4,10,2) if float(row[i]) != -10000])
y = np.min([[float(row[i])] for i in range(5,11,2) if float(row[i]) != -10000])
if x < min_x and x != -1:
min_x = x
if y < min_y and y != -1:
min_y = y
if x > max_x and x != -1:
max_x = x
if y > max_y and y != -1:
max_y = y
return min_x,max_x,min_y,max_y
def __center_scene(self,row,x_mean,y_mean):
new_row = row
for i in range(4,10,2):
if float(row[i]) != -10000:
new_row[i] = float(row[i]) - x_mean
for i in range(5,11,2):
if float(row[i]) != -10000:
new_row[i] = float(row[i]) - y_mean
return new_row
# python prepare_training/scene_scaler.py parameters/data.json 1 lankershim_inter2
def main():
args = sys.argv
scene_scaler = SceneScaler(args[1],int(args[2]))
scene_scaler.min_max_scale(args[3])
if __name__ == "__main__":
main()
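
# --- Sketch, standalone: the core of min_max_scale for a single coordinate --
# --- fit a MinMaxScaler on the centered scene bounds, then map positions into
# --- [0, 1]. The bounds below are hypothetical example values.
from sklearn.preprocessing import MinMaxScaler

mms = MinMaxScaler()
min_, max_ = -50.0, 50.0           # centered scene extent
mms.fit([[min_], [max_]])
print(mms.transform([[0.0]]))      # [[0.5]] -- the scene center maps to 0.5
print(mms.transform([[max_]]))     # [[1.0]] -- the far edge maps to 1.0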
| 31.78
| 95
| 0.500441
| 1,094
| 7,945
| 3.392139
| 0.091408
| 0.073296
| 0.053355
| 0.047427
| 0.824037
| 0.79682
| 0.759364
| 0.759364
| 0.747507
| 0.747507
| 0
| 0.038005
| 0.374072
| 7,945
| 250
| 96
| 31.78
| 0.708224
| 0.078792
| 0
| 0.708861
| 0
| 0
| 0.015465
| 0.005748
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063291
| false
| 0
| 0.050633
| 0
| 0.151899
| 0.025316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ef7b2d2a7a1025039e9c44a8e3e3ca196bcb20a
| 86
|
py
|
Python
|
src/sample/simple.py
|
Jithendhar/sampleproject
|
62e564c52402ba1f7d7fdf1d188b3ff446635601
|
[
"MIT"
] | null | null | null |
src/sample/simple.py
|
Jithendhar/sampleproject
|
62e564c52402ba1f7d7fdf1d188b3ff446635601
|
[
"MIT"
] | null | null | null |
src/sample/simple.py
|
Jithendhar/sampleproject
|
62e564c52402ba1f7d7fdf1d188b3ff446635601
|
[
"MIT"
] | null | null | null |
def add_one(number):
return number + 1
# renamed from a second "add_one" definition, which silently shadowed the
# first and made add_one return number + 2
def add_two(number):
    return number + 2
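
# --- Sketch: minimal pytest-style checks for the helpers above; this is a
# --- hypothetical companion test, not part of the sample.
def test_add_one():
    assert add_one(3) == 4

def test_add_two():
    assert add_two(3) == 5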
| 17.2
| 21
| 0.674419
| 14
| 86
| 4
| 0.5
| 0.214286
| 0.321429
| 0.535714
| 0.964286
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.232558
| 86
| 4
| 22
| 21.5
| 0.818182
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
25d45c27fbe3e35b9b45e9427e2070e1c5f69f0c
| 25,977
|
py
|
Python
|
argo/workflows/client/api/archived_workflow_service_api.py
|
zgs225/argo-client-python
|
2e49a0df9b4f8fc9e90f7808caf22819ff54166c
|
[
"Apache-2.0"
] | 75
|
2020-03-17T03:55:23.000Z
|
2021-11-08T09:38:37.000Z
|
argo/workflows/client/api/archived_workflow_service_api.py
|
zgs225/argo-client-python
|
2e49a0df9b4f8fc9e90f7808caf22819ff54166c
|
[
"Apache-2.0"
] | 24
|
2020-04-18T13:02:36.000Z
|
2021-10-20T09:01:23.000Z
|
argo/workflows/client/api/archived_workflow_service_api.py
|
zgs225/argo-client-python
|
2e49a0df9b4f8fc9e90f7808caf22819ff54166c
|
[
"Apache-2.0"
] | 26
|
2020-04-18T12:56:28.000Z
|
2022-01-05T04:47:30.000Z
|
# coding: utf-8
"""
Argo Server API
You can get examples of requests and responses by using the CLI with `--gloglevel=9`, e.g. `argo list --gloglevel=9` # noqa: E501
The version of the OpenAPI document: v2.12.2
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from argo.workflows.client.api_client import ApiClient
from argo.workflows.client.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class ArchivedWorkflowServiceApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_archived_workflow(self, uid, **kwargs): # noqa: E501
"""delete_archived_workflow # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_archived_workflow(uid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str uid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_archived_workflow_with_http_info(uid, **kwargs) # noqa: E501
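    # Hypothetical usage sketch (assumes a reachable Argo Server behind a
    # configured ApiClient and a valid archived-workflow UID; not part of the
    # generated client):
    #
    #   api = ArchivedWorkflowServiceApi(ApiClient())
    #   thread = api.delete_archived_workflow(uid, async_req=True)
    #   result = thread.get()  # blocks until the DELETE completes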
def delete_archived_workflow_with_http_info(self, uid, **kwargs): # noqa: E501
"""delete_archived_workflow # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_archived_workflow_with_http_info(uid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str uid: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(object, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_archived_workflow" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'uid' is set
if self.api_client.client_side_validation and ('uid' not in local_var_params or # noqa: E501
local_var_params['uid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `uid` when calling `delete_archived_workflow`") # noqa: E501
collection_formats = {}
path_params = {}
if 'uid' in local_var_params:
path_params['uid'] = local_var_params['uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/archived-workflows/{uid}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_archived_workflow(self, uid, **kwargs): # noqa: E501
"""get_archived_workflow # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_archived_workflow(uid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str uid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1alpha1Workflow
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_archived_workflow_with_http_info(uid, **kwargs) # noqa: E501
def get_archived_workflow_with_http_info(self, uid, **kwargs): # noqa: E501
"""get_archived_workflow # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_archived_workflow_with_http_info(uid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str uid: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1alpha1Workflow, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_archived_workflow" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'uid' is set
if self.api_client.client_side_validation and ('uid' not in local_var_params or # noqa: E501
local_var_params['uid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `uid` when calling `get_archived_workflow`") # noqa: E501
collection_formats = {}
path_params = {}
if 'uid' in local_var_params:
path_params['uid'] = local_var_params['uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/archived-workflows/{uid}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1alpha1Workflow', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_archived_workflows(self, **kwargs): # noqa: E501
"""list_archived_workflows # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_archived_workflows(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.
:param str list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.
:param bool list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.
:param bool list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. If the feature gate WatchBookmarks is not enabled in apiserver, this field is ignored. +optional.
:param str list_options_resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. +optional.
:param str list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.
:param str list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1alpha1WorkflowList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_archived_workflows_with_http_info(**kwargs) # noqa: E501
def list_archived_workflows_with_http_info(self, **kwargs): # noqa: E501
"""list_archived_workflows # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_archived_workflows_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str list_options_label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. +optional.
:param str list_options_field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. +optional.
:param bool list_options_watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. +optional.
:param bool list_options_allow_watch_bookmarks: allowWatchBookmarks requests watch events with type \"BOOKMARK\". Servers that do not implement bookmarks may ignore this flag and bookmarks are sent at the server's discretion. Clients should not assume bookmarks are returned at any specific interval, nor may they assume the server will send any BOOKMARK event during a session. If this is not a watch, this field is ignored. If the feature gate WatchBookmarks is not enabled in apiserver, this field is ignored. +optional.
:param str list_options_resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. +optional.
:param str list_options_timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. +optional.
:param str list_options_limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str list_options_continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart its list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error; the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent with the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1alpha1WorkflowList, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'list_options_label_selector',
'list_options_field_selector',
'list_options_watch',
'list_options_allow_watch_bookmarks',
'list_options_resource_version',
'list_options_timeout_seconds',
'list_options_limit',
'list_options_continue'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_archived_workflows" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'list_options_label_selector' in local_var_params and local_var_params['list_options_label_selector'] is not None: # noqa: E501
query_params.append(('listOptions.labelSelector', local_var_params['list_options_label_selector'])) # noqa: E501
if 'list_options_field_selector' in local_var_params and local_var_params['list_options_field_selector'] is not None: # noqa: E501
query_params.append(('listOptions.fieldSelector', local_var_params['list_options_field_selector'])) # noqa: E501
if 'list_options_watch' in local_var_params and local_var_params['list_options_watch'] is not None: # noqa: E501
query_params.append(('listOptions.watch', local_var_params['list_options_watch'])) # noqa: E501
if 'list_options_allow_watch_bookmarks' in local_var_params and local_var_params['list_options_allow_watch_bookmarks'] is not None: # noqa: E501
query_params.append(('listOptions.allowWatchBookmarks', local_var_params['list_options_allow_watch_bookmarks'])) # noqa: E501
if 'list_options_resource_version' in local_var_params and local_var_params['list_options_resource_version'] is not None: # noqa: E501
query_params.append(('listOptions.resourceVersion', local_var_params['list_options_resource_version'])) # noqa: E501
if 'list_options_timeout_seconds' in local_var_params and local_var_params['list_options_timeout_seconds'] is not None: # noqa: E501
query_params.append(('listOptions.timeoutSeconds', local_var_params['list_options_timeout_seconds'])) # noqa: E501
if 'list_options_limit' in local_var_params and local_var_params['list_options_limit'] is not None: # noqa: E501
query_params.append(('listOptions.limit', local_var_params['list_options_limit'])) # noqa: E501
if 'list_options_continue' in local_var_params and local_var_params['list_options_continue'] is not None: # noqa: E501
query_params.append(('listOptions.continue', local_var_params['list_options_continue'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/archived-workflows', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1alpha1WorkflowList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
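For orientation, a minimal pagination sketch built on the method above, following the listOptions.limit / listOptions.continue semantics described in its docstring. The client construction and the name of the continue attribute on the returned list metadata are assumptions (openapi generators vary), not something this file confirms.
# Hedged sketch: page through archived workflows using listOptions.limit and
# listOptions.continue as described in the docstring above. `api` is assumed
# to be an instance of the class that defines list_archived_workflows.
def iter_archived_workflows(api, page_size=100):
    cont = None
    while True:
        kwargs = {'list_options_limit': str(page_size)}
        if cont:
            kwargs['list_options_continue'] = cont
        result = api.list_archived_workflows(**kwargs)  # V1alpha1WorkflowList
        for item in result.items or []:
            yield item
        # The continue token lives on the list metadata; the attribute name
        # (`_continue` here) depends on the generator and is an assumption.
        cont = getattr(result.metadata, '_continue', None) if result.metadata else None
        if not cont:
            break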
| 64.140741
| 1,403
| 0.670747
| 3,351
| 25,977
| 5.021486
| 0.115488
| 0.029476
| 0.046592
| 0.017115
| 0.930944
| 0.919831
| 0.905568
| 0.891603
| 0.887859
| 0.871813
| 0
| 0.010873
| 0.270663
| 25,977
| 404
| 1,404
| 64.299505
| 0.877283
| 0.597105
| 0
| 0.626943
| 0
| 0
| 0.22347
| 0.128691
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036269
| false
| 0
| 0.025907
| 0
| 0.098446
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
25e12aead27e3991251d73f52c12ea0906cc5dc6
| 120
|
py
|
Python
|
08-def-type-hints/ctime.py
|
hdcpereira/example-code-2e
|
ade7558007f149e5ab7465dd9618d432f169eb9f
|
[
"MIT"
] | 990
|
2019-03-21T21:17:34.000Z
|
2022-03-31T00:55:07.000Z
|
08-def-type-hints/ctime.py
|
Turall/example-code-2e
|
1702717182cff9a48beb55b2a9f5618e9bd1da18
|
[
"MIT"
] | 17
|
2019-12-18T18:00:05.000Z
|
2022-01-12T14:23:47.000Z
|
08-def-type-hints/ctime.py
|
Turall/example-code-2e
|
1702717182cff9a48beb55b2a9f5618e9bd1da18
|
[
"MIT"
] | 276
|
2019-04-06T12:32:00.000Z
|
2022-03-29T11:50:47.000Z
|
import time
from typing import Optional
def ctime(secs: Optional[float] = None, /) -> str:
return time.ctime(secs)
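A quick usage note on the snippet above, as a hedged sketch; the calls are illustrative and the function is restated so the example is self-contained.
# Hedged usage sketch: the `/` marker (Python 3.8+) makes `secs`
# positional-only, matching time.ctime's C-level signature.
import time
from typing import Optional

def ctime(secs: Optional[float] = None, /) -> str:
    return time.ctime(secs)

print(ctime())    # current local time as a string
print(ctime(0))   # the Unix epoch in local time
# ctime(secs=0) raises TypeError: a positional-only parameter
# cannot be passed by keyword.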
| 20
| 50
| 0.708333
| 17
| 120
| 5
| 0.705882
| 0.211765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 120
| 5
| 51
| 24
| 0.858586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
25e1ae44f45af84fee413d728a61965f98da05ce
| 23,671
|
py
|
Python
|
test/test_scrapbook_exporter.py
|
clach04/PyWebScrapBook
|
310e8f20cc5337336875679246b9269265b4476a
|
[
"MIT"
] | 39
|
2019-04-10T18:07:40.000Z
|
2022-02-07T07:11:30.000Z
|
test/test_scrapbook_exporter.py
|
clach04/PyWebScrapBook
|
310e8f20cc5337336875679246b9269265b4476a
|
[
"MIT"
] | 56
|
2019-05-07T23:29:14.000Z
|
2022-02-24T10:33:43.000Z
|
test/test_scrapbook_exporter.py
|
clach04/PyWebScrapBook
|
310e8f20cc5337336875679246b9269265b4476a
|
[
"MIT"
] | 15
|
2019-06-12T05:16:43.000Z
|
2022-01-16T13:24:11.000Z
|
from unittest import mock
import unittest
import os
import shutil
import zipfile
import json
from datetime import datetime, timezone
from base64 import b64decode, b64encode
from webscrapbook import WSB_DIR
from webscrapbook import util
from webscrapbook.scrapbook import exporter as wsb_exporter
from webscrapbook._compat import zip_stream
root_dir = os.path.abspath(os.path.dirname(__file__))
test_root = os.path.join(root_dir, 'test_scrapbook_exporter')
def setUpModule():
# mock out user config
global mockings
mockings = [
mock.patch('webscrapbook.scrapbook.host.WSB_USER_DIR', os.path.join(test_root, 'wsb')),
mock.patch('webscrapbook.WSB_USER_DIR', os.path.join(test_root, 'wsb')),
mock.patch('webscrapbook.WSB_USER_CONFIG', test_root),
]
for mocking in mockings:
mocking.start()
def tearDownModule():
# stop mock
for mocking in mockings:
mocking.stop()
class TestExporter(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.maxDiff = 8192
cls.test_root = os.path.join(test_root, 'general')
cls.test_input = os.path.join(cls.test_root, 'input')
cls.test_input_wsb = os.path.join(cls.test_input, WSB_DIR)
cls.test_input_config = os.path.join(cls.test_input_wsb, 'config.ini')
cls.test_input_tree = os.path.join(cls.test_input_wsb, 'tree')
cls.test_input_meta = os.path.join(cls.test_input_tree, 'meta.js')
cls.test_input_toc = os.path.join(cls.test_input_tree, 'toc.js')
cls.test_output = os.path.join(cls.test_root, 'output')
def setUp(self):
"""Set up a general temp test folder
"""
os.makedirs(self.test_input_tree, exist_ok=True)
os.makedirs(self.test_output, exist_ok=True)
def tearDown(self):
"""Remove general temp test folder
"""
try:
shutil.rmtree(self.test_root)
except NotADirectoryError:
os.remove(self.test_root)
except FileNotFoundError:
pass
def test_basic01(self):
"""Test exporting a common */index.html
"""
with open(self.test_input_meta, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.meta({
"20200101000000000": {
"type": "folder",
"title": "item0",
"index": "20200101000000000/index.html",
"create": "20200102000000000",
"modify": "20200103000000000",
"source": "http://example.com",
"icon": "favicon.bmp"
}
})""")
with open(self.test_input_toc, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.toc({
"root": [
"20200101000000000"
]
})""")
index_file = os.path.join(self.test_input, '20200101000000000', 'index.html')
os.makedirs(os.path.dirname(index_file))
with open(index_file, 'w', encoding='UTF-8') as fh:
fh.write('ABC123')
for info in wsb_exporter.run(self.test_input, self.test_output):
pass
with os.scandir(self.test_output) as entries:
files = sorted(entries, key=lambda x: x.path)
# files are exported in depth-first order
with zipfile.ZipFile(files[0]) as zh:
with zh.open('meta.json') as fh:
data = json.load(fh)
with zh.open('export.json') as fh:
export_info = json.load(fh)
with zh.open('data/20200101000000000/index.html') as fh:
index_data = fh.read().decode('UTF-8')
self.assertEqual(data, {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
'index': '20200101000000000/index.html',
'create': '20200102000000000',
'modify': '20200103000000000',
'source': 'http://example.com',
'icon': 'favicon.bmp',
})
self.assertEqual(export_info['version'], 1)
self.assertAlmostEqual(util.id_to_datetime(export_info['id']).timestamp(), datetime.now(timezone.utc).timestamp(), delta=3)
self.assertEqual(export_info['timestamp'], export_info['id'])
self.assertEqual(export_info['timezone'], datetime.now().astimezone().utcoffset().total_seconds())
self.assertEqual(export_info['path'], [{'id': 'root', 'title': ''}])
self.assertEqual(index_data, 'ABC123')
def test_basic02(self):
"""Test exporting a common *.htz
"""
with open(self.test_input_meta, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.meta({
"20200101000000000": {
"type": "folder",
"title": "item0",
"index": "20200101000000000.htz",
"create": "20200102000000000",
"modify": "20200103000000000",
"source": "http://example.com",
"icon": ".wsb/tree/favicon/dbc82be549e49d6db9a5719086722a4f1c5079cd.bmp"
}
})""")
with open(self.test_input_toc, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.toc({
"root": [
"20200101000000000"
]
})""")
index_file = os.path.join(self.test_input, '20200101000000000.htz')
with zipfile.ZipFile(index_file, 'w') as zh:
zh.writestr('index.html', 'ABC123')
favicon_file = os.path.join(self.test_input_tree, 'favicon', 'dbc82be549e49d6db9a5719086722a4f1c5079cd.bmp')
os.makedirs(os.path.dirname(favicon_file))
with open(favicon_file, 'wb') as fh:
fh.write(b64decode('Qk08AAAAAAAAADYAAAAoAAAAAQAAAAEAAAABACAAAAAAAAYAAAASCwAAEgsAAAAAAAAAAAAAAP8AAAAA'))
for info in wsb_exporter.run(self.test_input, self.test_output):
pass
with os.scandir(self.test_output) as entries:
files = sorted(entries, key=lambda x: x.path)
# files are exported in depth-first order
with zipfile.ZipFile(files[0]) as zh:
with zh.open('meta.json') as fh:
data = json.load(fh)
with zh.open('export.json') as fh:
export_info = json.load(fh)
with zh.open('data/20200101000000000.htz') as fh:
fh = zip_stream(fh)
with zipfile.ZipFile(fh) as zh2:
with zh2.open('index.html') as fh2:
index_data = fh2.read().decode('UTF-8')
with zh.open('favicon/dbc82be549e49d6db9a5719086722a4f1c5079cd.bmp') as fh:
favicon_data = fh.read()
self.assertEqual(data, {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
'index': '20200101000000000.htz',
'create': '20200102000000000',
'modify': '20200103000000000',
'source': 'http://example.com',
'icon': '.wsb/tree/favicon/dbc82be549e49d6db9a5719086722a4f1c5079cd.bmp',
})
self.assertEqual(export_info['version'], 1)
self.assertAlmostEqual(util.id_to_datetime(export_info['id']).timestamp(), datetime.now(timezone.utc).timestamp(), delta=3)
self.assertEqual(export_info['timestamp'], export_info['id'])
self.assertEqual(export_info['timezone'], datetime.now().astimezone().utcoffset().total_seconds())
self.assertEqual(export_info['path'], [{'id': 'root', 'title': ''}])
self.assertEqual(index_data, 'ABC123')
self.assertEqual(b64encode(favicon_data), b'Qk08AAAAAAAAADYAAAAoAAAAAQAAAAEAAAABACAAAAAAAAYAAAASCwAAEgsAAAAAAAAAAAAAAP8AAAAA')
def test_toc01(self):
"""Export all if item_ids not set
- Include hidden (at last).
- Exclude recycle.
"""
with open(self.test_input_meta, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.meta({
"20200101000000000": {
"type": "folder",
"title": "item0"
},
"20200101000000001": {
"type": "folder",
"title": "item1"
},
"20200101000000002": {
"type": "folder",
"title": "item2"
},
"20200101000000003": {
"type": "folder",
"title": "item3"
},
"20200101000000004": {
"type": "folder",
"title": "item4"
}
})""")
with open(self.test_input_toc, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.toc({
"hidden": [
"20200101000000003"
],
"root": [
"20200101000000000",
"20200101000000001"
],
"20200101000000000": [
"20200101000000002"
],
"recycle": [
"20200101000000004"
]
})""")
for info in wsb_exporter.run(self.test_input, self.test_output):
pass
with os.scandir(self.test_output) as entries:
files = sorted(entries, key=lambda x: x.path)
metas = []
export_infos = []
for file in files:
with zipfile.ZipFile(file) as zh:
with zh.open('meta.json') as fh:
metas.append(json.load(fh))
with zh.open('export.json') as fh:
export_infos.append(json.load(fh))
self.assertEqual(len(files), 4)
self.assertEqual(len(set(e['id'] for e in export_infos)), 4)
# files are exported in depth-first order
self.assertEqual(metas[0], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[0]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[1], {
'id': '20200101000000002',
'type': 'folder',
'title': 'item2',
})
self.assertEqual(export_infos[1]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000000', 'title': 'item0'},
])
self.assertEqual(metas[2], {
'id': '20200101000000001',
'type': 'folder',
'title': 'item1',
})
self.assertEqual(export_infos[2]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[3], {
'id': '20200101000000003',
'type': 'folder',
'title': 'item3',
})
self.assertEqual(export_infos[3]['path'], [
{'id': 'hidden', 'title': ''},
])
def test_toc02(self):
"""Export only those specified by item_ids
- Never include recycle.
"""
with open(self.test_input_meta, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.meta({
"20200101000000000": {
"type": "folder",
"title": "item0"
},
"20200101000000001": {
"type": "folder",
"title": "item1"
},
"20200101000000002": {
"type": "folder",
"title": "item2"
},
"20200101000000003": {
"type": "folder",
"title": "item3"
},
"20200101000000004": {
"type": "folder",
"title": "item4"
},
"20200101000000005": {
"type": "folder",
"title": "item5"
},
"20200101000000006": {
"type": "folder",
"title": "item6"
}
})""")
with open(self.test_input_toc, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.toc({
"hidden": [
"20200101000000003",
"20200101000000004"
],
"root": [
"20200101000000000",
"20200101000000001"
],
"20200101000000000": [
"20200101000000002"
],
"recycle": [
"20200101000000005",
"20200101000000006"
]
})""")
for info in wsb_exporter.run(self.test_input, self.test_output,
item_ids=['20200101000000000', '20200101000000003', '20200101000000005']):
pass
with os.scandir(self.test_output) as entries:
files = sorted(entries, key=lambda x: x.path)
metas = []
export_infos = []
for file in files:
with zipfile.ZipFile(file) as zh:
with zh.open('meta.json') as fh:
metas.append(json.load(fh))
with zh.open('export.json') as fh:
export_infos.append(json.load(fh))
self.assertEqual(len(files), 2)
# files are exported in depth-first order
self.assertEqual(metas[0], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[0]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[1], {
'id': '20200101000000003',
'type': 'folder',
'title': 'item3',
})
self.assertEqual(export_infos[1]['path'], [
{'id': 'hidden', 'title': ''},
])
def test_toc03(self):
"""Export descendants if recursive"""
with open(self.test_input_meta, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.meta({
"20200101000000000": {
"type": "folder",
"title": "item0"
},
"20200101000000001": {
"type": "folder",
"title": "item1"
},
"20200101000000002": {
"type": "folder",
"title": "item2"
},
"20200101000000003": {
"type": "folder",
"title": "item3"
}
})""")
with open(self.test_input_toc, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.toc({
"root": [
"20200101000000000",
"20200101000000001"
],
"20200101000000000": [
"20200101000000002"
],
"20200101000000002": [
"20200101000000003"
]
})""")
for info in wsb_exporter.run(self.test_input, self.test_output,
item_ids=['20200101000000000'], recursive=True):
pass
with os.scandir(self.test_output) as entries:
files = sorted(entries, key=lambda x: x.path)
metas = []
export_infos = []
for file in files:
with zipfile.ZipFile(file) as zh:
with zh.open('meta.json') as fh:
metas.append(json.load(fh))
with zh.open('export.json') as fh:
export_infos.append(json.load(fh))
self.assertEqual(len(files), 3)
# files are exported in depth-first order
self.assertEqual(metas[0], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[0]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[1], {
'id': '20200101000000002',
'type': 'folder',
'title': 'item2',
})
self.assertEqual(export_infos[1]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000000', 'title': 'item0'},
])
self.assertEqual(metas[2], {
'id': '20200101000000003',
'type': 'folder',
'title': 'item3',
})
self.assertEqual(export_infos[2]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000000', 'title': 'item0'},
{'id': '20200101000000002', 'title': 'item2'},
])
def test_toc04(self):
"""Export all occurrences
- Occurrences of the same item should share the same export id.
"""
with open(self.test_input_meta, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.meta({
"20200101000000000": {
"type": "folder",
"title": "item0"
},
"20200101000000001": {
"type": "folder",
"title": "item1"
},
"20200101000000002": {
"type": "folder",
"title": "item2"
},
"20200101000000003": {
"type": "folder",
"title": "item3"
}
})""")
with open(self.test_input_toc, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.toc({
"root": [
"20200101000000000",
"20200101000000000",
"20200101000000001",
"20200101000000002"
],
"20200101000000001": [
"20200101000000000"
],
"20200101000000002": [
"20200101000000003"
],
"20200101000000003": [
"20200101000000000"
]
})""")
for info in wsb_exporter.run(self.test_input, self.test_output):
pass
with os.scandir(self.test_output) as entries:
files = sorted(entries, key=lambda x: x.path)
metas = []
export_infos = []
for file in files:
with zipfile.ZipFile(file) as zh:
with zh.open('meta.json') as fh:
metas.append(json.load(fh))
with zh.open('export.json') as fh:
export_infos.append(json.load(fh))
self.assertEqual(len(files), 7)
# files are exported in depth-first order
self.assertEqual(metas[0], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[0]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[1], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[1]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(export_infos[1]['id'], export_infos[0]['id'])
self.assertEqual(metas[2], {
'id': '20200101000000001',
'type': 'folder',
'title': 'item1',
})
self.assertEqual(export_infos[2]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[3], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[3]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000001', 'title': 'item1'},
])
self.assertEqual(export_infos[3]['id'], export_infos[0]['id'])
self.assertEqual(metas[4], {
'id': '20200101000000002',
'type': 'folder',
'title': 'item2',
})
self.assertEqual(export_infos[4]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[5], {
'id': '20200101000000003',
'type': 'folder',
'title': 'item3',
})
self.assertEqual(export_infos[5]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000002', 'title': 'item2'},
])
self.assertEqual(metas[6], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[6]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000002', 'title': 'item2'},
{'id': '20200101000000003', 'title': 'item3'},
])
self.assertEqual(export_infos[6]['id'], export_infos[0]['id'])
def test_toc05(self):
"""Export first occurrence if singleton"""
with open(self.test_input_meta, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.meta({
"20200101000000000": {
"type": "folder",
"title": "item0"
},
"20200101000000001": {
"type": "folder",
"title": "item1"
},
"20200101000000002": {
"type": "folder",
"title": "item2"
},
"20200101000000003": {
"type": "folder",
"title": "item3"
}
})""")
with open(self.test_input_toc, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.toc({
"root": [
"20200101000000000",
"20200101000000000",
"20200101000000001",
"20200101000000002"
],
"20200101000000001": [
"20200101000000000"
],
"20200101000000002": [
"20200101000000003"
],
"20200101000000003": [
"20200101000000000"
]
})""")
for info in wsb_exporter.run(self.test_input, self.test_output, singleton=True):
pass
with os.scandir(self.test_output) as entries:
files = sorted(entries, key=lambda x: x.path)
metas = []
export_infos = []
for file in files:
with zipfile.ZipFile(file) as zh:
with zh.open('meta.json') as fh:
metas.append(json.load(fh))
with zh.open('export.json') as fh:
export_infos.append(json.load(fh))
self.assertEqual(len(files), 4)
# files are exported in depth-first order
self.assertEqual(metas[0], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[0]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[1], {
'id': '20200101000000001',
'type': 'folder',
'title': 'item1',
})
self.assertEqual(export_infos[1]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[2], {
'id': '20200101000000002',
'type': 'folder',
'title': 'item2',
})
self.assertEqual(export_infos[2]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[3], {
'id': '20200101000000003',
'type': 'folder',
'title': 'item3',
})
self.assertEqual(export_infos[3]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000002', 'title': 'item2'},
])
def test_toc06(self):
"""Export circular item but no children"""
with open(self.test_input_meta, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.meta({
"20200101000000000": {
"type": "folder",
"title": "item0"
},
"20200101000000001": {
"type": "folder",
"title": "item1"
}
})""")
with open(self.test_input_toc, 'w', encoding='UTF-8') as fh:
fh.write("""\
scrapbook.toc({
"root": [
"20200101000000000"
],
"20200101000000000": [
"20200101000000001"
],
"20200101000000001": [
"20200101000000000"
]
})""")
for info in wsb_exporter.run(self.test_input, self.test_output):
pass
with os.scandir(self.test_output) as entries:
files = sorted(entries, key=lambda x: x.path)
metas = []
export_infos = []
for file in files:
with zipfile.ZipFile(file) as zh:
with zh.open('meta.json') as fh:
metas.append(json.load(fh))
with zh.open('export.json') as fh:
export_infos.append(json.load(fh))
self.assertEqual(len(files), 3)
# files are exported in depth-first order
self.assertEqual(metas[0], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[0]['path'], [
{'id': 'root', 'title': ''},
])
self.assertEqual(metas[1], {
'id': '20200101000000001',
'type': 'folder',
'title': 'item1',
})
self.assertEqual(export_infos[1]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000000', 'title': 'item0'},
])
self.assertEqual(metas[2], {
'id': '20200101000000000',
'type': 'folder',
'title': 'item0',
})
self.assertEqual(export_infos[2]['path'], [
{'id': 'root', 'title': ''},
{'id': '20200101000000000', 'title': 'item0'},
{'id': '20200101000000001', 'title': 'item1'},
])
self.assertEqual(export_infos[2]['id'], export_infos[0]['id'])
if __name__ == '__main__':
unittest.main()
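For reference, a hedged sketch of inspecting a single exported archive the way the tests above do; the output filename is hypothetical, since real names are generated by the exporter.
# Hedged sketch: read meta.json and export.json out of one export archive,
# mirroring the zipfile access pattern used throughout the tests above.
import json
import zipfile

def read_export(path):
    with zipfile.ZipFile(path) as zh:
        with zh.open('meta.json') as fh:
            meta = json.load(fh)
        with zh.open('export.json') as fh:
            export_info = json.load(fh)
    return meta, export_info

# Example (hypothetical filename; the exporter chooses real names):
# meta, info = read_export('output/20200101000000000.wsba')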
| 30.231162
| 134
| 0.532339
| 2,331
| 23,671
| 5.310596
| 0.091806
| 0.084821
| 0.064222
| 0.056709
| 0.813474
| 0.789967
| 0.764682
| 0.748364
| 0.724372
| 0.724372
| 0
| 0.154177
| 0.3021
| 23,671
| 782
| 135
| 30.269821
| 0.595157
| 0.03612
| 0
| 0.787923
| 0
| 0
| 0.315857
| 0.030048
| 0
| 0
| 0
| 0
| 0.106038
| 1
| 0.019146
| false
| 0.013255
| 0.017673
| 0
| 0.038292
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d3195b7d7950da22dcb2a91d0bfae9e3f410492e
| 261
|
py
|
Python
|
tests/queryset/__init__.py
|
Rippling/mongoengine
|
c3b6fa6ffdfe05fcf6f49857c1a89fee0175a05f
|
[
"MIT"
] | null | null | null |
tests/queryset/__init__.py
|
Rippling/mongoengine
|
c3b6fa6ffdfe05fcf6f49857c1a89fee0175a05f
|
[
"MIT"
] | 28
|
2016-11-30T03:15:18.000Z
|
2022-02-25T15:57:02.000Z
|
tests/queryset/__init__.py
|
Rippling/mongoengine
|
c3b6fa6ffdfe05fcf6f49857c1a89fee0175a05f
|
[
"MIT"
] | 1
|
2021-11-10T05:33:18.000Z
|
2021-11-10T05:33:18.000Z
|
from __future__ import absolute_import
from tests.queryset.transform import *
from tests.queryset.field_list import *
from tests.queryset.queryset import *
from tests.queryset.visitor import *
from tests.queryset.geo import *
from tests.queryset.modify import *
| 37.285714
| 39
| 0.827586
| 36
| 261
| 5.833333
| 0.333333
| 0.285714
| 0.428571
| 0.657143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 261
| 7
| 40
| 37.285714
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6caf887fe0e8004f69c0afe691d7d0260f448c0b
| 99
|
py
|
Python
|
setlx2py/setlx_semcheck.py
|
jcklie/setlx2py
|
4a6166ad52cf7ae973faa6bcaf22ac830fab738d
|
[
"Apache-2.0"
] | null | null | null |
setlx2py/setlx_semcheck.py
|
jcklie/setlx2py
|
4a6166ad52cf7ae973faa6bcaf22ac830fab738d
|
[
"Apache-2.0"
] | null | null | null |
setlx2py/setlx_semcheck.py
|
jcklie/setlx2py
|
4a6166ad52cf7ae973faa6bcaf22ac830fab738d
|
[
"Apache-2.0"
] | null | null | null |
def check_target(ast):
pass
def check_iterator(ast):
pass
def check_lambda(ast):
pass
| 12.375
| 24
| 0.686869
| 15
| 99
| 4.333333
| 0.466667
| 0.369231
| 0.307692
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 99
| 8
| 25
| 12.375
| 0.844156
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
6cbc774facdbb750e8dc789afdea0cc666f632b6
| 32,410
|
py
|
Python
|
tests/unit_tests/utils/test_category_encoders_backend.py
|
amnaabbassi/shapash
|
6c867c8b1724f2737369557f8db056cb0027999b
|
[
"Apache-2.0"
] | 1,665
|
2020-05-25T07:38:56.000Z
|
2022-03-29T15:02:23.000Z
|
tests/unit_tests/utils/test_category_encoders_backend.py
|
amnaabbassi/shapash
|
6c867c8b1724f2737369557f8db056cb0027999b
|
[
"Apache-2.0"
] | 93
|
2021-01-11T15:53:56.000Z
|
2022-03-29T14:48:18.000Z
|
tests/unit_tests/utils/test_category_encoders_backend.py
|
amnaabbassi/shapash
|
6c867c8b1724f2737369557f8db056cb0027999b
|
[
"Apache-2.0"
] | 226
|
2021-01-13T10:41:23.000Z
|
2022-03-13T01:37:49.000Z
|
"""
Unit test of Inverse Transform
"""
import unittest
import pandas as pd
import numpy as np
import category_encoders as ce
import catboost as cb
import sklearn
import lightgbm
import xgboost
from shapash.utils.transform import inverse_transform, apply_preprocessing, get_col_mapping_ce
class TestInverseTransformCategoryEncoder(unittest.TestCase):
def test_inverse_transform_1(self):
"""
Test no preprocessing
"""
train = pd.DataFrame({'city': ['chicago', 'paris'],
'state': ['US', 'FR']})
original = inverse_transform(train)
pd.testing.assert_frame_equal(original, train)
def test_inverse_transform_2(self):
"""
Test multiple preprocessing
"""
train = pd.DataFrame({'Onehot1': ['A', 'B', 'A', 'B'], 'Onehot2': ['C', 'D', 'C', 'D'],
'Binary1': ['E', 'F', 'E', 'F'], 'Binary2': ['G', 'H', 'G', 'H'],
'Ordinal1': ['I', 'J', 'I', 'J'], 'Ordinal2': ['K', 'L', 'K', 'L'],
'BaseN1': ['M', 'N', 'M', 'N'], 'BaseN2': ['O', 'P', 'O', 'P'],
'Target1': ['Q', 'R', 'Q', 'R'], 'Target2': ['S', 'T', 'S', 'T'],
'other': ['other', np.nan, 'other', 'other']})
test = pd.DataFrame({'Onehot1': ['A', 'B', 'A'], 'Onehot2': ['C', 'D', 'ZZ'],
'Binary1': ['E', 'F', 'F'], 'Binary2': ['G', 'H', 'ZZ'],
'Ordinal1': ['I', 'J', 'J'], 'Ordinal2': ['K', 'L', 'ZZ'],
'BaseN1': ['M', 'N', 'N'], 'BaseN2': ['O', 'P', 'ZZ'],
'Target1': ['Q', 'R', 'R'], 'Target2': ['S', 'T', 'ZZ'],
'other': ['other', '123', np.nan]})
expected = pd.DataFrame({'Onehot1': ['A', 'B', 'A'], 'Onehot2': ['C', 'D', 'missing'],
'Binary1': ['E', 'F', 'F'], 'Binary2': ['G', 'H', 'missing'],
'Ordinal1': ['I', 'J', 'J'], 'Ordinal2': ['K', 'L', 'missing'],
'BaseN1': ['M', 'N', 'N'], 'BaseN2': ['O', 'P', np.nan],
'Target1': ['Q', 'R', 'R'], 'Target2': ['S', 'T', 'NaN'],
'other': ['other', '123', np.nan]})
y = pd.DataFrame(data=[0, 1, 0, 0], columns=['y'])
enc_onehot = ce.OneHotEncoder(cols=['Onehot1', 'Onehot2']).fit(train)
train_onehot = enc_onehot.transform(train)
enc_binary = ce.BinaryEncoder(cols=['Binary1', 'Binary2']).fit(train_onehot)
train_binary = enc_binary.transform(train_onehot)
enc_ordinal = ce.OrdinalEncoder(cols=['Ordinal1', 'Ordinal2']).fit(train_binary)
train_ordinal = enc_ordinal.transform(train_binary)
enc_basen = ce.BaseNEncoder(cols=['BaseN1', 'BaseN2']).fit(train_ordinal)
train_basen = enc_basen.transform(train_ordinal)
enc_target = ce.TargetEncoder(cols=['Target1', 'Target2']).fit(train_basen, y)
input_dict1 = dict()
input_dict1['col'] = 'Onehot2'
input_dict1['mapping'] = pd.Series(data=['C', 'D', np.nan], index=['C', 'D', 'missing'])
input_dict1['data_type'] = 'object'
input_dict2 = dict()
input_dict2['col'] = 'Binary2'
input_dict2['mapping'] = pd.Series(data=['G', 'H', np.nan], index=['G', 'H', 'missing'])
input_dict2['data_type'] = 'object'
input_dict3 = dict()
input_dict3['col'] = 'Ordinal2'
input_dict3['mapping'] = pd.Series(data=['K', 'L', np.nan], index=['K', 'L', 'missing'])
input_dict3['data_type'] = 'object'
list_dict = [input_dict2, input_dict3]
result1 = enc_onehot.transform(test)
result2 = enc_binary.transform(result1)
result3 = enc_ordinal.transform(result2)
result4 = enc_basen.transform(result3)
result5 = enc_target.transform(result4)
original = inverse_transform(result5, [enc_onehot, enc_binary, enc_ordinal, enc_basen, enc_target, input_dict1,
list_dict])
pd.testing.assert_frame_equal(expected, original)
def test_inverse_transform_3(self):
"""
Test target encoding
"""
train = pd.DataFrame({'city': ['chicago', 'paris', 'paris', 'chicago', 'chicago'],
'state': ['US', 'FR', 'FR', 'US', 'US'],
'other': ['A', 'A', np.nan, 'B', 'B']})
test = pd.DataFrame({'city': ['chicago', 'paris', 'paris'],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, np.nan]})
expected = pd.DataFrame({'city': ['chicago', 'paris', 'paris'],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, np.nan]})
y = pd.DataFrame(data=[0, 1, 1, 0, 1], columns=['y'])
enc = ce.TargetEncoder(cols=['city', 'state']).fit(train, y)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(expected, original)
def test_inverse_transform_4(self):
"""
Test ordinal encoding
"""
train = pd.DataFrame({'city': ['chicago', 'st louis']})
test = pd.DataFrame({'city': ['chicago', 'los angeles']})
expected = pd.DataFrame({'city': ['chicago', np.nan]})
enc = ce.OrdinalEncoder(handle_missing='value', handle_unknown='value')
enc.fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(expected, original)
def test_inverse_transform_5(self):
"""
Test inverse_transform with NaN in train and handle_missing='value' (OrdinalEncoder); expect the NaN to be restored
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
enc = ce.OrdinalEncoder(handle_missing='value', handle_unknown='value')
result = enc.fit_transform(train)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_6(self):
"""
Test inverse_transform with NaN in train and handle_missing='return_nan' (OrdinalEncoder); expect the NaN to be restored
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
enc = ce.OrdinalEncoder(handle_missing='return_nan', handle_unknown='value')
result = enc.fit_transform(train)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_7(self):
"""
Test inverse_transform with handle_missing and handle_unknown both 'return_nan' (OrdinalEncoder); the unknown test value inverts to NaN
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
test = pd.DataFrame({'city': ['chicago', 'los angeles']})
enc = ce.OrdinalEncoder(handle_missing='return_nan', handle_unknown='return_nan')
enc.fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_8(self):
"""
Test inverse_transform with handle_missing='value' and handle_unknown='return_nan' (OrdinalEncoder); expect the inverse to match train
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
test = pd.DataFrame({'city': ['chicago', 'los angeles']})
enc = ce.OrdinalEncoder(handle_missing='value', handle_unknown='return_nan')
enc.fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_9(self):
"""
Test inverse_transform with handle_missing='value' and handle_unknown='return_nan' when test data has both NaN and an unknown value (OrdinalEncoder)
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
test = pd.DataFrame({'city': ['chicago', np.nan, 'los angeles']})
expected = pd.DataFrame({'city': ['chicago', np.nan, np.nan]})
enc = ce.OrdinalEncoder(handle_missing='value', handle_unknown='return_nan')
enc.fit(train)
result = enc.transform(test)
original = enc.inverse_transform(result)
pd.testing.assert_frame_equal(expected, original)
def test_inverse_transform_10(self):
"""
Test inverse_transform with multiple ordinal columns
"""
data = pd.DataFrame({'city': ['chicago', 'paris'],
'state': ['US', 'FR'],
'other': ['a', 'b']})
test = pd.DataFrame({'city': [1, 2, 2],
'state': [1, 2, 2],
'other': ['a', 'b', 'a']})
expected = pd.DataFrame({'city': ['chicago', 'paris', 'paris'],
'state': ['US', 'FR', 'FR'],
'other': ['a', 'b', 'a']})
enc = ce.OrdinalEncoder(cols=['city', 'state'])
enc.fit(data)
original = inverse_transform(test, enc)
pd.testing.assert_frame_equal(original, expected)
def test_inverse_transform_11(self):
"""
Test binary encoding
"""
train = pd.DataFrame({'city': ['chicago', 'paris'],
'state': ['US', 'FR'],
'other': ['A', np.nan]})
test = pd.DataFrame({'city': ['chicago', 'paris', 'monaco'],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, 'B']})
expected = pd.DataFrame({'city': ['chicago', 'paris', np.nan],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, 'B']})
enc = ce.BinaryEncoder(cols=['city', 'state']).fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(original, expected)
def test_inverse_transform_12(self):
"""
Test that inverse_transform round-trips data through a BaseNEncoder
"""
train = pd.Series(list('abcd')).to_frame('letter')
enc = ce.BaseNEncoder(base=2)
result = enc.fit_transform(train)
inversed_result = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, inversed_result)
def test_inverse_transform_13(self):
"""
Test basen encoding
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
enc = ce.BaseNEncoder(handle_missing='value', handle_unknown='value')
result = enc.fit_transform(train)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_14(self):
"""
Test inverse_transform with NaN in train and handle_missing='return_nan' (BaseNEncoder); expect the NaN to be restored
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
enc = ce.BaseNEncoder(handle_missing='return_nan', handle_unknown='value')
result = enc.fit_transform(train)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_15(self):
"""
Test inverse_transform with handle_missing='value' and handle_unknown='return_nan' (BaseNEncoder)
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
test = pd.DataFrame({'city': ['chicago', 'los angeles']})
enc = ce.BaseNEncoder(handle_missing='value', handle_unknown='return_nan')
enc.fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_16(self):
"""
Test inverse_transform with handle_missing='value' and handle_unknown='return_nan' when test data has both NaN and an unknown value (BaseNEncoder)
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
test = pd.DataFrame({'city': ['chicago', np.nan, 'los angeles']})
expected = pd.DataFrame({'city': ['chicago', np.nan, np.nan]})
enc = ce.BaseNEncoder(handle_missing='value', handle_unknown='return_nan')
enc.fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(expected, original)
def test_inverse_transform_17(self):
"""
Test inverse_transform with multiple BaseN columns
"""
train = pd.DataFrame({'city': ['chicago', 'paris'],
'state': ['US', 'FR']})
test = pd.DataFrame({'city_0': [0, 1],
'city_1': [1, 0],
'state_0': [0, 1],
'state_1': [1, 0]})
enc = ce.BaseNEncoder(cols=['city', 'state'], handle_missing='value', handle_unknown='return_nan')
enc.fit(train)
original = inverse_transform(test, enc)
pd.testing.assert_frame_equal(original, train)
def test_inverse_transform_18(self):
"""
Test Onehot encoding
"""
encoder = ce.OneHotEncoder(cols=['match', 'match_box'], use_cat_names=True)
value = pd.DataFrame({'match': pd.Series('box_-1'), 'match_box': pd.Series(-1)})
transformed = encoder.fit_transform(value)
inversed_result = inverse_transform(transformed, encoder)
pd.testing.assert_frame_equal(value, inversed_result)
def test_inverse_transform_19(self):
"""
Test inverse_transform with use_cat_names=False (OneHotEncoder)
"""
encoder = ce.OneHotEncoder(cols=['match', 'match_box'], use_cat_names=False)
value = pd.DataFrame({'match': pd.Series('box_-1'), 'match_box': pd.Series(-1)})
transformed = encoder.fit_transform(value)
inversed_result = inverse_transform(transformed, encoder)
pd.testing.assert_frame_equal(value, inversed_result)
def test_inverse_transform_20(self):
"""
Test inverse_transform with NaN in train and handle_missing='value' (OneHotEncoder); expect the NaN to be restored
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
enc = ce.OneHotEncoder(handle_missing='value', handle_unknown='value')
result = enc.fit_transform(train)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_21(self):
"""
Test inverse_transform with NaN in train and handle_missing='return_nan' (OneHotEncoder); expect the NaN to be restored
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
enc = ce.OneHotEncoder(handle_missing='return_nan', handle_unknown='value')
result = enc.fit_transform(train)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_22(self):
"""
Test inverse_transform with handle_missing and handle_unknown both 'return_nan' (OneHotEncoder)
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
test = pd.DataFrame({'city': ['chicago', 'los angeles']})
expected = pd.DataFrame({'city': ['chicago', np.nan]})
enc = ce.OneHotEncoder(handle_missing='return_nan', handle_unknown='return_nan')
enc.fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(original, expected)
def test_inverse_transform_23(self):
"""
Test inverse_transform with handle_missing='value' and handle_unknown='return_nan' (OneHotEncoder)
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
test = pd.DataFrame({'city': ['chicago', 'los angeles']})
enc = ce.OneHotEncoder(handle_missing='value', handle_unknown='return_nan')
enc.fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(train, original)
def test_inverse_transform_24(self):
"""
Test inverse_transform with handle_missing='value' and handle_unknown='return_nan' when test data has both NaN and an unknown value (OneHotEncoder)
"""
train = pd.DataFrame({'city': ['chicago', np.nan]})
test = pd.DataFrame({'city': ['chicago', np.nan, 'los angeles']})
expected = pd.DataFrame({'city': ['chicago', np.nan, np.nan]})
enc = ce.OneHotEncoder(handle_missing='value', handle_unknown='return_nan')
enc.fit(train)
result = enc.transform(test)
original = inverse_transform(result, enc)
pd.testing.assert_frame_equal(expected, original)
def test_inverse_transform_25(self):
"""
Test dict encoding
"""
data = pd.DataFrame({'city': ['chicago', 'paris-1', 'paris-2'],
'state': ['US', 'FR-1', 'FR-2'],
'other': ['A', 'B', np.nan]})
expected = pd.DataFrame({'city': ['chicago', 'paris-1', 'paris-2'],
'state': ['US', 'FR', 'FR'],
'other': ['A', 'B', np.nan]})
input_dict = dict()
input_dict['col'] = 'state'
input_dict['mapping'] = pd.Series(data=['US', 'FR-1', 'FR-2'], index=['US', 'FR', 'FR'])
input_dict['data_type'] = 'object'
result = inverse_transform(data, input_dict)
pd.testing.assert_frame_equal(result, expected)
def test_inverse_transform_26(self):
"""
Test multiple dict encoding
"""
train = pd.DataFrame({'Onehot1': ['A', 'B', 'A', 'B'], 'Onehot2': ['C', 'D', 'C', 'D'],
'Binary1': ['E', 'F', 'E', 'F'], 'Binary2': ['G', 'H', 'G', 'H'],
'Ordinal1': ['I', 'J', 'I', 'J'], 'Ordinal2': ['K', 'L', 'K', 'L'],
'BaseN1': ['M', 'N', 'M', 'N'], 'BaseN2': ['O', 'P', 'O', 'P'],
'Target1': ['Q', 'R', 'Q', 'R'], 'Target2': ['S', 'T', 'S', 'T'],
'other': ['other', np.nan, 'other', 'other']})
test = pd.DataFrame({'Onehot1': ['A', 'B', 'A'], 'Onehot2': ['C', 'D', 'ZZ'],
'Binary1': ['E', 'F', 'F'], 'Binary2': ['G', 'H', 'ZZ'],
'Ordinal1': ['I', 'J', 'J'], 'Ordinal2': ['K', 'L', 'ZZ'],
'BaseN1': ['M', 'N', 'N'], 'BaseN2': ['O', 'P', 'ZZ'],
'Target1': ['Q', 'R', 'R'], 'Target2': ['S', 'T', 'ZZ'],
'other': ['other', '123', np.nan]},
index=['index1', 'index2', 'index3'])
expected = pd.DataFrame({'Onehot1': ['A', 'B', 'A'], 'Onehot2': ['C', 'D', 'missing'],
'Binary1': ['E', 'F', 'F'], 'Binary2': ['G', 'H', 'missing'],
'Ordinal1': ['I', 'J', 'J'], 'Ordinal2': ['K', 'L', 'missing'],
'BaseN1': ['M', 'N', 'N'], 'BaseN2': ['O', 'P', np.nan],
'Target1': ['Q', 'R', 'R'], 'Target2': ['S', 'T', 'NaN'],
'other': ['other', '123', np.nan]},
index=['index1', 'index2', 'index3'])
y = pd.DataFrame(data=[0, 1, 0, 0], columns=['y'])
enc_onehot = ce.OneHotEncoder(cols=['Onehot1', 'Onehot2']).fit(train)
train_onehot = enc_onehot.transform(train)
enc_binary = ce.BinaryEncoder(cols=['Binary1', 'Binary2']).fit(train_onehot)
train_binary = enc_binary.transform(train_onehot)
enc_ordinal = ce.OrdinalEncoder(cols=['Ordinal1', 'Ordinal2']).fit(train_binary)
train_ordinal = enc_ordinal.transform(train_binary)
enc_basen = ce.BaseNEncoder(cols=['BaseN1', 'BaseN2']).fit(train_ordinal)
train_basen = enc_basen.transform(train_ordinal)
enc_target = ce.TargetEncoder(cols=['Target1', 'Target2']).fit(train_basen, y)
input_dict1 = dict()
input_dict1['col'] = 'Onehot2'
input_dict1['mapping'] = pd.Series(data=['C', 'D', np.nan], index=['C', 'D', 'missing'])
input_dict1['data_type'] = 'object'
input_dict2 = dict()
input_dict2['col'] = 'Binary2'
input_dict2['mapping'] = pd.Series(data=['G', 'H', np.nan], index=['G', 'H', 'missing'])
input_dict2['data_type'] = 'object'
input_dict3 = dict()
input_dict3['col'] = 'Ordinal2'
input_dict3['mapping'] = pd.Series(data=['K', 'L', np.nan], index=['K', 'L', 'missing'])
input_dict3['data_type'] = 'object'
list_dict = [input_dict2, input_dict3]
result1 = enc_onehot.transform(test)
result2 = enc_binary.transform(result1)
result3 = enc_ordinal.transform(result2)
result4 = enc_basen.transform(result3)
result5 = enc_target.transform(result4)
original = inverse_transform(result5, [enc_onehot, enc_binary, enc_ordinal, enc_basen, enc_target, input_dict1,
list_dict])
pd.testing.assert_frame_equal(expected, original)
def test_transform_ce_1(self):
"""
Unit test for apply preprocessing on OneHotEncoder
"""
y = pd.DataFrame(data=[0, 1], columns=['y'])
train = pd.DataFrame({'num1': [0, 1],
'num2': [0, 2],
'other': [1, 0]})
enc = ce.one_hot.OneHotEncoder(cols=["num1", "num2"])
enc.fit(train, y)
train_preprocessed = pd.DataFrame(enc.transform(train))
clf = cb.CatBoostClassifier(n_estimators=1).fit(train_preprocessed, y)
test = pd.DataFrame({'num1': [0, 1, 1],
'num2': [0, 2, 0],
'other': [1, 0, 0]})
expected = pd.DataFrame(enc.transform(test), index=test.index)
result = apply_preprocessing(test, clf, enc)
assert result.shape == expected.shape
assert [column in clf.feature_names_ for column in result.columns]
assert all(expected.index == result.index)
def test_transform_ce_2(self):
"""
Unit test for apply preprocessing on OrdinalEncoder
"""
y = pd.DataFrame(data=[0, 1], columns=['y'])
train = pd.DataFrame({'num1': [0, 1],
'num2': [0, 2],
'other': [1, 0]})
enc = ce.ordinal.OrdinalEncoder(cols=["num1", "num2"])
enc.fit(train, y)
train_preprocessed = pd.DataFrame(enc.transform(train))
clf = cb.CatBoostClassifier(n_estimators=1).fit(train_preprocessed, y)
test = pd.DataFrame({'num1': [0, 1, 1],
'num2': [0, 2, 0],
'other': [1, 0, 0]})
expected = pd.DataFrame(enc.transform(test), index=test.index)
result = apply_preprocessing(test, clf, enc)
assert result.shape == expected.shape
assert [column in clf.feature_names_ for column in result.columns]
assert all(expected.index == result.index)
def test_transform_ce_3(self):
"""
Unit test for apply preprocessing on BaseNEncoder
"""
y = pd.DataFrame(data=[0, 1], columns=['y'])
train = pd.DataFrame({'num1': [0, 1],
'num2': [0, 2],
'other': [1, 0]})
enc = ce.basen.BaseNEncoder(cols=["num1", "num2"])
enc.fit(train, y)
train_preprocessed = pd.DataFrame(enc.transform(train))
clf = cb.CatBoostClassifier(n_estimators=1).fit(train_preprocessed, y)
test = pd.DataFrame({'num1': [0, 1, 1],
'num2': [0, 2, 0],
'other': [1, 0, 0]})
expected = pd.DataFrame(enc.transform(test), index=test.index)
result = apply_preprocessing(test, clf, enc)
assert result.shape == expected.shape
assert [column in clf.feature_names_ for column in result.columns]
assert all(expected.index == result.index)
def test_transform_ce_4(self):
"""
Unit test for apply preprocessing on BinaryEncoder
"""
y = pd.DataFrame(data=[0, 1], columns=['y'])
train = pd.DataFrame({'num1': [0, 1],
'num2': [0, 2],
'other': [1, 0]})
enc = ce.binary.BinaryEncoder(cols=["num1", "num2"])
enc.fit(train, y)
train_preprocessed = pd.DataFrame(enc.transform(train))
clf = cb.CatBoostClassifier(n_estimators=1).fit(train_preprocessed, y)
test = pd.DataFrame({'num1': [0, 1, 1],
'num2': [0, 2, 0],
'other': [1, 0, 0]})
expected = pd.DataFrame(enc.transform(test), index=test.index)
result = apply_preprocessing(test, clf, enc)
assert result.shape == expected.shape
assert [column in clf.feature_names_ for column in result.columns]
assert all(expected.index == result.index)
def test_transform_ce_5(self):
"""
Unit test for apply preprocessing with sklearn model
"""
y = pd.DataFrame(data=[0, 1], columns=['y'])
train = pd.DataFrame({'num1': [0, 1],
'num2': [0, 2],
'other': [1, 0]})
enc = ce.ordinal.OrdinalEncoder(cols=["num1", "num2"])
enc.fit(train, y)
train_preprocessed = pd.DataFrame(enc.transform(train))
clf = sklearn.ensemble._gb.GradientBoostingClassifier().fit(train_preprocessed, y)
test = pd.DataFrame({'num1': [0, 1, 1],
'num2': [0, 2, 0],
'other': [1, 0, 0]})
expected = pd.DataFrame(enc.transform(test), index=test.index)
result = apply_preprocessing(test, clf, enc)
assert result.shape == expected.shape
assert all(expected.index == result.index)
def test_transform_ce_6(self):
"""
Unit test for apply preprocessing with catboost model
"""
y = pd.DataFrame(data=[0, 1], columns=['y'])
train = pd.DataFrame({'num1': [0, 1],
'num2': [0, 2],
'other': [1, 0]})
enc = ce.ordinal.OrdinalEncoder(cols=["num1", "num2"])
enc.fit(train, y)
train_preprocessed = pd.DataFrame(enc.transform(train))
clf = cb.CatBoostClassifier(n_estimators=1).fit(train_preprocessed, y)
test = pd.DataFrame({'num1': [0, 1, 1],
'num2': [0, 2, 0],
'other': [1, 0, 0]})
expected = pd.DataFrame(enc.transform(test), index=test.index)
result = apply_preprocessing(test, clf, enc)
assert result.shape == expected.shape
assert [column in clf.feature_names_ for column in result.columns]
assert all(expected.index == result.index)
def test_transform_ce_7(self):
"""
Unit test for apply preprocessing with lightgbm model
"""
y = pd.DataFrame(data=[0, 1], columns=['y'])
train = pd.DataFrame({'num1': [0, 1],
'num2': [0, 2],
'other': [1, 0]})
enc = ce.ordinal.OrdinalEncoder(cols=["num1", "num2"])
enc.fit(train, y)
train_preprocessed = pd.DataFrame(enc.transform(train))
clf = lightgbm.sklearn.LGBMClassifier(n_estimators=1).fit(train_preprocessed, y)
test = pd.DataFrame({'num1': [0, 1, 1],
'num2': [0, 2, 0],
'other': [1, 0, 0]})
expected = pd.DataFrame(enc.transform(test), index=test.index)
result = apply_preprocessing(test, clf, enc)
assert result.shape == expected.shape
assert [column in clf.booster_.feature_name() for column in result.columns]
assert all(expected.index == result.index)
def test_transform_ce_8(self):
"""
Unit test for apply preprocessing with xgboost model
"""
y = pd.DataFrame(data=[0, 1], columns=['y'])
train = pd.DataFrame({'num1': [0, 1],
'num2': [0, 2],
'other': [1, 0]})
enc = ce.ordinal.OrdinalEncoder(cols=["num1", "num2"])
enc.fit(train, y)
train_preprocessed = pd.DataFrame(enc.transform(train))
clf = xgboost.sklearn.XGBClassifier(n_estimators=1).fit(train_preprocessed, y)
test = pd.DataFrame({'num1': [0, 1, 1],
'num2': [0, 2, 0],
'other': [1, 0, 0]})
expected = pd.DataFrame(enc.transform(test), index=test.index)
result = apply_preprocessing(test, clf, enc)
assert result.shape == expected.shape
assert [column in clf.get_booster().feature_names for column in result.columns]
assert all(expected.index == result.index)
def test_get_col_mapping_ce_1(self):
"""
Test get_col_mapping_ce with TargetEncoder
"""
test = pd.DataFrame({'city': ['chicago', 'paris', 'paris'],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, np.nan]})
y = pd.DataFrame(data=[0, 1, 1], columns=['y'])
enc = ce.TargetEncoder(cols=['city', 'state'])
test_encoded = pd.DataFrame(enc.fit_transform(test, y))
mapping = get_col_mapping_ce(enc)
expected_mapping = {'city': ['city'], 'state': ['state']}
self.assertDictEqual(mapping, expected_mapping)
def test_get_col_mapping_ce_2(self):
"""
Test get_col_mapping_ce with OrdinalEncoder
"""
test = pd.DataFrame({'city': ['chicago', 'paris', 'paris'],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, np.nan]})
y = pd.DataFrame(data=[0, 1, 1], columns=['y'])
enc = ce.OrdinalEncoder(handle_missing='value', handle_unknown='value')
test_encoded = pd.DataFrame(enc.fit_transform(test, y))
mapping = get_col_mapping_ce(enc)
expected_mapping = {'city': ['city'], 'state': ['state'], 'other': ['other']}
self.assertDictEqual(mapping, expected_mapping)
def test_get_col_mapping_ce_3(self):
"""
Test get_col_mapping_ce with BinaryEncoder
"""
test = pd.DataFrame({'city': ['chicago', 'paris', 'paris'],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, np.nan]})
y = pd.DataFrame(data=[0, 1, 1], columns=['y'])
enc = ce.BinaryEncoder(cols=['city', 'state'])
test_encoded = pd.DataFrame(enc.fit_transform(test, y))
mapping = get_col_mapping_ce(enc)
expected_mapping = {'city': ['city_0', 'city_1'], 'state': ['state_0', 'state_1']}
self.assertDictEqual(mapping, expected_mapping)
def test_get_col_mapping_ce_4(self):
"""
Test get_col_mapping_ce with BaseNEncoder
"""
test = pd.DataFrame({'city': ['chicago', 'paris', 'new york'],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, np.nan]})
y = pd.DataFrame(data=[0, 1, 1], columns=['y'])
enc = ce.BaseNEncoder(base=2)
test_encoded = pd.DataFrame(enc.fit_transform(test, y))
mapping = get_col_mapping_ce(enc)
expected_mapping = {'city': ['city_0', 'city_1', 'city_2'], 'state': ['state_0', 'state_1'],
'other': ['other_0', 'other_1']}
self.assertDictEqual(mapping, expected_mapping)
def test_get_col_mapping_ce_5(self):
"""
Test get_col_mapping_ce with OneHotEncoder
"""
test = pd.DataFrame({'city': ['chicago', 'paris', 'chicago'],
'state': ['US', 'FR', 'FR'],
'other': ['A', np.nan, np.nan]})
y = pd.DataFrame(data=[0, 1, 1], columns=['y'])
enc = ce.OneHotEncoder(cols=['city', 'state'], use_cat_names=True)
test_encoded = pd.DataFrame(enc.fit_transform(test, y))
mapping = get_col_mapping_ce(enc)
expected_mapping = {'city': ['city_chicago', 'city_paris'], 'state': ['state_US', 'state_FR']}
self.assertDictEqual(mapping, expected_mapping)
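The encode/inverse round-trip these tests exercise, reduced to a hedged standalone sketch using only calls that appear above:
# Hedged sketch of the round-trip pattern exercised throughout the tests
# above: fit a category_encoders encoder, transform, then invert through
# shapash.utils.transform.inverse_transform.
import pandas as pd
import category_encoders as ce
from shapash.utils.transform import inverse_transform

train = pd.DataFrame({'city': ['chicago', 'paris']})
enc = ce.OrdinalEncoder(cols=['city'], handle_missing='value', handle_unknown='value')
encoded = enc.fit_transform(train)
restored = inverse_transform(encoded, enc)
pd.testing.assert_frame_equal(train, restored)  # should round-trip exactly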
| 43.503356
| 119
| 0.538507
| 3,620
| 32,410
| 4.673204
| 0.058287
| 0.070875
| 0.042561
| 0.059821
| 0.904593
| 0.884731
| 0.867411
| 0.845126
| 0.84152
| 0.815275
| 0
| 0.020697
| 0.296359
| 32,410
| 744
| 120
| 43.561828
| 0.721114
| 0.065967
| 0
| 0.776447
| 0
| 0
| 0.109477
| 0
| 0
| 0
| 0
| 0
| 0.107784
| 1
| 0.077844
| false
| 0
| 0.017964
| 0
| 0.097804
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ce348aa491bfbcad3d452b310da504123e1c57c
| 13,837
|
py
|
Python
|
tests/test_user.py
|
ndavisontest/dto-digitalmarketplace-utils
|
640f9af164a1555d274a16d2aa47d4f31b85b6cc
|
[
"MIT"
] | null | null | null |
tests/test_user.py
|
ndavisontest/dto-digitalmarketplace-utils
|
640f9af164a1555d274a16d2aa47d4f31b85b6cc
|
[
"MIT"
] | null | null | null |
tests/test_user.py
|
ndavisontest/dto-digitalmarketplace-utils
|
640f9af164a1555d274a16d2aa47d4f31b85b6cc
|
[
"MIT"
] | null | null | null |
from datetime import datetime
import mock
import pytest
from dmutils.user import user_has_role, user_logging_string, User
@pytest.fixture
def user():
return User(123, 'test@example.com', 321, 'test supplier', False, True, 'Name', 'supplier', datetime(2016, 1, 1), 5)
@pytest.fixture
def user_json():
return {
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "supplier",
"locked": False,
"active": True,
"supplier": {
"supplierCode": 321,
"name": "test supplier",
},
"termsAcceptedAt": "2016-01-01T01:00:00.0+00:00",
"application": {
"application_id": 5
}
}
}
def test_logging_string(user):
result = user_logging_string(user)
assert result
assert 'id=123' in result
assert 'role=supplier' in result
def test_user_has_role():
assert user_has_role({'users': {'role': 'admin'}}, 'admin')
def test_user_has_role_returns_false_on_invalid_json():
assert not user_has_role({'in': 'valid'}, 'admin')
def test_user_has_role_returns_false_on_none():
assert not user_has_role(None, 'admin')
def test_user_has_role_returns_false_on_non_matching_role():
assert not user_has_role({'users': {'role': 'admin'}}, 'supplier')
def test_User_from_json():
user = User.from_json({'users': {
'id': 123,
'emailAddress': 'test@example.com',
'locked': False,
'active': True,
'name': 'Name',
'role': 'admin',
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}})
assert user.id == 123
assert user.name == 'Name'
assert user.role == 'admin'
assert user.email_address == 'test@example.com'
assert not user.is_locked
assert user.is_active
def test_User_from_json_with_supplier():
user = User.from_json({'users': {
'id': 123,
'name': 'Name',
'role': 'supplier',
'emailAddress': 'test@example.com',
'locked': False,
'active': True,
'supplier': {
'supplierCode': 321,
'name': 'test supplier',
},
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}})
assert user.id == 123
assert user.name == 'Name'
assert user.role == 'supplier'
assert user.email_address == 'test@example.com'
assert user.supplier_code == 321
assert user.supplier_name == 'test supplier'
def test_User_from_json_with_application():
user = User.from_json({'users': {
'id': 123,
'name': 'Name',
'role': 'applicant',
'emailAddress': 'test@example.com',
'locked': False,
'active': True,
'application': {
'id': 5,
},
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}})
assert user.id == 123
assert user.name == 'Name'
assert user.role == 'applicant'
assert user.email_address == 'test@example.com'
assert user.application_id == 5
def test_User_from_json_without_supplier():
user = User.from_json({'users': {
'id': 123,
'name': 'Name',
'role': 'applicant',
'emailAddress': 'test@example.com',
'locked': False,
'active': True,
'supplier': None,
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}})
assert user.id == 123
assert user.name == 'Name'
assert user.role == 'applicant'
assert user.email_address == 'test@example.com'
assert user.supplier_code is None
assert user.supplier_name is None
def test_User_has_role(user_json):
user = User.from_json(user_json)
assert user.has_role('supplier')
assert not user.has_role('admin')
def test_User_has_any_role(user_json):
user = User.from_json(user_json)
assert user.has_any_role('supplier', 'other')
assert user.has_any_role('other', 'supplier')
assert not user.has_any_role('other', 'admin')
def test_User_is_part_of_team():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"permissions": [],
"name": "team name",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert user.is_part_of_team()
def test_User_is_not_part_of_team():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert not user.is_part_of_team()
def test_User_has_permission():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": False,
"permissions": ['a'],
"name": "team name",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert user.has_permission('a')
def test_User_has_no_permission():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": False,
"permissions": ['foo'],
"name": "team name",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert not user.has_permission('bar')
def test_User_has_permission_when_team_lead():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": True,
"permissions": [],
"name": "team name",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert user.has_permission('bar')
def test_when_user_is_part_of_one_team():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": True,
"permissions": [],
"name": "team name",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
team = user.get_team()
assert team['name'] == 'team name'
def test_when_user_is_part_of_two_teams():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": True,
"permissions": [],
"name": "team name 1",
}, {
"is_team_lead": True,
"permissions": [],
"name": "team name 2",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
team = user.get_team()
assert team['name'] == 'team name 1'
def test_when_user_is_part_of_two_teams_has_no_permissions():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": False,
"permissions": ['a'],
"name": "team name 1",
}, {
"is_team_lead": False,
"permissions": ['b'],
"name": "team name 2",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert not user.has_permission('a')
def test_when_user_is_part_of_two_teams_has_no_permission_because_team_leads():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": True,
"permissions": ['a'],
"name": "team name 1",
}, {
"is_team_lead": False,
"permissions": ['b'],
"name": "team name 2",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert not user.has_permission('a')
def test_when_user_is_part_of_two_teams_has_permission_because_team_leads():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": True,
"permissions": ['a'],
"name": "team name 1",
}, {
"is_team_lead": True,
"permissions": ['b'],
"name": "team name 2",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert user.has_permission('a')
def test_when_user_is_part_of_two_teams_has_permissions():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"is_team_lead": False,
"permissions": ['a'],
"name": "team name 1",
}, {
"is_team_lead": False,
"permissions": ['a'],
"name": "team name 2",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert user.has_permission('a')
def test_when_user_is_part_of_two_teams_has_permissions_when_team_id_is_given():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [{
"id": 1,
"is_team_lead": False,
"permissions": ['a'],
"name": "team name 1",
}, {
"id": 2,
"is_team_lead": False,
"permissions": ['b'],
"name": "team name 2",
}, {
"id": 3,
"is_team_lead": True,
"permissions": [],
"name": "team name 3",
}],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert user.has_permission('a', 1)
assert not user.has_permission('d', 1)
assert user.has_permission('b', 2)
assert not user.has_permission('d', 2)
assert user.has_permission('c', 3)
def test_when_user_is_not_part_of_a_team():
user = User.from_json({
"users": {
"id": 123,
"emailAddress": "test@example.com",
"name": "name",
"role": "buyer",
"locked": False,
"active": True,
"teams": [],
'termsAcceptedAt': '2016-01-01T01:00:00.0Z',
}
})
assert user.get_team() is None
def test_User_load_user(user_json):
data_api_client = mock.Mock()
data_api_client.get_user.return_value = user_json
user = User.load_user(data_api_client, 123)
data_api_client.get_user.assert_called_once_with(user_id=123)
assert user is not None
assert user.id == 123
def test_User_load_user_raises_ValueError_on_non_integer_user_id():
with pytest.raises(ValueError):
data_api_client = mock.Mock()
data_api_client.get_user.return_value = None
User.load_user(data_api_client, 'foo')
assert not data_api_client.get_user.called
def test_User_load_user_returns_None_if_no_user_is_found():
data_api_client = mock.Mock()
data_api_client.get_user.return_value = None
loaded_user = User.load_user(data_api_client, 123)
assert loaded_user is None
def test_User_load_user_returns_None_if_user_is_not_active(user_json):
user_json['users']['active'] = False
data_api_client = mock.Mock()
data_api_client.get_user.return_value = user_json
loaded_user = User.load_user(data_api_client, 123)
assert loaded_user is None
def test_user_is_active(user):
user.active = True
user.locked = False
assert user.is_active
def test_user_is_not_active_if_locked(user):
user.active = True
user.locked = True
assert not user.is_active
def test_user_is_authenticated(user):
user.active = True
user.locked = False
assert user.is_authenticated
def test_user_is_not_authenticated_if_not_active(user):
user.active = False
user.locked = False
assert not user.is_authenticated
def test_user_is_not_authenticated_if_locked(user):
user.active = True
user.locked = True
assert not user.is_authenticated
| 27.025391
| 120
| 0.517742
| 1,517
| 13,837
| 4.466051
| 0.06526
| 0.057565
| 0.038967
| 0.044871
| 0.859188
| 0.797786
| 0.766494
| 0.74952
| 0.715424
| 0.684428
| 0
| 0.044224
| 0.338151
| 13,837
| 511
| 121
| 27.078278
| 0.695567
| 0
| 0
| 0.711268
| 0
| 0
| 0.22064
| 0.02898
| 0
| 0
| 0
| 0
| 0.147887
| 1
| 0.08216
| false
| 0
| 0.00939
| 0.004695
| 0.096244
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6cecba66fd6caf937fe2d85514cef7016f2e65cd
| 117
|
py
|
Python
|
app/src/data/tasks/queries/get_task_by_id.py
|
moretonb/patterns-practice
|
00c29a74a0c74ce011028ecbc4dafc6fae91bca2
|
[
"MIT"
] | null | null | null |
app/src/data/tasks/queries/get_task_by_id.py
|
moretonb/patterns-practice
|
00c29a74a0c74ce011028ecbc4dafc6fae91bca2
|
[
"MIT"
] | null | null | null |
app/src/data/tasks/queries/get_task_by_id.py
|
moretonb/patterns-practice
|
00c29a74a0c74ce011028ecbc4dafc6fae91bca2
|
[
"MIT"
] | null | null | null |
from app.src.data.client import redis_client
def get_task_by_id(id=0):
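# Return every field of the Redis hash stored under the key "task:<id>".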
return redis_client.hgetall(f'task:{id}')
| 29.25
| 45
| 0.769231
| 22
| 117
| 3.863636
| 0.727273
| 0.258824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009524
| 0.102564
| 117
| 4
| 45
| 29.25
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.076271
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
4c8287f433addc204b8304648c218a1dbf615d65
| 170
|
py
|
Python
|
genome_designer/test_data/full_vcf_test_set/settings.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 45
|
2015-09-30T14:55:33.000Z
|
2021-06-28T02:33:30.000Z
|
genome_designer/test_data/full_vcf_test_set/settings.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 261
|
2015-06-03T20:41:56.000Z
|
2022-03-07T08:46:10.000Z
|
genome_designer/test_data/full_vcf_test_set/settings.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 22
|
2015-06-04T20:43:10.000Z
|
2022-02-27T08:27:34.000Z
|
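# Developer-local paths to the simNGS read simulator binary and its HiSeq noise runfile, used by the full VCF test set.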
SIM_NGS_BIN = '/home/glebk/Projects/genome-designer/tools/simNGS/bin'
SIM_NGS_NOISE_SOURCE = '/home/glebk/Projects/genome-designer/tool-data/simNGS/HiSeq/s_1_4x.runfile'
| 56.666667
| 99
| 0.817647
| 28
| 170
| 4.714286
| 0.678571
| 0.090909
| 0.257576
| 0.348485
| 0.469697
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.035294
| 170
| 2
| 100
| 85
| 0.792683
| 0
| 0
| 0
| 0
| 0.5
| 0.747059
| 0.747059
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c8f400e70823fdab9f6a6b90899dfaf6effc26c
| 9,948
|
py
|
Python
|
src/tests/tests_basic_config.py
|
telefonicaid/pylogops
|
b1d848578d77361db261e88aa490198b386cab27
|
[
"Apache-2.0"
] | 7
|
2015-12-11T15:40:52.000Z
|
2017-01-29T17:32:35.000Z
|
src/tests/tests_basic_config.py
|
telefonicaid/pylogops
|
b1d848578d77361db261e88aa490198b386cab27
|
[
"Apache-2.0"
] | 5
|
2015-11-17T15:38:27.000Z
|
2020-06-16T15:44:00.000Z
|
src/tests/tests_basic_config.py
|
telefonicaid/pylogops
|
b1d848578d77361db261e88aa490198b386cab27
|
[
"Apache-2.0"
] | null | null | null |
import logging
import re
import time
import six
from unittest import TestCase
from pylogops.logger import TrackingFilter, JsonFormatter
from logging import FileHandler
from pylogops import local_context
if six.PY3:
from unittest.mock import patch, call # @UnusedImport @UnresolvedImport
else:
from mock import patch, call # @Reimport @UnresolvedImport
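# Equality-by-regexp helper: lets mock call assertions match logged JSON lines against a pattern rather than an exact string.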
class RegexpMatch(object):
def __init__(self, value):
self.value = value
def __eq__(self, other):
return re.match(self.value, other) is not None
class TestBasicConfigLogging(TestCase):
def setUp(self):
if six.PY3:
self.patch_open = patch('builtins.open')
else:
self.patch_open = patch('logging.codecs.open')
TestCase.setUp(self)
def test_json_formatter(self):
with self.patch_open as open_mock:
file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
file_handler.addFilter(TrackingFilter())
file_handler.setFormatter(JsonFormatter())
logging.basicConfig()
test_logger = logging.getLogger("test")
test_logger.addHandler(file_handler)
test_logger.setLevel(logging.DEBUG)
test_logger.info("Msg")
if six.PY3:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
open_mock.return_value.write.assert_has_calls([call(RegexpMatch(
'{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","corr":null,"trans":null,"op":null,'
'"comp":"tests_basic_config","msg":"Msg"}')), call('\n')])
else:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
open_mock.return_value.write.assert_called_once_with(RegexpMatch(
'{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","corr":null,"trans":null,"op":null,'
'"comp":"tests_basic_config","msg":"Msg"}\n'))
def test_json_formatter_with_localtime(self):
with self.patch_open as open_mock:
file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
file_handler.addFilter(TrackingFilter())
file_handler.setFormatter(JsonFormatter(converter=time.localtime))
logging.basicConfig()
test_logger = logging.getLogger("test")
test_logger.addHandler(file_handler)
test_logger.setLevel(logging.DEBUG)
test_logger.info("Msg")
if six.PY3:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
open_mock.return_value.write.assert_has_calls([call(RegexpMatch(
'{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","corr":null,"trans":null,"op":null,'
'"comp":"tests_basic_config","msg":"Msg"}')), call('\n')])
else:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
open_mock.return_value.write.assert_called_once_with(RegexpMatch(
'{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","corr":null,"trans":null,"op":null,'
'"comp":"tests_basic_config","msg":"Msg"}\n'))
def test_json_formatter_with_keys_fmt(self):
with self.patch_open as open_mock:
file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
file_handler.addFilter(TrackingFilter())
file_handler.setFormatter(JsonFormatter(keys_fmt=[('lvl', 'levelname'), ('msg', 'message')]))
logging.basicConfig()
test_logger = logging.getLogger("test")
test_logger.addHandler(file_handler)
test_logger.setLevel(logging.DEBUG)
test_logger.info("Msg")
if six.PY3:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
open_mock.return_value.write.assert_has_calls([call(RegexpMatch(
'{"lvl":"INFO","msg":"Msg"}')), call('\n')])
else:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
open_mock.return_value.write.assert_called_once_with(RegexpMatch(
'{"lvl":"INFO","msg":"Msg"}\n'))
def test_json_formatter_removing_empty_keys(self):
with self.patch_open as open_mock:
file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
file_handler.addFilter(TrackingFilter())
file_handler.setFormatter(JsonFormatter(remove_blanks=True))
logging.basicConfig()
test_logger = logging.getLogger("test")
test_logger.addHandler(file_handler)
test_logger.setLevel(logging.DEBUG)
test_logger.info("Msg")
if six.PY3:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
open_mock.return_value.write.assert_has_calls([call(RegexpMatch(
'{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","comp":"tests_basic_config","msg":"Msg"}')), call('\n')])
else:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
open_mock.return_value.write.assert_called_once_with(RegexpMatch(
'{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","comp":"tests_basic_config","msg":"Msg"}\n'))
def test_json_formatter_with_extra(self):
with self.patch_open as open_mock:
file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
file_handler.addFilter(TrackingFilter())
file_handler.setFormatter(JsonFormatter())
logging.basicConfig()
test_logger = logging.getLogger("test")
test_logger.addHandler(file_handler)
test_logger.setLevel(logging.DEBUG)
test_logger.info("Msg", extra={'additional': {'key': 'extra'}})
if six.PY3:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
open_mock.return_value.write.assert_has_calls([call(RegexpMatch(
'{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","corr":null,"trans":null,"op":null,'
'"comp":"tests_basic_config","msg":"Msg","key":"extra"}')), call('\n')])
else:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
open_mock.return_value.write.assert_called_once_with(RegexpMatch(
'{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","corr":null,"trans":null,"op":null,'
'"comp":"tests_basic_config","msg":"Msg","key":"extra"}\n'))
def test_json_formatter_with_transaction(self):
with self.patch_open as open_mock:
file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
file_handler.addFilter(TrackingFilter())
file_handler.setFormatter(JsonFormatter())
logging.basicConfig()
test_logger = logging.getLogger("test")
test_logger.addHandler(file_handler)
test_logger.setLevel(logging.DEBUG)
local_context.trans = "trans"
local_context.corr = "corr"
local_context.op = "op"
test_logger.info("Msg1")
test_logger.debug("Msg2")
test_logger.error("Msg3")
if six.PY3:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
open_mock.return_value.write.assert_has_calls([
call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","corr":"corr","trans":"trans","op":"op",'
'"comp":"tests_basic_config","msg":"Msg1"}')), call('\n'),
call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"DEBUG","corr":"corr","trans":"trans","op":"op",'
'"comp":"tests_basic_config","msg":"Msg2"}')), call('\n'),
call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"ERROR","corr":"corr","trans":"trans","op":"op",'
'"comp":"tests_basic_config","msg":"Msg3"}')), call('\n')
])
else:
open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
open_mock.return_value.write.assert_has_calls([
call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"INFO","corr":"corr","trans":"trans","op":"op",'
'"comp":"tests_basic_config","msg":"Msg1"}\n')),
call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"DEBUG","corr":"corr","trans":"trans","op":"op",'
'"comp":"tests_basic_config","msg":"Msg2"}\n')),
call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
'"lvl":"ERROR","corr":"corr","trans":"trans","op":"op",'
'"comp":"tests_basic_config","msg":"Msg3"}\n'))
])
| 52.914894
| 114
| 0.54423
| 1,317
| 9,948
| 3.921792
| 0.079727
| 0.037948
| 0.040658
| 0.043369
| 0.844337
| 0.841626
| 0.83698
| 0.831946
| 0.831946
| 0.831946
| 0
| 0.044197
| 0.251709
| 9,948
| 187
| 115
| 53.197861
| 0.649651
| 0.005931
| 0
| 0.715976
| 0
| 0.08284
| 0.297593
| 0.237508
| 0
| 0
| 0
| 0
| 0.142012
| 1
| 0.053254
| false
| 0
| 0.059172
| 0.005917
| 0.130178
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ca19f29c0d741d4d7add93b11d3e73d2eb3d51b
| 279
|
py
|
Python
|
homework6/app/deps.py
|
sakost/tinkoff_fintech
|
64b9d5a2a818b4db7c438b0dc53a8f31882f95ba
|
[
"MIT"
] | null | null | null |
homework6/app/deps.py
|
sakost/tinkoff_fintech
|
64b9d5a2a818b4db7c438b0dc53a8f31882f95ba
|
[
"MIT"
] | null | null | null |
homework6/app/deps.py
|
sakost/tinkoff_fintech
|
64b9d5a2a818b4db7c438b0dc53a8f31882f95ba
|
[
"MIT"
] | 2
|
2021-08-29T15:01:39.000Z
|
2022-02-23T18:48:21.000Z
|
from redis import Redis # pylint: disable=unused-import
from rq import Queue
from .redis import image_processing_queue, redis_client
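# Thin dependency providers around the shared Redis client and the RQ image-processing queue (presumably consumed via a framework's dependency injection, e.g. FastAPI's Depends).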
def get_redis() -> 'Redis[Queue]':
return redis_client()
def get_image_processing_queue() -> Queue:
return image_processing_queue()
| 21.461538
| 56
| 0.763441
| 38
| 279
| 5.342105
| 0.368421
| 0.221675
| 0.295567
| 0.167488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150538
| 279
| 12
| 57
| 23.25
| 0.85654
| 0.103943
| 0
| 0
| 0
| 0
| 0.048387
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.428571
| 0.285714
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
e237f5509f3c79d414b024d99ac1362eacb8c958
| 30,459
|
py
|
Python
|
ckanext/datastore/tests/test_datastore.py
|
opencolorado/ckan
|
c31c8466f40f29edb63263bd36d714f6a9eb7994
|
[
"Apache-2.0"
] | 1
|
2015-03-05T03:53:11.000Z
|
2015-03-05T03:53:11.000Z
|
ckanext/datastore/tests/test_datastore.py
|
opencolorado/ckan
|
c31c8466f40f29edb63263bd36d714f6a9eb7994
|
[
"Apache-2.0"
] | null | null | null |
ckanext/datastore/tests/test_datastore.py
|
opencolorado/ckan
|
c31c8466f40f29edb63263bd36d714f6a9eb7994
|
[
"Apache-2.0"
] | null | null | null |
import json
import sqlalchemy
import ckan.plugins as p
import ckan.lib.create_test_data as ctd
import ckan.model as model
import ckan.tests as tests
import ckanext.datastore.db as db
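# End-to-end tests for the datastore_create / datastore_delete / datastore_search API actions, posted through the test WSGI app.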
class TestDatastoreCreate(tests.WsgiAppCase):
sysadmin_user = None
normal_user = None
p.load('datastore')
@classmethod
def setup_class(cls):
ctd.CreateTestData.create()
cls.sysadmin_user = model.User.get('testsysadmin')
cls.normal_user = model.User.get('annafan')
@classmethod
def teardown_class(cls):
model.repo.rebuild_db()
def test_create_requires_auth(self):
resource = model.Package.get('annakarenina').resources[0]
data = {
'resource_id': resource.id
}
postparams = '%s=1' % json.dumps(data)
res = self.app.post('/api/action/datastore_create', params=postparams,
status=403)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_create_empty_fails(self):
postparams = '%s=1' % json.dumps({})
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_create_invalid_field_type(self):
resource = model.Package.get('annakarenina').resources[0]
data = {
'resource_id': resource.id,
'fields': [{'id': 'book', 'type': 'INVALID'},
{'id': 'author', 'type': 'INVALID'}]
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_create_invalid_field_name(self):
resource = model.Package.get('annakarenina').resources[0]
data = {
'resource_id': resource.id,
'fields': [{'id': '_book', 'type': 'text'},
{'id': '_author', 'type': 'text'}]
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
data = {
'resource_id': resource.id,
'fields': [{'id': '"book"', 'type': 'text'},
{'id': '"author', 'type': 'text'}]
}
postparams = '%s=1' % json.dumps(data)
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_create_invalid_record_field(self):
resource = model.Package.get('annakarenina').resources[0]
data = {
'resource_id': resource.id,
'fields': [{'id': 'book', 'type': 'text'},
{'id': 'author', 'type': 'text'}],
'records': [{'book': 'annakarenina', 'author': 'tolstoy'},
{'book': 'warandpeace', 'published': '1869'}]
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_bad_records(self):
resource = model.Package.get('annakarenina').resources[0]
data = {
'resource_id': resource.id,
'fields': [{'id': 'book', 'type': 'text'},
{'id': 'author', 'type': 'text'}],
'records': ['bad'] # treat author as null
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
resource = model.Package.get('annakarenina').resources[0]
data = {
'resource_id': resource.id,
'fields': [{'id': 'book', 'type': 'text'},
{'id': 'author', 'type': 'text'}],
'records': [{'book': 'annakarenina', 'author': 'tolstoy'},
[],
{'book': 'warandpeace'}] # treat author as null
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_create_basic(self):
resource = model.Package.get('annakarenina').resources[0]
data = {
'resource_id': resource.id,
'fields': [{'id': 'book', 'type': 'text'},
{'id': 'author', 'type': '_json'}],
'records': [
{'book': 'crime', 'author': ['tolstoy', 'dostoevsky']},
{'book': 'annakarenina', 'author': ['tolstoy', 'putin']},
{'book': 'warandpeace'}] # treat author as null
}
### First, check whether the resource thinks it has a datastore table
postparams = '%s=1' % json.dumps({'id': resource.id})
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/resource_show', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['result']['datastore_active'] == False
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
assert res_dict['result']['resource_id'] == data['resource_id']
assert res_dict['result']['fields'] == data['fields']
assert res_dict['result']['records'] == data['records']
c = model.Session.connection()
results = c.execute('select * from "{0}"'.format(resource.id))
assert results.rowcount == 3
for i, row in enumerate(results):
assert data['records'][i].get('book') == row['book']
assert data['records'][i].get('author') == (json.loads(row['author'][0]) if row['author'] else None)
results = c.execute('''select * from "{0}" where _full_text @@ to_tsquery('warandpeace') '''.format(resource.id))
assert results.rowcount == 1, results.rowcount
results = c.execute('''select * from "{0}" where _full_text @@ to_tsquery('tolstoy') '''.format(resource.id))
assert results.rowcount == 2
model.Session.remove()
# check that the resource now has a datastore table
postparams = '%s=1' % json.dumps({'id': resource.id})
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/resource_show', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['result']['datastore_active'] == True
####### insert again: simple records
data2 = {
'resource_id': resource.id,
'records': [{'book': 'hagji murat', 'author': ['tolstoy']}]
}
postparams = '%s=1' % json.dumps(data2)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
c = model.Session.connection()
results = c.execute('select * from "{0}"'.format(resource.id))
assert results.rowcount == 4
all_data = data['records'] + data2['records']
for i, row in enumerate(results):
assert all_data[i].get('book') == row['book']
assert all_data[i].get('author') == (json.loads(row['author'][0]) if row['author'] else None)
results = c.execute('''select * from "{0}" where _full_text @@ 'tolstoy' '''.format(resource.id))
assert results.rowcount == 3
model.Session.remove()
####### insert again: extra field
data3 = {
'resource_id': resource.id,
'records': [{'book': 'crime and punishment',
'author': ['dostoevsky'], 'rating': 'good'}]
}
postparams = '%s=1' % json.dumps(data3)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
c = model.Session.connection()
results = c.execute('select * from "{0}"'.format(resource.id))
assert results.rowcount == 5
all_data = data['records'] + data2['records'] + data3['records']
for i, row in enumerate(results):
assert all_data[i].get('book') == row['book'], (i, all_data[i].get('book'), row['book'])
assert all_data[i].get('author') == (json.loads(row['author'][0]) if row['author'] else None)
results = c.execute('''select * from "{0}" where _full_text @@ to_tsquery('dostoevsky') '''.format(resource.id))
assert results.rowcount == 2
model.Session.remove()
def test_guess_types(self):
resource = model.Package.get('annakarenina').resources[1]
data = {
'resource_id': resource.id,
'fields': [{'id': 'author', 'type': '_json'},
{'id': 'count'},
{'id': 'book'},
{'id': 'date'}],
'records': [{'book': 'annakarenina', 'author': 'tolstoy',
'count': 1, 'date': '2005-12-01', 'count2': 2},
{'book': 'crime', 'author': ['tolstoy', 'dostoevsky']},
{'book': 'warandpeace'}] # treat author as null
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
c = model.Session.connection()
results = c.execute('''select * from "{0}" '''.format(resource.id))
types = [db._pg_types[field[1]] for field in results.cursor.description]
assert types == [u'int4', u'tsvector', u'_json', u'int4',
u'text', u'timestamp', u'int4'], types
assert results.rowcount == 3
for i, row in enumerate(results):
assert data['records'][i].get('book') == row['book']
assert data['records'][i].get('author') == (json.loads(row['author'][0]) if row['author'] else None)
model.Session.remove()
### extend types
data = {
'resource_id': resource.id,
'fields': [{'id': 'author', 'type': 'text'},
{'id': 'count'},
{'id': 'book'},
{'id': 'date'},
{'id': 'count2'},
{'id': 'extra', 'type':'text'},
{'id': 'date2'},
],
'records': [{'book': 'annakarenina', 'author': 'tolstoy',
'count': 1, 'date': '2005-12-01', 'count2': 2,
'nested': [1,2], 'date2': '2005-12-01'}]
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
c = model.Session.connection()
results = c.execute('''select * from "{0}" '''.format(resource.id))
types = [db._pg_types[field[1]] for field in results.cursor.description]
assert types == [u'int4', # id
u'tsvector', # fulltext
u'_json', # author
u'int4', # count
u'text', # book
u'timestamp', # date
u'int4', # count2
u'text', # extra
u'timestamp', # date2
u'_json', # count3
], types
### fields resupplied in wrong order
data = {
'resource_id': resource.id,
'fields': [{'id': 'author', 'type': 'text'},
{'id': 'count'},
{'id': 'date'}, # date and book in wrong order
{'id': 'book'},
{'id': 'count2'},
{'id': 'extra', 'type':'text'},
{'id': 'date2'},
],
'records': [{'book': 'annakarenina', 'author': 'tolstoy',
'count': 1, 'date': '2005-12-01', 'count2': 2,
'count3': 432, 'date2': '2005-12-01'}]
}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
class TestDatastoreDelete(tests.WsgiAppCase):
sysadmin_user = None
normal_user = None
@classmethod
def setup_class(cls):
p.load('datastore')
ctd.CreateTestData.create()
cls.sysadmin_user = model.User.get('testsysadmin')
cls.normal_user = model.User.get('annafan')
resource = model.Package.get('annakarenina').resources[0]
cls.data = {
'resource_id': resource.id,
'fields': [{'id': 'book', 'type': 'text'},
{'id': 'author', 'type': 'text'}],
'records': [{'book': 'annakarenina', 'author': 'tolstoy'},
{'book': 'warandpeace', 'author': 'tolstoy'}]
}
@classmethod
def teardown_class(cls):
model.repo.rebuild_db()
def _create(self):
postparams = '%s=1' % json.dumps(self.data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
return res_dict
def _delete(self):
data = {'resource_id': self.data['resource_id']}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_delete', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
assert res_dict['result'] == data
return res_dict
def test_delete_basic(self):
self._create()
self._delete()
resource_id = self.data['resource_id']
c = model.Session.connection()
try:
# check that data was actually deleted: this should raise a
# ProgrammingError as the table should not exist any more
c.execute('select * from "{0}";'.format(resource_id))
raise Exception("Data not deleted")
except sqlalchemy.exc.ProgrammingError as e:
expected_msg = 'relation "{}" does not exist'.format(resource_id)
assert expected_msg in str(e)
model.Session.remove()
def test_delete_invalid_resource_id(self):
postparams = '%s=1' % json.dumps({'resource_id': 'bad'})
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_delete', params=postparams,
extra_environ=auth, status=404)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_delete_filters(self):
self._create()
resource_id = self.data['resource_id']
# try to delete just the 'warandpeace' row
data = {'resource_id': resource_id,
'filters': {'book': 'warandpeace'}}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_delete', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
c = model.Session.connection()
result = c.execute('select * from "{0}";'.format(resource_id))
results = [r for r in result]
assert len(results) == 1
assert results[0].book == 'annakarenina'
model.Session.remove()
# shouldn't delete anything
data = {'resource_id': resource_id,
'filters': {'book': 'annakarenina', 'author': 'bad'}}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_delete', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
c = model.Session.connection()
result = c.execute('select * from "{0}";'.format(resource_id))
results = [r for r in result]
assert len(results) == 1
assert results[0].book == 'annakarenina'
model.Session.remove()
# delete the 'annakarenina' row
data = {'resource_id': resource_id,
'filters': {'book': 'annakarenina', 'author': 'tolstoy'}}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_delete', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
c = model.Session.connection()
result = c.execute('select * from "{0}";'.format(resource_id))
results = [r for r in result]
assert len(results) == 0
model.Session.remove()
self._delete()
class TestDatastoreSearch(tests.WsgiAppCase):
sysadmin_user = None
normal_user = None
@classmethod
def setup_class(cls):
p.load('datastore')
ctd.CreateTestData.create()
cls.sysadmin_user = model.User.get('testsysadmin')
cls.normal_user = model.User.get('annafan')
resource = model.Package.get('annakarenina').resources[0]
cls.data = {
'resource_id': resource.id,
'fields': [{'id': u'b\xfck', 'type': 'text'},
{'id': 'author', 'type': 'text'},
{'id': 'published'}],
'records': [{u'b\xfck': 'annakarenina', 'author': 'tolstoy', 'published': '2005-03-01', 'nested': ['b', {'moo': 'moo'}]},
{u'b\xfck': 'warandpeace', 'author': 'tolstoy', 'nested': {'a':'b'}}
]
}
postparams = '%s=1' % json.dumps(cls.data)
auth = {'Authorization': str(cls.sysadmin_user.apikey)}
res = cls.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
cls.expected_records = [{u'published': u'2005-03-01T00:00:00',
u'_id': 1,
u'nested': [u'b', {u'moo': u'moo'}], u'b\xfck': u'annakarenina', u'author': u'tolstoy'},
{u'published': None,
u'_id': 2,
u'nested': {u'a': u'b'}, u'b\xfck': u'warandpeace', u'author': u'tolstoy'}]
@classmethod
def teardown_class(cls):
model.repo.rebuild_db()
def test_search_basic(self):
data = {'resource_id': self.data['resource_id']}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == len(self.data['records'])
assert result['records'] == self.expected_records
def test_search_invalid_field(self):
data = {'resource_id': self.data['resource_id'],
'fields': [{'id': 'bad'}]}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_search_fields(self):
data = {'resource_id': self.data['resource_id'],
'fields': [u'b\xfck']}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == len(self.data['records'])
assert result['records'] == [{u'b\xfck': 'annakarenina'},
{u'b\xfck': 'warandpeace'}], result['records']
def test_search_filters(self):
data = {'resource_id': self.data['resource_id'],
'filters': {u'b\xfck': 'annakarenina'}}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == 1
assert result['records'] == [self.expected_records[0]]
def test_search_sort(self):
data = {'resource_id': self.data['resource_id'],
'sort': u'b\xfck asc, author desc'}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == 2
assert result['records'] == self.expected_records, result['records']
data = {'resource_id': self.data['resource_id'],
'sort': [u'b\xfck desc', '"author" asc']}
postparams = '%s=1' % json.dumps(data)
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == 2
assert result['records'] == self.expected_records[::-1]
def test_search_limit(self):
data = {'resource_id': self.data['resource_id'],
'limit': 1}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == 2
assert result['records'] == [self.expected_records[0]]
def test_search_invalid_limit(self):
data = {'resource_id': self.data['resource_id'],
'limit': 'bad'}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_search_offset(self):
data = {'resource_id': self.data['resource_id'],
'limit': 1,
'offset': 1}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == 2
assert result['records'] == [self.expected_records[1]]
def test_search_invalid_offset(self):
data = {'resource_id': self.data['resource_id'],
'offset': 'bad'}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth, status=409)
res_dict = json.loads(res.body)
assert res_dict['success'] is False
def test_search_full_text(self):
data = {'resource_id': self.data['resource_id'],
'q': 'annakarenina'}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == 1
assert result['records'] == [self.expected_records[0]]
data = {'resource_id': self.data['resource_id'],
'q': 'tolstoy'}
postparams = '%s=1' % json.dumps(data)
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
result = res_dict['result']
assert result['total'] == 2
assert result['records'] == self.expected_records, result['records']
assert result['fields'] == [{u'type': u'int4', u'id': u'_id'}, {u'type': u'text', u'id': u'b\xfck'}, {u'type': u'text', u'id': u'author'}, {u'type': u'timestamp', u'id': u'published'}, {u'type': u'_json', u'id': u'nested'}], result['fields']
class TestDatastoreFullTextSearch(tests.WsgiAppCase):
@classmethod
def setup_class(cls):
p.load('datastore')
ctd.CreateTestData.create()
cls.sysadmin_user = model.User.get('testsysadmin')
cls.normal_user = model.User.get('annafan')
resource = model.Package.get('annakarenina').resources[0]
cls.data = dict(
resource_id = resource.id,
fields = [
{'id': 'id'},
{'id': 'date', 'type':'date'},
{'id': 'x'},
{'id': 'y'},
{'id': 'z'},
{'id': 'country'},
{'id': 'title'},
{'id': 'lat'},
{'id': 'lon'}
],
records = [
{'id': 0, 'date': '2011-01-01', 'x': 1, 'y': 2, 'z': 3, 'country': 'DE', 'title': 'first', 'lat':52.56, 'lon':13.40},
{'id': 1, 'date': '2011-02-02', 'x': 2, 'y': 4, 'z': 24, 'country': 'UK', 'title': 'second', 'lat':54.97, 'lon':-1.60},
{'id': 2, 'date': '2011-03-03', 'x': 3, 'y': 6, 'z': 9, 'country': 'US', 'title': 'third', 'lat':40.00, 'lon':-75.5},
{'id': 3, 'date': '2011-04-04', 'x': 4, 'y': 8, 'z': 6, 'country': 'UK', 'title': 'fourth', 'lat':57.27, 'lon':-6.20},
{'id': 4, 'date': '2011-05-04', 'x': 5, 'y': 10, 'z': 15, 'country': 'UK', 'title': 'fifth', 'lat':51.58, 'lon':0},
{'id': 5, 'date': '2011-06-02', 'x': 6, 'y': 12, 'z': 18, 'country': 'DE', 'title': 'sixth', 'lat':51.04, 'lon':7.9}
]
)
postparams = '%s=1' % json.dumps(cls.data)
auth = {'Authorization': str(cls.sysadmin_user.apikey)}
res = cls.app.post('/api/action/datastore_create', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
assert res_dict['success'] is True
@classmethod
def teardown_class(cls):
model.repo.rebuild_db()
def test_search_full_text(self):
data = {'resource_id': self.data['resource_id'],
'q': 'DE'}
postparams = '%s=1' % json.dumps(data)
auth = {'Authorization': str(self.sysadmin_user.apikey)}
res = self.app.post('/api/action/datastore_search', params=postparams,
extra_environ=auth)
res_dict = json.loads(res.body)
import pprint
assert res_dict['result']['total'] == 2, pprint.pformat(res_dict)
| 42.9
| 249
| 0.539676
| 3,401
| 30,459
| 4.717142
| 0.076448
| 0.056099
| 0.041015
| 0.036901
| 0.84261
| 0.83245
| 0.80197
| 0.790999
| 0.778844
| 0.761391
| 0
| 0.016789
| 0.299944
| 30,459
| 709
| 250
| 42.960508
| 0.735591
| 0.019666
| 0
| 0.722871
| 0
| 0
| 0.194888
| 0.036898
| 0
| 0
| 0
| 0
| 0.136895
| 1
| 0.053422
| false
| 0
| 0.013356
| 0
| 0.086811
| 0.003339
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2b9620272f307ece9dc8211b05ff404d7c7a319
| 14,421
|
py
|
Python
|
test/gdal_gmt_test.py
|
usgs/MapIO
|
82f54b979dd1cf93a6ce4735bc39115985ed74b3
|
[
"CC0-1.0"
] | 4
|
2017-09-04T15:34:00.000Z
|
2020-08-18T01:44:08.000Z
|
test/gdal_gmt_test.py
|
usgs/MapIO
|
82f54b979dd1cf93a6ce4735bc39115985ed74b3
|
[
"CC0-1.0"
] | 80
|
2015-11-09T16:12:53.000Z
|
2021-11-08T17:27:53.000Z
|
test/gdal_gmt_test.py
|
usgs/MapIO
|
82f54b979dd1cf93a6ce4735bc39115985ed74b3
|
[
"CC0-1.0"
] | 13
|
2015-11-09T16:23:12.000Z
|
2022-03-28T21:01:31.000Z
|
#!/usr/bin/env python
# python 3 compatibility
from __future__ import print_function
# stdlib imports
import os.path
import sys
from collections import OrderedDict
import warnings
import tempfile
import shutil
# third party imports
import rasterio
import numpy as np
# hack the path so that I can debug these functions if I need to
homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script?
mapiodir = os.path.abspath(os.path.join(homedir, ".."))
sys.path.insert(
0, mapiodir
) # put this at the front of the system path, ignoring any installed mapio stuff
from mapio.grid2d import Grid2D
from mapio.gdal import GDALGrid
from mapio.gmt import GMTGrid
from mapio.dataset import DataSetException, DataSetWarning
from mapio.geodict import GeoDict
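# Each grid class is exercised against the on-disk formats it can write.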
FORMATS = {GDALGrid: ["EHdr"], GMTGrid: ["netcdf", "hdf", "native"]}
def test_simple_subset():
gridclasses = [GDALGrid, GMTGrid]
for gridclass in gridclasses:
for fileformat in FORMATS[gridclass]:
tdir = None
try:
geodict = GeoDict(
{
"xmin": 0,
"xmax": 4,
"ymin": 0,
"ymax": 4,
"dx": 1,
"dy": 1,
"nx": 5,
"ny": 5,
}
)
data = np.arange(1, 26, dtype=np.float32).reshape((5, 5))
tdir = tempfile.mkdtemp()
testfile = os.path.join(tdir, "test.bil")
testhdr = os.path.join(tdir, "test.hdr")
srcgrid = gridclass(data, geodict)
srcgrid.save(testfile, format=fileformat)
sampledict = GeoDict(
{
"xmin": 1,
"xmax": 3,
"ymin": 1,
"ymax": 3,
"dx": 1,
"dy": 1,
"nx": 3,
"ny": 3,
}
)
testdata = np.array(
[[7, 8, 9], [12, 13, 14], [17, 18, 19]], dtype=np.float32
)
testdict = GeoDict(
{
"xmin": 1,
"xmax": 3,
"ymin": 1,
"ymax": 3,
"dx": 1,
"dy": 1,
"nx": 3,
"ny": 3,
}
)
samplegrid = gridclass.load(testfile, sampledict)
np.testing.assert_almost_equal(samplegrid.getData(), testdata)
assert samplegrid.getGeoDict() == testdict
except Exception:
raise
finally:
# tdir may still be None if mkdtemp() was never reached
if tdir is not None and os.path.isdir(tdir):
shutil.rmtree(tdir)
def test_simple_pad():
gridclasses = [GDALGrid, GMTGrid]
for gridclass in gridclasses:
for fileformat in FORMATS[gridclass]:
tdir = None
try:
geodict = GeoDict(
{
"xmin": 0,
"xmax": 4,
"ymin": 0,
"ymax": 4,
"dx": 1,
"dy": 1,
"nx": 5,
"ny": 5,
}
)
data = np.arange(1, 26, dtype=np.float32).reshape((5, 5))
tdir = tempfile.mkdtemp()
testfile = os.path.join(tdir, "test.bil")
testhdr = os.path.join(tdir, "test.hdr")
srcgrid = gridclass(data, geodict)
srcgrid.save(testfile, format=fileformat)
sampledict = GeoDict(
{
"xmin": -1,
"xmax": 1,
"ymin": 1,
"ymax": 3,
"dx": 1,
"dy": 1,
"nx": 3,
"ny": 3,
}
)
testdata = np.array(
[[np.nan, 6, 7], [np.nan, 11, 12], [np.nan, 16, 17]],
dtype=np.float32,
)
testdict = GeoDict(
{
"xmin": -1,
"xmax": 1,
"ymin": 1,
"ymax": 3,
"dx": 1,
"dy": 1,
"nx": 3,
"ny": 3,
}
)
samplegrid = gridclass.load(testfile, sampledict, doPadding=True)
np.testing.assert_almost_equal(samplegrid.getData(), testdata)
assert samplegrid.getGeoDict() == testdict
except Exception:
raise
finally:
# tdir may still be None if mkdtemp() was never reached
if tdir is not None and os.path.isdir(tdir):
shutil.rmtree(tdir)
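# The "block_" prefix keeps the two meridian-crossing tests from being collected as tests.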
def block_test_simple_meridian():
gridclasses = [GDALGrid, GMTGrid]
for gridclass in gridclasses:
for fileformat in FORMATS[gridclass]:
tdir = None
try:
geodict = GeoDict(
{
"xmin": -180,
"xmax": 120,
"ymin": -90,
"ymax": 90,
"dx": 60,
"dy": 45,
"nx": 6,
"ny": 5,
}
)
data = np.arange(1, 31, dtype=np.float32).reshape((5, 6))
tdir = tempfile.mkdtemp()
testfile = os.path.join(tdir, "test.bil")
testhdr = os.path.join(tdir, "test.hdr")
srcgrid = gridclass(data, geodict)
srcgrid.save(testfile, format=fileformat)
sampledict = GeoDict(
{
"xmin": 60,
"xmax": -120,
"ymin": 0,
"ymax": 45,
"dx": 60,
"dy": 45,
"nx": 4,
"ny": 2,
}
)
testdata = np.array(
[
[11, 12, 7, 8],
[17, 18, 13, 14],
],
dtype=np.float32,
)
testdict = GeoDict(
{
"xmin": 60,
"xmax": -120,
"ymin": 0,
"ymax": 45,
"dx": 60,
"dy": 45,
"nx": 4,
"ny": 2,
}
)
samplegrid = gridclass.load(testfile, sampledict)
np.testing.assert_almost_equal(samplegrid.getData(), testdata)
assert samplegrid.getGeoDict() == testdict
except Exception:
raise
finally:
# tdir may still be None if mkdtemp() was never reached
if tdir is not None and os.path.isdir(tdir):
shutil.rmtree(tdir)
def test_simple_interp():
gridclasses = [GDALGrid, GMTGrid]
for gridclass in gridclasses:
for fileformat in FORMATS[gridclass]:
tdir = None
try:
geodict = GeoDict(
{
"xmin": -180,
"xmax": 120,
"ymin": -90,
"ymax": 90,
"dx": 60,
"dy": 45,
"nx": 6,
"ny": 5,
}
)
data = np.arange(1, 31, dtype=np.float32).reshape((5, 6))
tdir = tempfile.mkdtemp()
testfile = os.path.join(tdir, "test.bil")
testhdr = os.path.join(tdir, "test.hdr")
srcgrid = gridclass(data, geodict)
srcgrid.save(testfile, format=fileformat)
sampledict = GeoDict(
{
"xmin": -90,
"xmax": 30,
"ymin": -22.5,
"ymax": 22.5,
"dx": 60,
"dy": 45,
"nx": 3,
"ny": 2,
}
)
testdata = np.array(
[
[11.5, 12.5, 13.5],
[17.5, 18.5, 19.5],
],
dtype=np.float32,
)
testdict = GeoDict(
{
"xmin": -90,
"xmax": 30,
"ymin": -22.5,
"ymax": 22.5,
"dx": 60,
"dy": 45,
"nx": 3,
"ny": 2,
}
)
samplegrid = gridclass.load(testfile, sampledict, resample=True)
np.testing.assert_almost_equal(samplegrid.getData(), testdata)
assert samplegrid.getGeoDict() == testdict
except Exception:
raise
finally:
# tdir may still be None if mkdtemp() was never reached
if tdir is not None and os.path.isdir(tdir):
shutil.rmtree(tdir)
def block_test_meridian_interp():
gridclasses = [GDALGrid, GMTGrid]
for gridclass in gridclasses:
for fileformat in FORMATS[gridclass]:
tdir = None
try:
geodict = GeoDict(
{
"xmin": -180,
"xmax": 120,
"ymin": -90,
"ymax": 90,
"dx": 60,
"dy": 45,
"nx": 6,
"ny": 5,
}
)
data = np.arange(1, 31, dtype=np.float32).reshape((5, 6))
tdir = tempfile.mkdtemp()
testfile = os.path.join(tdir, "test.bil")
testhdr = os.path.join(tdir, "test.hdr")
srcgrid = gridclass(data, geodict)
srcgrid.save(testfile, format=fileformat)
sampledict = GeoDict(
{
"xmin": 90,
"xmax": -150,
"ymin": -22.5,
"ymax": 22.5,
"dx": 60,
"dy": 45,
"nx": 3,
"ny": 2,
}
)
testdata = np.array(
[
[14.5, 12.5, 10.5],
[20.5, 18.5, 16.5],
],
dtype=np.float32,
)
testdict = GeoDict(
{
"xmin": 90,
"xmax": -150,
"ymin": -22.5,
"ymax": 22.5,
"dx": 60,
"dy": 45,
"nx": 3,
"ny": 2,
}
)
samplegrid = gridclass.load(testfile, sampledict, resample=True)
np.testing.assert_almost_equal(samplegrid.getData(), testdata)
assert samplegrid.getGeoDict() == testdict
except Exception:
raise
finally:
# tdir may still be None if mkdtemp() was never reached
if tdir is not None and os.path.isdir(tdir):
shutil.rmtree(tdir)
# def test_360():
# gridclasses = [GDALGrid,GMTGrid]
# for gridclass in gridclasses:
# for fileformat in FORMATS[gridclass]:
# tdir = None
# try:
# geodict = GeoDict({'xmin':-180,
# 'xmax':120,
# 'ymin':-90,
# 'ymax':90,
# 'dx':60,
# 'dy':45,
# 'nx':6,
# 'ny':5})
# data = np.arange(1,31,dtype=np.float32).reshape((5,6))
# tdir = tempfile.mkdtemp()
# testfile = os.path.join(tdir,'test.bil')
# testhdr = os.path.join(tdir,'test.hdr')
# srcgrid = gridclass(data,geodict)
# srcgrid.save(testfile,format=fileformat)
# sampledict = GeoDict({'xmin':-90,
# 'xmax':30,
# 'ymin':-22.5,
# 'ymax':22.5,
# 'dx':60,
# 'dy':45,
# 'nx':3,
# 'ny':2})
# testdata = np.array([[11.5,12.5,13.5],
# [17.5,18.5,19.5],
# ],dtype=np.float32)
# testdict = GeoDict({'xmin':-90,
# 'xmax':30,
# 'ymin':-22.5,
# 'ymax':22.5,
# 'dx':60,
# 'dy':45,
# 'nx':3,
# 'ny':2})
# samplegrid = gridclass.load(testfile,sampledict,resample=True)
# np.testing.assert_almost_equal(samplegrid.getData(),testdata)
# assert samplegrid.getGeoDict() == testdict
# except Exception as e:
# raise(e)
# finally:
# if os.path.isdir(tdir):
# shutil.rmtree(tdir)
if __name__ == "__main__":
test_simple_interp()
test_simple_subset()
# test_simple_meridian()
# test_meridian_interp()
test_simple_pad()
| 35.432432
| 81
| 0.342001
| 1,116
| 14,421
| 4.37276
| 0.148746
| 0.028279
| 0.026639
| 0.034426
| 0.82418
| 0.82418
| 0.816189
| 0.816189
| 0.807172
| 0.802459
| 0
| 0.060794
| 0.552874
| 14,421
| 406
| 82
| 35.519704
| 0.69603
| 0.170723
| 0
| 0.725904
| 0
| 0
| 0.039402
| 0
| 0
| 0
| 0
| 0
| 0.03012
| 1
| 0.01506
| false
| 0
| 0.042169
| 0
| 0.057229
| 0.003012
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2c52e2ea6b9a7234d9082b85200f3113574306a1
| 19,093
|
py
|
Python
|
test/unit/mongo_class/repset_connect.py
|
deepcoder42/mongo-lib
|
fa2b65587ab88ee90c9d85f12dd642c6295e0d94
|
[
"MIT"
] | null | null | null |
test/unit/mongo_class/repset_connect.py
|
deepcoder42/mongo-lib
|
fa2b65587ab88ee90c9d85f12dd642c6295e0d94
|
[
"MIT"
] | null | null | null |
test/unit/mongo_class/repset_connect.py
|
deepcoder42/mongo-lib
|
fa2b65587ab88ee90c9d85f12dd642c6295e0d94
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Classification (U)
"""Program: repset_connect.py
Description: Unit testing of RepSet.connect in mongo_class.py.
Usage:
test/unit/mongo_class/repset_connect.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
import mock
# Local
sys.path.append(os.getcwd())
import mongo_class
import version
__version__ = version.__version__
class UnitTest(unittest.TestCase):
"""Class: UnitTest
Description: Unit test class for RepSet.connect.
Methods:
setUp
test_arg_no_repset2
test_arg_no_repset
test_arg_repset2
test_arg_repset
test_fail_get_srv_attr2
test_fail_get_srv_attr
test_uri_no_repset2
test_uri_no_repset
test_uri_repset2
test_uri_repset
test_auth_arg2
test_auth_arg
test_auth_uri2
test_auth_uri
test_auth_true2
test_auth_true
test_no_auth2
test_no_auth
test_conn_true2
test_conn_true
test_conn_false2
test_conn_false
test_connections_passed2
test_connections_passed
test_no_conn_list3
test_no_conn_list2
test_no_conn_list1
test_no_conn_list
"""
def setUp(self):
"""Function: setUp
Description: Initialization for unit testing.
Arguments:
"""
self.name = "Mongo_Server"
self.user = "mongo_user"
self.japd = "mongo_pd"
self.host = "host_server"
self.port = 27017
self.dbs = "test"
self.coll = None
self.repset = "mongo_repset"
self.repset2 = None
self.repset_hosts = "host1:27017, host2:27107"
self.db_auth = None
self.conf_file = "Conf_File"
self.use_uri = True
self.use_arg = True
self.connections = ["mongo1:27017", "mongo2:27017", "mongo3:27017"]
self.conn = "Mongo_Connection"
self.errmsg = "Error Message"
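# Editorial note: stacked mock.patch decorators apply bottom-up, so in each
# test below the patch nearest the method (Server.get_srv_attr) supplies the
# first mock argument (mock_get) and pymongo.MongoClient supplies the second
# (mock_mongo).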
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_arg_no_repset2(self, mock_get, mock_mongo):
"""Function: test_arg_no_repset2
Description: Test with arg and no repset present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset2, auth=True, use_arg=True)
mongo.connect()
self.assertTrue(mongo.use_arg)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_arg_no_repset(self, mock_get, mock_mongo):
"""Function: test_arg_no_repset
Description: Test with arg and no repset present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset2, auth=True, use_arg=True)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_arg_repset2(self, mock_get, mock_mongo):
"""Function: test_arg_repset2
Description: Test with arg and repset present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_arg=True)
mongo.connect()
self.assertTrue(mongo.use_arg)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_arg_repset(self, mock_get, mock_mongo):
"""Function: test_arg_repset
Description: Test with arg and repset present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_arg=True)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_fail_get_srv_attr2(self, mock_get, mock_mongo):
"""Function: test_fail_get_srv_attr2
Description: Test with failed get_srv_attr call.
Arguments:
"""
mock_get.return_value = (False, self.errmsg)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_uri=True)
mongo.connect()
self.assertTrue(mongo.use_uri)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_fail_get_srv_attr(self, mock_get, mock_mongo):
"""Function: test_fail_get_srv_attr
Description: Test with failed get_srv_attr call.
Arguments:
"""
mock_get.return_value = (False, self.errmsg)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_uri=True)
self.assertEqual(mongo.connect(), (False, self.errmsg))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_uri_no_repset2(self, mock_get, mock_mongo):
"""Function: test_uri_no_repset2
Description: Test with uri and no repset present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset2, auth=True, use_uri=True)
mongo.connect()
self.assertTrue(mongo.use_uri)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_uri_no_repset(self, mock_get, mock_mongo):
"""Function: test_uri_no_repset
Description: Test with uri and no repset present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset2, auth=True, use_uri=True)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_uri_repset2(self, mock_get, mock_mongo):
"""Function: test_uri_repset2
Description: Test with uri and repset present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_uri=True)
mongo.connect()
self.assertTrue(mongo.use_uri)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_uri_repset(self, mock_get, mock_mongo):
"""Function: test_uri_repset
Description: Test with uri and repset present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_uri=True)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_auth_arg2(self, mock_get, mock_mongo):
"""Function: test_auth_arg2
Description: Test with auth and arg present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_arg=True)
mongo.connect()
self.assertTrue(mongo.use_arg)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_auth_arg(self, mock_get, mock_mongo):
"""Function: test_auth_arg
Description: Test with auth and arg present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_arg=True)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_auth_uri2(self, mock_get, mock_mongo):
"""Function: test_auth_uri2
Description: Test with auth and uri present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_uri=True)
mongo.connect()
self.assertTrue(mongo.use_uri)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_auth_uri(self, mock_get, mock_mongo):
"""Function: test_auth_uri
Description: Test with auth and uri present.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True, use_uri=True)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_auth_true2(self, mock_get, mock_mongo):
"""Function: test_auth_true2
Description: Test with auth set to True.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True)
mongo.connect()
self.assertTrue(mongo.auth)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_auth_true(self, mock_get, mock_mongo):
"""Function: test_auth_true
Description: Test with auth set to True.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_no_auth2(self, mock_get, mock_mongo):
"""Function: test_no_auth2
Description: Test with no authentication set.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=False)
mongo.connect()
self.assertFalse(mongo.auth)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_no_auth(self, mock_get, mock_mongo):
"""Function: test_no_auth
Description: Test with no authentication set.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=False)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_conn_true2(self, mock_get, mock_mongo):
"""Function: test_conn_true2
Description: Test with conn set to true.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True)
mongo.connect()
self.assertTrue(mongo.auth)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_conn_true(self, mock_get, mock_mongo):
"""Function: test_conn_true
Description: Test with conn set to true.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(
self.name, self.user, self.japd, self.host, self.port,
repset=self.repset, auth=True)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_conn_false2(self, mock_get, mock_mongo):
"""Function: test_conn_false2
Description: Test with conn set to false.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(self.name, self.user, self.japd,
self.host, self.port, repset=self.repset)
mongo.connect()
self.assertEqual(mongo.conn, self.conn)
@mock.patch("mongo_class.pymongo.MongoClient")
@mock.patch("mongo_class.Server.get_srv_attr")
def test_conn_false(self, mock_get, mock_mongo):
"""Function: test_conn_false
Description: Test with conn set to false.
Arguments:
"""
mock_get.return_value = (True, None)
mock_mongo.return_value = self.conn
mongo = mongo_class.RepSet(self.name, self.user, self.japd,
self.host, self.port, repset=self.repset)
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.Server.get_srv_attr")
def test_connections_passed2(self, mock_get):
"""Function: test_connections_passed2
Description: Test with connections passed.
Arguments:
"""
mock_get.return_value = (True, None)
mongo = mongo_class.RepSet(self.name, self.user, self.japd,
self.host, self.port, repset=self.repset)
mongo.conn = True
mongo.connect(connections=self.connections)
self.assertEqual(
(mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
mongo.repset_hosts),
(self.name, self.user, self.japd, self.host, self.port,
None))
@mock.patch("mongo_class.Server.get_srv_attr")
def test_connections_passed(self, mock_get):
"""Function: test_connections_passed
Description: Test with connections passed.
Arguments:
"""
mock_get.return_value = (True, None)
mongo = mongo_class.RepSet(self.name, self.user, self.japd,
self.host, self.port, repset=self.repset)
mongo.conn = True
self.assertEqual(mongo.connect(connections=self.connections),
(True, None))
@mock.patch("mongo_class.Server.get_srv_attr")
def test_no_conn_list3(self, mock_get):
"""Function: test_no_conn_list3
Description: Test no connections passed, set by repset_hosts.
Arguments:
"""
mock_get.return_value = (True, None)
mongo = mongo_class.RepSet(self.name, self.user, self.japd,
self.host, self.port, repset=self.repset,
repset_hosts=self.repset_hosts)
mongo.conn = True
mongo.connect()
self.assertEqual(
(mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
mongo.repset_hosts),
(self.name, self.user, self.japd, self.host, self.port,
self.repset_hosts))
@mock.patch("mongo_class.Server.get_srv_attr")
def test_no_conn_list2(self, mock_get):
"""Function: test_no_conn_list2
Description: Test no connections passed, set by repset_hosts.
Arguments:
"""
mock_get.return_value = (True, None)
mongo = mongo_class.RepSet(self.name, self.user, self.japd,
self.host, self.port, repset=self.repset,
repset_hosts=self.repset_hosts)
mongo.conn = True
self.assertEqual(mongo.connect(), (True, None))
@mock.patch("mongo_class.Server.get_srv_attr")
def test_no_conn_list1(self, mock_get):
"""Function: test_no_conn_list2
Description: Test with no connections passed.
Arguments:
"""
mock_get.return_value = (True, None)
mongo = mongo_class.RepSet(self.name, self.user, self.japd,
self.host, self.port, repset=self.repset)
mongo.conn = True
mongo.connect()
self.assertEqual(
(mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
mongo.repset_hosts),
(self.name, self.user, self.japd, self.host, self.port,
None))
@mock.patch("mongo_class.Server.get_srv_attr")
def test_no_conn_list(self, mock_get):
"""Function: test_no_conn_list
Description: Test with no connections passed.
Arguments:
"""
mock_get.return_value = (True, None)
mongo = mongo_class.RepSet(self.name, self.user, self.japd,
self.host, self.port, repset=self.repset)
mongo.conn = True
self.assertEqual(mongo.connect(), (True, None))
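# ---------------------------------------------------------------------------
# Editorial sketch (illustrative, not mongo-lib code): taken together, the
# tests above pin down a contract for connect() -- it is effectively a no-op
# when self.conn is already set, a connections= argument overrides
# repset_hosts for that call, and the (status, errmsg) tuple returned by
# get_srv_attr() is propagated unchanged.  The stub factory below stands in
# for pymongo.MongoClient; every name here is an assumption.
class _ConnectContractSketch(object):

    def __init__(self, client_factory, repset_hosts=None):
        self.conn = None
        self.repset_hosts = repset_hosts
        self._client_factory = client_factory

    def get_srv_attr(self):
        # Real code probes the server here and may return (False, errmsg).
        return (True, None)

    def connect(self, connections=None):
        if not self.conn:
            hosts = connections or self.repset_hosts
            self.conn = self._client_factory(hosts)
        return self.get_srv_attr()


assert _ConnectContractSketch(lambda hosts: "conn").connect() == (True, None)
# ---------------------------------------------------------------------------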
if __name__ == "__main__":
unittest.main()
| 26.967514
| 76
| 0.621537
| 2,368
| 19,093
| 4.776182
| 0.051943
| 0.071618
| 0.061892
| 0.083996
| 0.87038
| 0.858532
| 0.847834
| 0.842706
| 0.803271
| 0.772856
| 0
| 0.006332
| 0.272089
| 19,093
| 707
| 77
| 27.005658
| 0.807454
| 0.185304
| 0
| 0.77592
| 0
| 0
| 0.118203
| 0.106956
| 0
| 0
| 0
| 0
| 0.093645
| 1
| 0.09699
| false
| 0.006689
| 0.023411
| 0
| 0.123746
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c5caac15e5d96c70b87b332f9e7161e15b09350
| 30,473
|
py
|
Python
|
tests/test_pyformance_reporter.py
|
JosephMeghanath/apptuit-py
|
ae0d038931efca94435e3a5efe5e4a4ed6f1956e
|
[
"Apache-2.0"
] | null | null | null |
tests/test_pyformance_reporter.py
|
JosephMeghanath/apptuit-py
|
ae0d038931efca94435e3a5efe5e4a4ed6f1956e
|
[
"Apache-2.0"
] | null | null | null |
tests/test_pyformance_reporter.py
|
JosephMeghanath/apptuit-py
|
ae0d038931efca94435e3a5efe5e4a4ed6f1956e
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
"""
Tests for apptuit pyformance reporter
"""
import os
import random
import socket
import time
from nose.tools import assert_raises, assert_in, assert_equals, assert_greater_equal, \
assert_not_equal, assert_is_none
from pyformance import MetricsRegistry
from requests.exceptions import HTTPError
from apptuit import ApptuitSendException, APPTUIT_PY_TOKEN, APPTUIT_PY_TAGS
from apptuit.pyformance.apptuit_reporter import ApptuitReporter, BATCH_SIZE, \
NUMBER_OF_TOTAL_POINTS, NUMBER_OF_SUCCESSFUL_POINTS, NUMBER_OF_FAILED_POINTS, DISABLE_HOST_TAG
from apptuit.utils import sanitize_name_prometheus, sanitize_name_apptuit
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
@patch('apptuit.apptuit_client.requests.post')
def test_batch_send(mock_post):
"""
Test that when we create more than BATCH_SIZE points,
all of them are sent
"""
mock_post.return_value.status_code = 204
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
points_to_be_created = BATCH_SIZE * 2 + 10
counters = [registry.counter("counter%d" % i) for i in range(points_to_be_created)]
for i in range(points_to_be_created):
counters[i].inc()
reporter.report_now()
total_points_sent = reporter._meta_metrics_registry.counter(NUMBER_OF_TOTAL_POINTS).get_count()
assert_equals(total_points_sent, points_to_be_created)
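# ---------------------------------------------------------------------------
# Editorial sketch: the meta-metric checked above counts points, not HTTP
# posts.  If the reporter splits its queue into batches of BATCH_SIZE, then
# BATCH_SIZE * 2 + 10 points should need ceil(n / BATCH_SIZE) == 3 posts for
# any BATCH_SIZE >= 10; the helper below is illustrative only.
def _expected_posts_sketch(total_points, batch_size):
    return -(-total_points // batch_size)  # ceiling division


for _bs in (10, 100, 1000):
    assert _expected_posts_sketch(2 * _bs + 10, _bs) == 3
# ---------------------------------------------------------------------------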
@patch('apptuit.apptuit_client.requests.post')
def test_partially_successful_send(mock_post):
"""
Test that we handle partially successful sends
"""
mock_post.return_value.status_code = 400
mock_post.side_effect = ApptuitSendException("failed to send some points", 400,
success=98, failed=2, errors=[])
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
points_to_be_created = 100
counters = [registry.counter("counter%d" % i) for i in range(points_to_be_created)]
for i in range(points_to_be_created):
counters[i].inc()
with assert_raises(ApptuitSendException):
reporter.report_now()
successful_points_sent = reporter._meta_metrics_registry. \
counter(NUMBER_OF_SUCCESSFUL_POINTS).get_count()
failed_points_count = reporter._meta_metrics_registry. \
counter(NUMBER_OF_FAILED_POINTS).get_count()
assert_equals(successful_points_sent, 98)
assert_equals(failed_points_count, 2)
@patch('apptuit.apptuit_client.requests.post')
def test_send_negative(mock_post):
"""
Test negative response from the Apptuit backend
"""
mock_post.return_value.status_code = 503
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
cput = registry.histogram("cpu")
count = 0
while True:
cput.add(random.randint(1, 100))
count = count + 1
if count > 10000:
break
with assert_raises(ApptuitSendException):
reporter.report_now()
@patch('apptuit.apptuit_client.requests.post')
def test_reporter_thread_active(mock_post):
"""
Test that reporter thread is active even if we are not able to send data
"""
mock_post.return_value.status_code = 503
mock_post.side_effect = HTTPError()
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
reporter.start()
cput = registry.histogram("cpu")
cput.add(random.randint(1, 100))
time.sleep(3)
assert_greater_equal(mock_post.call_count, 2)
@patch('apptuit.apptuit_client.requests.post')
def test_invalid_metric_name(mock_post):
"""
Test for invalid metric name when reporting data
"""
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr\\",
tags=tags)
cpu = registry.histogram("cpu")
for i in range(1, 10):
cpu.add(random.randint(i, 100))
with assert_raises(ValueError) as ex:
reporter.report_now()
@patch('apptuit.apptuit_client.requests.post')
def test_invalid_tag(mock_post):
"""
Test for invalid tag key when reporting data
"""
token = "asdashdsauh_8aeraerf"
tags = {"h\\ost": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
cpu = registry.histogram("cpu")
for i in range(1, 10):
cpu.add(random.randint(i, 100))
with assert_raises(ValueError) as ex:
reporter.report_now()
def test_invalid_registry():
"""
Test for invalid registry object when reporting data
"""
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = None
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
with assert_raises(AttributeError) as ex:
reporter._collect_data_points(None, None)
@patch('apptuit.apptuit_client.requests.post')
def test_tags_with_key(mock_post):
"""
Test that additional tags passed via the metric name work
"""
mock_post.return_value.status_code = 204
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
cpu = registry.histogram('cpu {"tagk1":22,"tagk2":"tagv2"}')
for i in range(1, 10):
cpu.add(random.randint(i, 100))
reporter.report_now()
@patch('apptuit.apptuit_client.requests.post')
def test_tags_with_key_invalid(mock_post):
"""
Test that invalid tags raise error
"""
mock_post.return_value.status_code = 204
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
cpu = registry.histogram('cpu {"tagk1":1,"tagk2":"tagv2"')
for i in range(1, 10):
cpu.add(random.randint(i, 100))
with assert_raises(ValueError):
reporter.report_now()
def test_calling_report_now():
"""
Test that report now is being called
"""
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
counter_test = registry.counter("counter")
counter_test.inc(2)
with patch('apptuit.apptuit_client.requests.post') as mock_method:
mock_method.return_value.status_code = 200
reporter.report_now()
assert_equals(mock_method.called, True)
@patch('apptuit.apptuit_client.requests.post')
def test_zero_tags(mock_post):
"""
Test that using reporter without tags does not raise error
(we add host tag)
"""
mock_post.return_value.status_code = 204
token = "asdashdsauh_8aeraerf"
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.")
counter_test = registry.counter('counter')
counter_test.inc(2)
reporter.report_now()
@patch('apptuit.apptuit_client.requests.post')
def test_zero_tags_with_host_disabled(mock_post):
"""
Test that using reporter without tags raises error when the host tag is disabled
"""
mock_post.return_value.status_code = 204
token = "asdashdsauh_8aeraerf"
registry = MetricsRegistry()
with patch.dict(os.environ, {DISABLE_HOST_TAG: "True"}):
reporter = ApptuitReporter(sanitize_mode=None,
registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.")
counter_test = registry.counter('counter')
counter_test.inc(2)
with assert_raises(ValueError):
reporter.report_now()
def test_no_token():
"""
Test that no token raises error
"""
registry = MetricsRegistry()
with assert_raises(ValueError) as ex:
ApptuitReporter(sanitize_mode=None,
registry=registry,
reporting_interval=1,
prefix="apr.")
def test_reporter_tags():
"""
Test that reporter tags are working as expected
"""
mock_environ = patch.dict(os.environ, {APPTUIT_PY_TOKEN: "environ_token",
APPTUIT_PY_TAGS: 'host: environ, ip: 1.1.1.1'})
mock_environ.start()
reporter = ApptuitReporter(sanitize_mode=None, tags={"host": "reporter", "ip": "2.2.2.2"})
assert_equals(reporter.tags, {"host": "reporter", "ip": "2.2.2.2"})
reporter = ApptuitReporter(sanitize_mode=None, )
assert_equals(reporter.tags, {"host": "environ", "ip": "1.1.1.1"})
reporter = ApptuitReporter(sanitize_mode=None, tags={"test": "val"})
assert_equals(reporter.tags, {"host": "environ", "ip": "1.1.1.1", "test": "val"})
mock_environ.stop()
def test_collect_data_points():
"""
Test data is being collected correctly
"""
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
prefix="apr.",
tags=tags)
counter_test = registry.counter('counter {"tk1":"tv1","tk2":"tv2"}')
counter_test.inc(2)
dps = reporter._collect_data_points(reporter.registry)
assert_equals(len(dps), 1)
assert_equals(dps[0].value, 2)
assert_equals(dps[0].metric, "apr.counter.count")
assert_equals(dps[0].tags, {'host': 'localhost', 'region': 'us-east-1',
'service': 'web-server', 'tk1': 'tv1',
'tk2': 'tv2'})
def test_globaltags_override():
"""
Test that if the global tags and metric tags contain the same tag key,
the metric tags override the global tags
"""
host = socket.gethostname()
token = "asdashdsauh_8aeraerf"
tags = {"region": "us-east-1"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=tags)
counter1 = registry.counter('counter1 {"region":"us-west-2","id": 1}')
counter2 = registry.counter('counter2 {"region":"us-west-3","id": 2, "new_tag": "foo"}')
counter3 = registry.counter('counter3')
counter1.inc(2)
counter2.inc()
counter3.inc()
dps = reporter._collect_data_points(reporter.registry)
dps = sorted(dps, key=lambda x: x.metric)
assert_equals(dps[0].tags, {"region": "us-west-2", "id": 1, "host": host})
assert_equals(dps[1].tags, {"region": "us-west-3", "id": 2, "new_tag": "foo", "host": host})
assert_equals(dps[2].tags, {"region": "us-east-1", "host": host})
assert_equals(reporter.tags, {"region": "us-east-1", "host": host})
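# ---------------------------------------------------------------------------
# Editorial sketch (illustrative, not apptuit's actual parser): several tests
# in this file embed tags in the metric name, e.g.
# 'counter1 {"region":"us-west-2","id": 1}'.  The observable contract is that
# the name splits at the first '{', the suffix parses as JSON, and metric
# tags win over global/reporter tags on conflicts.  A minimal parser with
# that behaviour:
import json


def _split_metric_name_sketch(name, global_tags=None):
    brace = name.find("{")
    if brace == -1:
        return name.strip(), dict(global_tags or {})
    tags = dict(global_tags or {})
    tags.update(json.loads(name[brace:]))  # metric tags override globals
    return name[:brace].strip(), tags


assert _split_metric_name_sketch(
    'counter1 {"region":"us-west-2","id": 1}', {"region": "us-east-1"}
) == ("counter1", {"region": "us-west-2", "id": 1})
# ---------------------------------------------------------------------------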
def test_globaltags_none():
"""
Test that metric tags work when global tags are not present
"""
host = socket.gethostname()
token = "asdashdsauh_8aeraerf"
tags = {"region": "us-east-1"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=None)
counter1 = registry.counter('counter1 {"region":"us-west-2","id": 1}')
counter2 = registry.counter('counter2 {"region":"us-west-3","id": 2, "new_tag": "foo"}')
counter1.inc(2)
counter2.inc()
dps = reporter._collect_data_points(reporter.registry)
dps = sorted(dps, key=lambda x: x.metric)
assert_equals(len(dps), 2)
assert_equals(dps[0].tags, {"region": "us-west-2", "id": 1, "host": host})
assert_equals(dps[1].tags, {"region": "us-west-3", "id": 2, "new_tag": "foo", "host": host})
assert_equals(reporter.tags, {"host": host})
def test_valid_prefix():
"""
Test that prefix works
"""
token = "asdashdsauh_8aeraerf"
tags = {"region": "us-east-1"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
prefix="pre-",
token=token,
tags=tags)
counter1 = registry.counter('counter1')
counter1.inc()
dps = reporter._collect_data_points(reporter.registry)
assert_equals(dps[0].metric, "pre-counter1.count")
def test_none_prefix():
"""
Test for None prefix
"""
token = "asdashdsauh_8aeraerf"
tags = {"region": "us-east-1"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
prefix=None,
token=token,
tags=tags)
counter1 = registry.counter('counter1')
counter1.inc()
dps = reporter._collect_data_points(reporter.registry)
assert_equals(dps[0].metric, "counter1.count")
@patch('apptuit.apptuit_client.requests.post')
def test_meta_metrics_of_reporter(mock_post):
"""
Test that meta metrics of reporter work
"""
mock_post.return_value.status_code = 200
token = "asdashdsauh_8aeraerf"
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=None)
cput = registry.counter("cpu.time")
cput.inc(1)
dps = reporter._collect_data_points(reporter.registry)
assert_equals(len(dps), 1)
assert_equals(dps[0].metric, "cpu.time.count")
assert_equals(dps[0].value, 1)
reporter.report_now()
dps = reporter._collect_data_points(reporter._meta_metrics_registry)
dps = sorted(dps, key=lambda x: x.metric)
assert_equals(len(dps), 18)
assert_equals(dps[0].metric, "apptuit.reporter.send.failed.count")
assert_equals(dps[1].metric, "apptuit.reporter.send.successful.count")
assert_equals(dps[11].metric, "apptuit.reporter.send.time.count")
assert_equals(dps[17].metric, "apptuit.reporter.send.total.count")
@patch('apptuit.apptuit_client.requests.post')
def test_process_metrics_of_reporter_not_active(mock_post):
"""
Test that process metrics of the reporter are not active
"""
mock_post.return_value.status_code = 200
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=tags)
reporter.report_now()
assert_raises(AttributeError, lambda: reporter.resource_metric_names)
assert_raises(AttributeError, lambda: reporter.thread_metrics_names)
assert_raises(AttributeError, lambda: reporter.gc_metric_names)
@patch('apptuit.apptuit_client.requests.post')
def test_process_metrics_of_reporter_is_active(mock_post):
"""
Test that process metrics of the reporter are active
"""
mock_post.return_value.status_code = 200
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=tags,
collect_process_metrics=True)
reporter.report_now()
for i in reporter.resource_metric_names:
assert_in(i, registry._counters)
for i in reporter.thread_metrics_names:
assert_in(i, registry._gauges)
for i in reporter.gc_metric_names:
assert_in(i, registry._counters)
@patch('apptuit.apptuit_client.requests.post')
def test_prometheus_sanitizer_of_reporter(mock_post):
"""
Test that prometheus_sanitizer of reporter works
"""
mock_post.return_value.status_code = 200
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region-loc": "us-east-1", "service.type/name": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode="prometheus",
registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=tags, )
assert_equals(reporter.client.sanitizer, sanitize_name_prometheus)
unicode_counter = registry.counter(u'abc.日本語')
unicode_counter.inc(1)
dps = reporter._collect_data_points(reporter.registry)
payload = reporter.client._create_payload_from_datapoints(dps)
assert_equals(payload[0]['metric'], u'abc_count')
assert_equals(payload[0]['value'], 1)
registry.clear()
cput = registry.counter('7&&cpu-time/seconds{"total-%": "100"}')
cput.inc(1)
dps = reporter._collect_data_points(reporter.registry)
payload = reporter.client._create_payload_from_datapoints(dps)
assert_equals(len(payload), 1)
assert_equals(payload[0]['metric'], "_7_cpu_time_seconds_count")
assert_equals(payload[0]['tags'], {'host': 'localhost', 'region_loc': 'us-east-1',
'service_type_name': 'web-server', 'total_': '100'})
assert_equals(payload[0]['value'], 1)
reporter.report_now()
dps = reporter._collect_data_points(reporter._meta_metrics_registry)
payload = reporter.client._create_payload_from_datapoints(dps)
assert_equals(len(payload), 18)
payload = sorted(payload, key=lambda x: x['metric'])
assert_equals(payload[0]['metric'], "apptuit_reporter_send_failed_count")
assert_equals(payload[1]['metric'], "apptuit_reporter_send_successful_count")
assert_equals(payload[11]['metric'], "apptuit_reporter_send_time_count")
assert_equals(payload[17]['metric'], "apptuit_reporter_send_total_count")
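# ---------------------------------------------------------------------------
# Editorial sketch (an approximation inferred from the assertions above, not
# the real sanitize_name_prometheus): every run of characters outside
# [a-zA-Z0-9_] collapses to a single '_', and a leading digit is guarded
# with an extra '_'.
import re


def _sanitize_prometheus_sketch(name):
    out = re.sub(r"[^a-zA-Z0-9_]+", "_", name)
    if out and out[0].isdigit():
        out = "_" + out  # metric names must not start with a digit
    return out


assert _sanitize_prometheus_sketch(u"abc.日本語.count") == "abc_count"
assert _sanitize_prometheus_sketch("7&&cpu-time/seconds.count") == \
    "_7_cpu_time_seconds_count"
# ---------------------------------------------------------------------------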
@patch('apptuit.apptuit_client.requests.post')
def test_prometheus_sanitizer_of_reporter_disabled(mock_post):
"""
Test that the reporter's sanitizer is disabled when sanitize_mode is None
"""
mock_post.return_value.status_code = 200
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=tags)
unicode_counter = registry.counter(u'abc.日本語')
unicode_counter.inc(1)
dps = reporter._collect_data_points(reporter.registry)
payload = reporter.client._create_payload_from_datapoints(dps)
assert_equals(payload[0]['metric'], u'abc.日本語.count')
assert_equals(payload[0]['value'], 1)
registry.clear()
cput = registry.counter("cpu.time")
cput.inc(1)
dps = reporter._collect_data_points(reporter.registry)
payload = reporter.client._create_payload_from_datapoints(dps)
assert_equals(len(payload), 1)
assert_equals(payload[0]['metric'], "cpu.time.count")
assert_equals(payload[0]['value'], 1)
reporter.report_now()
dps = reporter._collect_data_points(reporter._meta_metrics_registry)
payload = reporter.client._create_payload_from_datapoints(dps)
payload = sorted(payload, key=lambda x: x['metric'])
assert_equals(len(dps), 18)
assert_equals(payload[0]['metric'], "apptuit.reporter.send.failed.count")
assert_equals(payload[1]['metric'], "apptuit.reporter.send.successful.count")
assert_equals(payload[11]['metric'], "apptuit.reporter.send.time.count")
assert_equals(payload[17]['metric'], "apptuit.reporter.send.total.count")
@patch('apptuit.apptuit_client.requests.post')
def test_apptuit_sanitizer_of_reporter(mock_post):
"""
Test that apptuit_sanitizer of reporter works
"""
mock_post.return_value.status_code = 200
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region-loc": "us-east-1", "service.type/name": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode="apptuit",
registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=tags, )
assert_equals(reporter.client.sanitizer, sanitize_name_apptuit)
unicode_counter = registry.counter(u'abc.日本語')
unicode_counter.inc(1)
dps = reporter._collect_data_points(reporter.registry)
payload = reporter.client._create_payload_from_datapoints(dps)
assert_equals(payload[0]['metric'], u'abc.日本語.count')
assert_equals(payload[0]['value'], 1)
registry.clear()
cput = registry.counter('7&&cpu-time/seconds{"total-%": "100"}')
cput.inc(1)
dps = reporter._collect_data_points(reporter.registry)
payload = reporter.client._create_payload_from_datapoints(dps)
assert_equals(len(payload), 1)
assert_equals(payload[0]['metric'], "7_cpu-time/seconds.count")
assert_equals(payload[0]['tags'], {'host': 'localhost', 'region-loc': 'us-east-1',
'service.type/name': 'web-server', 'total-_': '100'})
assert_equals(payload[0]['value'], 1)
reporter.report_now()
dps = reporter._collect_data_points(reporter._meta_metrics_registry)
payload = reporter.client._create_payload_from_datapoints(dps)
assert_equals(len(payload), 18)
payload = sorted(payload, key=lambda x: x['metric'])
assert_equals(payload[0]['metric'], "apptuit.reporter.send.failed.count")
assert_equals(payload[1]['metric'], "apptuit.reporter.send.successful.count")
assert_equals(payload[11]['metric'], "apptuit.reporter.send.time.count")
assert_equals(payload[17]['metric'], "apptuit.reporter.send.total.count")
@patch('apptuit.apptuit_client.requests.post')
def test_reporter_registry_reset(mock_post):
"""
Test that the registry resets if the process id changes
"""
mock_post.return_value.status_code = 200
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=tags, )
cput = registry.counter("cpu.time")
cput.inc(1)
dps = reporter._collect_data_points(reporter.registry)
assert_equals(len(dps), 1)
assert_equals(dps[0].metric, "cpu.time.count")
assert_equals(dps[0].value, 1)
with patch("os.getpid") as patched_getpid:
patched_getpid.return_value = 123
reporter.report_now()
dps = reporter._collect_data_points(reporter.registry)
assert_equals(reporter.pid, 123)
assert_equals(len(dps), 0)
cput = registry.counter("cpu.time")
cput.inc(1)
dps = reporter._collect_data_points(reporter.registry)
assert_equals(len(dps), 1)
assert_equals(dps[0].metric, "cpu.time.count")
assert_equals(dps[0].value, 1)
@patch('apptuit.apptuit_client.requests.post')
def test_reporter_process_metric_names_reset(mock_post):
"""
Test that process metric names reset if the process id changes.
"""
mock_post.return_value.status_code = 200
token = "asdashdsauh_8aeraerf"
tags = {"host": "localhost", "region": "us-east-1", "service": "web-server"}
registry = MetricsRegistry()
reporter = ApptuitReporter(sanitize_mode=None, registry=registry,
api_endpoint="http://localhost",
reporting_interval=1,
token=token,
tags=tags,
collect_process_metrics=True)
for metric_name in reporter.resource_metric_names:
ind = metric_name.find('"worker_id": ' + str(os.getpid()))
assert_not_equal(ind, -1)
for metric_name in reporter.gc_metric_names:
ind = metric_name.find('"worker_id": ' + str(os.getpid()))
assert_not_equal(ind, -1)
for metric_name in reporter.thread_metrics_names:
ind = metric_name.find('"worker_id": ' + str(os.getpid()))
assert_not_equal(ind, -1)
with patch("os.getpid") as patched_getpid:
patched_getpid.return_value = 123
reporter.report_now()
dps = reporter._collect_data_points(reporter.registry)
assert_equals(reporter.pid, 123)
assert_equals(len(dps), 0)
for metric_name in reporter.resource_metric_names:
ind = metric_name.find('"worker_id": 123')
assert_not_equal(ind, -1)
for metric_name in reporter.gc_metric_names:
ind = metric_name.find('"worker_id": 123')
assert_not_equal(ind, -1)
for metric_name in reporter.thread_metrics_names:
ind = metric_name.find('"worker_id": 123')
assert_not_equal(ind, -1)
def test_sanitizer_type():
"""
Test that the sanitizer is selected based on the sanitize_mode parameter
"""
reporter = ApptuitReporter(sanitize_mode=None, token="test")
assert_is_none(reporter.client.sanitizer)
reporter = ApptuitReporter(sanitize_mode="prometheus", token="test")
assert_equals(reporter.client.sanitizer, sanitize_name_prometheus)
reporter = ApptuitReporter(sanitize_mode="apptuit", token="test")
assert_equals(reporter.client.sanitizer, sanitize_name_apptuit)
reporter = ApptuitReporter(sanitize_mode=None, token="test")
assert_equals(reporter.client.sanitizer, None)
with assert_raises(ValueError):
ApptuitReporter(sanitize_mode="unknown", token="test")
| 42.089779
| 99
| 0.616841
| 3,379
| 30,473
| 5.344481
| 0.074282
| 0.051166
| 0.050833
| 0.062019
| 0.848663
| 0.814552
| 0.79124
| 0.766543
| 0.741735
| 0.706019
| 0
| 0.017918
| 0.263774
| 30,473
| 723
| 100
| 42.147994
| 0.787029
| 0.048896
| 0
| 0.762653
| 0
| 0
| 0.161915
| 0.053714
| 0
| 0
| 0
| 0
| 0.176265
| 1
| 0.048866
| false
| 0
| 0.022688
| 0
| 0.071553
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c8f8f73026a4932039e10be53c9f239043aa2ae
| 57,097
|
py
|
Python
|
generate-grammars/python-awk/python2_actions.py
|
mbaak/histogrammar-python
|
6311f5b0eec9c75f12018f22604535c64675fdf6
|
[
"Apache-2.0"
] | 30
|
2016-09-25T16:36:06.000Z
|
2021-07-20T09:09:09.000Z
|
generate-grammars/python-awk/python2_actions.py
|
mbaak/histogrammar-python
|
6311f5b0eec9c75f12018f22604535c64675fdf6
|
[
"Apache-2.0"
] | 15
|
2016-07-26T19:41:31.000Z
|
2021-02-07T16:30:11.000Z
|
generate-grammars/python-awk/python2_actions.py
|
mbaak/histogrammar-python
|
6311f5b0eec9c75f12018f22604535c64675fdf6
|
[
"Apache-2.0"
] | 8
|
2016-09-19T20:48:37.000Z
|
2021-02-07T15:00:24.000Z
|
#!/usr/bin/env python
actions = {}
asts = []
# hgawk
asts.append('''class DollarNumber(ast.expr):
_fields = ("n",)
def __init__(self, n, **kwds):
self.n = n
self.__dict__.update(kwds)
''')
actions['''atom : DOLLARNUMBER'''] = ''' p[0] = DollarNumber(int(p[1][0][1:]), **p[1][1])'''
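# ---------------------------------------------------------------------------
# Editorial sketch (illustrative; the real generator lives elsewhere in this
# repo): PLY discovers parser functions named p_* and reads each grammar
# production from the function's docstring, so a table like `actions` can be
# turned into parser rules mechanically.  The renderer below handles the
# simple single-statement actions; all names are assumptions.
def _ply_rule_from_action_sketch(index, production, body):
    """Render one PLY rule function as source text."""
    stmts = [ln.strip() for ln in body.splitlines() if ln.strip()]
    src = ["def p_rule_%d(p):" % index, '    """%s"""' % production]
    src += ["    " + stmt for stmt in stmts]
    return "\n".join(src)


assert _ply_rule_from_action_sketch(
    0, "import_from_plus : DOT", " p[0] = 1"
).splitlines()[2] == "    p[0] = 1"
# ---------------------------------------------------------------------------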
# Python
actions['''file_input : ENDMARKER'''] = ''' p[0] = ast.Module([], rule=inspect.currentframe().f_code.co_name, lineno=0, col_offset=0)'''
actions['''file_input : file_input_star ENDMARKER'''] = ''' p[0] = ast.Module(p[1], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1][0])'''
actions['''file_input_star : NEWLINE'''] = ''' p[0] = ast.Module([], rule=inspect.currentframe().f_code.co_name, lineno=0, col_offset=0)'''
actions['''file_input_star : stmt'''] = ''' p[0] = p[1]'''
actions['''file_input_star : file_input_star NEWLINE'''] = ''' p[0] = ast.Module(p[1], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1][0])'''
actions['''file_input_star : file_input_star stmt'''] = ''' p[0] = p[1] + p[2]'''
actions['''decorator : AT dotted_name NEWLINE'''] = ''' p[0] = p[2]
p[0].alt = p[1][1]'''
actions['''decorator : AT dotted_name LPAR RPAR NEWLINE'''] = ''' p[0] = ast.Call(p[2], [], [], None, None, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1][1])'''
actions['''decorator : AT dotted_name LPAR arglist RPAR NEWLINE'''] = ''' p[4].func = p[2]
p[0] = p[4]
inherit_lineno(p[0], p[2])
p[0].alt = p[1][1]'''
actions['''decorators : decorators_plus'''] = ''' p[0] = p[1]'''
actions['''decorators_plus : decorator'''] = ''' p[0] = [p[1]]'''
actions['''decorators_plus : decorators_plus decorator'''] = ''' p[0] = p[1] + [p[2]]'''
actions['''decorated : decorators classdef'''] = ''' p[2].decorator_list = p[1]
p[0] = p[2]
inherit_lineno(p[0], p[1][0])'''
actions['''decorated : decorators funcdef'''] = ''' p[2].decorator_list = p[1]
p[0] = p[2]
inherit_lineno(p[0], p[1][0])'''
actions['''funcdef : DEF NAME parameters COLON suite'''] = ''' p[0] = ast.FunctionDef(p[2][0], p[3], p[5], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''parameters : LPAR RPAR'''] = ''' p[0] = ast.arguments([], None, None, [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''parameters : LPAR varargslist RPAR'''] = ''' p[0] = p[2]'''
actions['''varargslist : fpdef COMMA STAR NAME'''] = ''' p[0] = ast.arguments([p[1]], p[4][0], None, [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef COMMA STAR NAME COMMA DOUBLESTAR NAME'''] = ''' p[0] = ast.arguments([p[1]], p[4][0], p[7][0], [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef COMMA DOUBLESTAR NAME'''] = ''' p[0] = ast.arguments([p[1]], None, p[4][0], [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef'''] = ''' p[0] = ast.arguments([p[1]], None, None, [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef COMMA'''] = ''' p[0] = ast.arguments([p[1]], None, None, [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef varargslist_star COMMA STAR NAME'''] = ''' p[2].args.insert(0, p[1])
p[2].vararg = p[5][0]
p[0] = p[2]'''
actions['''varargslist : fpdef varargslist_star COMMA STAR NAME COMMA DOUBLESTAR NAME'''] = ''' p[2].args.insert(0, p[1])
p[2].vararg = p[5][0]
p[2].kwarg = p[8][0]
p[0] = p[2]'''
actions['''varargslist : fpdef varargslist_star COMMA DOUBLESTAR NAME'''] = ''' p[2].args.insert(0, p[1])
p[2].kwarg = p[5][0]
p[0] = p[2]'''
actions['''varargslist : fpdef varargslist_star'''] = ''' p[2].args.insert(0, p[1])
p[0] = p[2]'''
actions['''varargslist : fpdef varargslist_star COMMA'''] = ''' p[2].args.insert(0, p[1])
p[0] = p[2]'''
actions['''varargslist : fpdef EQUAL test COMMA STAR NAME'''] = ''' p[0] = ast.arguments([p[1]], p[6][0], None, [p[3]], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef EQUAL test COMMA STAR NAME COMMA DOUBLESTAR NAME'''] = ''' p[0] = ast.arguments([p[1]], p[6][0], p[9][0], [p[3]], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef EQUAL test COMMA DOUBLESTAR NAME'''] = ''' p[0] = ast.arguments([p[1]], None, p[6][0], [p[3]], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef EQUAL test'''] = ''' p[0] = ast.arguments([p[1]], None, None, [p[3]], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef EQUAL test COMMA'''] = ''' p[0] = ast.arguments([p[1]], None, None, [p[3]], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''varargslist : fpdef EQUAL test varargslist_star COMMA STAR NAME'''] = ''' p[4].args.insert(0, p[1])
p[4].vararg = p[7][0]
p[4].defaults.insert(0, p[3])
p[0] = p[4]'''
actions['''varargslist : fpdef EQUAL test varargslist_star COMMA STAR NAME COMMA DOUBLESTAR NAME'''] = ''' p[4].args.insert(0, p[1])
p[4].vararg = p[7][0]
p[4].kwarg = p[10][0]
p[4].defaults.insert(0, p[3])
p[0] = p[4]'''
actions['''varargslist : fpdef EQUAL test varargslist_star COMMA DOUBLESTAR NAME'''] = ''' p[4].args.insert(0, p[1])
p[4].kwarg = p[7][0]
p[4].defaults.insert(0, p[3])
p[0] = p[4]'''
actions['''varargslist : fpdef EQUAL test varargslist_star'''] = ''' p[4].args.insert(0, p[1])
p[4].defaults.insert(0, p[3])
p[0] = p[4]'''
actions['''varargslist : fpdef EQUAL test varargslist_star COMMA'''] = ''' p[4].args.insert(0, p[1])
p[4].defaults.insert(0, p[3])
p[0] = p[4]'''
actions['''varargslist : STAR NAME'''] = ''' p[0] = ast.arguments([], p[2][0], None, [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[2][1])'''
actions['''varargslist : STAR NAME COMMA DOUBLESTAR NAME'''] = ''' p[0] = ast.arguments([], p[2][0], p[5][0], [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[2][1])'''
actions['''varargslist : DOUBLESTAR NAME'''] = ''' p[0] = ast.arguments([], None, p[2][0], [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[2][1])'''
actions['''varargslist_star : COMMA fpdef'''] = ''' p[0] = ast.arguments([p[2]], None, None, [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[2])'''
actions['''varargslist_star : COMMA fpdef EQUAL test'''] = ''' p[0] = ast.arguments([p[2]], None, None, [p[4]], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[2])'''
actions['''varargslist_star : varargslist_star COMMA fpdef'''] = ''' p[1].args.append(p[3])
p[0] = p[1]'''
actions['''varargslist_star : varargslist_star COMMA fpdef EQUAL test'''] = ''' p[1].args.append(p[3])
p[1].defaults.append(p[5])
p[0] = p[1]'''
actions['''fpdef : NAME'''] = ''' p[0] = ast.Name(p[1][0], ast.Param(), rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''fpdef : LPAR fplist RPAR'''] = ''' if isinstance(p[2], ast.Tuple):
p[2].paren = True
ctx_to_store(p[2])
p[0] = p[2]'''
actions['''fplist : fpdef'''] = ''' p[0] = p[1]'''
actions['''fplist : fpdef COMMA'''] = ''' p[0] = ast.Tuple([p[1]], ast.Param(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''fplist : fpdef fplist_star'''] = ''' p[2].elts.insert(0, p[1])
p[0] = p[2]
inherit_lineno(p[0], p[1])'''
actions['''fplist : fpdef fplist_star COMMA'''] = ''' p[2].elts.insert(0, p[1])
p[0] = p[2]
inherit_lineno(p[0], p[1])'''
actions['''fplist_star : COMMA fpdef'''] = ''' p[0] = ast.Tuple([p[2]], ast.Param(), rule=inspect.currentframe().f_code.co_name, paren=False)'''
actions['''fplist_star : fplist_star COMMA fpdef'''] = ''' p[1].elts.append(p[3])
p[0] = p[1]'''
actions['''stmt : simple_stmt'''] = ''' p[0] = p[1]'''
actions['''stmt : compound_stmt'''] = ''' p[0] = p[1]'''
actions['''simple_stmt : small_stmt NEWLINE'''] = ''' p[0] = [p[1]]'''
actions['''simple_stmt : small_stmt SEMI NEWLINE'''] = ''' p[0] = [p[1]]'''
actions['''simple_stmt : small_stmt simple_stmt_star NEWLINE'''] = ''' p[0] = [p[1]] + p[2]'''
actions['''simple_stmt : small_stmt simple_stmt_star SEMI NEWLINE'''] = ''' p[0] = [p[1]] + p[2]'''
actions['''simple_stmt_star : SEMI small_stmt'''] = ''' p[0] = [p[2]]'''
actions['''simple_stmt_star : simple_stmt_star SEMI small_stmt'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''small_stmt : expr_stmt'''] = ''' p[0] = p[1]'''
actions['''small_stmt : print_stmt'''] = ''' p[0] = p[1]'''
actions['''small_stmt : del_stmt'''] = ''' p[0] = p[1]'''
actions['''small_stmt : pass_stmt'''] = ''' p[0] = p[1]'''
actions['''small_stmt : flow_stmt'''] = ''' p[0] = p[1]'''
actions['''small_stmt : import_stmt'''] = ''' p[0] = p[1]'''
actions['''small_stmt : global_stmt'''] = ''' p[0] = p[1]'''
actions['''small_stmt : exec_stmt'''] = ''' p[0] = p[1]'''
actions['''small_stmt : assert_stmt'''] = ''' p[0] = p[1]'''
actions['''expr_stmt : testlist augassign yield_expr'''] = ''' ctx_to_store(p[1])
p[0] = ast.AugAssign(p[1], p[2], p[3], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''expr_stmt : testlist augassign testlist'''] = ''' ctx_to_store(p[1])
p[0] = ast.AugAssign(p[1], p[2], p[3], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''expr_stmt : testlist'''] = ''' p[0] = ast.Expr(p[1], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''expr_stmt : testlist expr_stmt_star'''] = ''' everything = [p[1]] + p[2]
targets, value = everything[:-1], everything[-1]
ctx_to_store(targets)
p[0] = ast.Assign(targets, value, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], targets[0])'''
actions['''expr_stmt_star : EQUAL yield_expr'''] = ''' p[0] = [p[2]]'''
actions['''expr_stmt_star : EQUAL testlist'''] = ''' p[0] = [p[2]]'''
actions['''expr_stmt_star : expr_stmt_star EQUAL yield_expr'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''expr_stmt_star : expr_stmt_star EQUAL testlist'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''augassign : PLUSEQUAL'''] = ''' p[0] = ast.Add(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : MINEQUAL'''] = ''' p[0] = ast.Sub(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : STAREQUAL'''] = ''' p[0] = ast.Mult(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : SLASHEQUAL'''] = ''' p[0] = ast.Div(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : PERCENTEQUAL'''] = ''' p[0] = ast.Mod(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : AMPEREQUAL'''] = ''' p[0] = ast.BitAnd(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : VBAREQUAL'''] = ''' p[0] = ast.BitOr(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : CIRCUMFLEXEQUAL'''] = ''' p[0] = ast.BitXor(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : LEFTSHIFTEQUAL'''] = ''' p[0] = ast.LShift(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : RIGHTSHIFTEQUAL'''] = ''' p[0] = ast.RShift(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : DOUBLESTAREQUAL'''] = ''' p[0] = ast.Pow(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''augassign : DOUBLESLASHEQUAL'''] = ''' p[0] = ast.FloorDiv(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt : PRINT'''] = ''' p[0] = ast.Print(None, [], True, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt : PRINT test'''] = ''' p[0] = ast.Print(None, [p[2]], True, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt : PRINT test COMMA'''] = ''' p[0] = ast.Print(None, [p[2]], False, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt : PRINT test print_stmt_plus'''] = ''' p[0] = ast.Print(None, [p[2]] + p[3], True, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt : PRINT test print_stmt_plus COMMA'''] = ''' p[0] = ast.Print(None, [p[2]] + p[3], False, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt : PRINT RIGHTSHIFT test'''] = ''' p[0] = ast.Print(p[3], [], True, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt : PRINT RIGHTSHIFT test print_stmt_plus'''] = ''' p[0] = ast.Print(p[3], p[4], True, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt : PRINT RIGHTSHIFT test print_stmt_plus COMMA'''] = ''' p[0] = ast.Print(p[3], p[4], False, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''print_stmt_plus : COMMA test'''] = ''' p[0] = [p[2]]'''
actions['''print_stmt_plus : print_stmt_plus COMMA test'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''del_stmt : DEL exprlist'''] = ''' ctx_to_store(p[2], ast.Del) # interesting fact: evaluating Delete nodes with ctx=Store() causes a segmentation fault in Python!
if isinstance(p[2], ast.Tuple) and not p[2].paren:
p[0] = ast.Delete(p[2].elts, rule=inspect.currentframe().f_code.co_name, **p[1][1])
else:
p[0] = ast.Delete([p[2]], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''pass_stmt : PASS'''] = ''' p[0] = ast.Pass(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''flow_stmt : break_stmt'''] = ''' p[0] = p[1]'''
actions['''flow_stmt : continue_stmt'''] = ''' p[0] = p[1]'''
actions['''flow_stmt : return_stmt'''] = ''' p[0] = p[1]'''
actions['''flow_stmt : raise_stmt'''] = ''' p[0] = p[1]'''
actions['''flow_stmt : yield_stmt'''] = ''' p[0] = ast.Expr(p[1], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''break_stmt : BREAK'''] = ''' p[0] = ast.Break(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''continue_stmt : CONTINUE'''] = ''' p[0] = ast.Continue(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''return_stmt : RETURN'''] = ''' p[0] = ast.Return(None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''return_stmt : RETURN testlist'''] = ''' p[0] = ast.Return(p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''yield_stmt : yield_expr'''] = ''' p[0] = p[1]'''
actions['''raise_stmt : RAISE'''] = ''' p[0] = ast.Raise(None, None, None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''raise_stmt : RAISE test'''] = ''' p[0] = ast.Raise(p[2], None, None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''raise_stmt : RAISE test COMMA test'''] = ''' p[0] = ast.Raise(p[2], p[4], None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''raise_stmt : RAISE test COMMA test COMMA test'''] = ''' p[0] = ast.Raise(p[2], p[4], p[6], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_stmt : import_name'''] = ''' p[0] = p[1]'''
actions['''import_stmt : import_from'''] = ''' p[0] = p[1]'''
actions['''import_name : IMPORT dotted_as_names'''] = ''' p[0] = ast.Import(p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM dotted_name IMPORT STAR'''] = ''' dotted = []
last = p[2]
while isinstance(last, ast.Attribute):
dotted.insert(0, last.attr)
last = last.value
dotted.insert(0, last.id)
p[0] = ast.ImportFrom(".".join(dotted), [ast.alias("*", None, rule=inspect.currentframe().f_code.co_name, **p[3][1])], 0, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM dotted_name IMPORT LPAR import_as_names RPAR'''] = ''' dotted = []
last = p[2]
while isinstance(last, ast.Attribute):
dotted.insert(0, last.attr)
last = last.value
dotted.insert(0, last.id)
p[0] = ast.ImportFrom(".".join(dotted), p[5], 0, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM dotted_name IMPORT import_as_names'''] = ''' dotted = []
last = p[2]
while isinstance(last, ast.Attribute):
dotted.insert(0, last.attr)
last = last.value
dotted.insert(0, last.id)
p[0] = ast.ImportFrom(".".join(dotted), p[4], 0, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM import_from_plus dotted_name IMPORT STAR'''] = ''' dotted = []
                                 last = p[3]
                                 while isinstance(last, ast.Attribute):
                                     dotted.insert(0, last.attr)
                                     last = last.value
                                 dotted.insert(0, last.id)
                                 p[0] = ast.ImportFrom(".".join(dotted), [ast.alias("*", None, rule=inspect.currentframe().f_code.co_name, **p[4][1])], p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM import_from_plus dotted_name IMPORT LPAR import_as_names RPAR'''] = ''' dotted = []
last = p[3]
while isinstance(last, ast.Attribute):
dotted.insert(0, last.attr)
last = last.value
dotted.insert(0, last.id)
p[0] = ast.ImportFrom(".".join(dotted), p[6], p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM import_from_plus dotted_name IMPORT import_as_names'''] = ''' dotted = []
last = p[3]
while isinstance(last, ast.Attribute):
dotted.insert(0, last.attr)
last = last.value
dotted.insert(0, last.id)
p[0] = ast.ImportFrom(".".join(dotted), p[5], p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM import_from_plus IMPORT STAR'''] = ''' p[0] = ast.ImportFrom(None, [ast.alias("*", None, rule=inspect.currentframe().f_code.co_name, **p[3][1])], p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM import_from_plus IMPORT LPAR import_as_names RPAR'''] = ''' p[0] = ast.ImportFrom(None, p[5], p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from : FROM import_from_plus IMPORT import_as_names'''] = ''' p[0] = ast.ImportFrom(None, p[4], p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_from_plus : DOT'''] = ''' p[0] = 1'''
actions['''import_from_plus : import_from_plus DOT'''] = ''' p[0] = p[1] + 1'''
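# Note: import_from_plus simply counts leading dots, and that count becomes the
# `level` argument of ast.ImportFrom in the actions above, so
# "from ..pkg import x" is built with level=2 (a relative import two packages up).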
actions['''import_as_name : NAME'''] = ''' p[0] = ast.alias(p[1][0], None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''import_as_name : NAME AS NAME'''] = ''' p[0] = ast.alias(p[1][0], p[3][0], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''dotted_as_name : dotted_name'''] = ''' dotted = []
last = p[1]
while isinstance(last, ast.Attribute):
dotted.insert(0, last.attr)
last = last.value
dotted.insert(0, last.id)
p[0] = ast.alias(".".join(dotted), None, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''dotted_as_name : dotted_name AS NAME'''] = ''' dotted = []
last = p[1]
while isinstance(last, ast.Attribute):
dotted.insert(0, last.attr)
last = last.value
dotted.insert(0, last.id)
p[0] = ast.alias(".".join(dotted), p[3][0], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''import_as_names : import_as_name'''] = ''' p[0] = [p[1]]'''
actions['''import_as_names : import_as_name COMMA'''] = ''' p[0] = [p[1]]'''
actions['''import_as_names : import_as_name import_as_names_star'''] = ''' p[0] = [p[1]] + p[2]'''
actions['''import_as_names : import_as_name import_as_names_star COMMA'''] = ''' p[0] = [p[1]] + p[2]'''
actions['''import_as_names_star : COMMA import_as_name'''] = ''' p[0] = [p[2]]'''
actions['''import_as_names_star : import_as_names_star COMMA import_as_name'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''dotted_as_names : dotted_as_name'''] = ''' p[0] = [p[1]]'''
actions['''dotted_as_names : dotted_as_name dotted_as_names_star'''] = ''' p[0] = [p[1]] + p[2]'''
actions['''dotted_as_names_star : COMMA dotted_as_name'''] = ''' p[0] = [p[2]]'''
actions['''dotted_as_names_star : dotted_as_names_star COMMA dotted_as_name'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''dotted_name : NAME'''] = ''' p[0] = ast.Name(p[1][0], ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''dotted_name : NAME dotted_name_star'''] = ''' last = p[2]
if isinstance(last, ast.Attribute):
inherit_lineno(last, p[1][1])
while isinstance(last.value, ast.Attribute):
last = last.value
inherit_lineno(last, p[1][1])
last.value = ast.Attribute(ast.Name(p[1][0], ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1]), last.value, ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1])
p[0] = p[2]
else:
p[0] = ast.Attribute(ast.Name(p[1][0], ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''dotted_name_star : DOT NAME'''] = ''' p[0] = p[2][0]'''
actions['''dotted_name_star : dotted_name_star DOT NAME'''] = ''' p[0] = ast.Attribute(p[1], p[3][0], ast.Load(), rule=inspect.currentframe().f_code.co_name)'''
actions['''global_stmt : GLOBAL NAME'''] = ''' p[0] = ast.Global([p[2][0]], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''global_stmt : GLOBAL NAME global_stmt_star'''] = ''' p[0] = ast.Global([p[2][0]] + p[3], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''global_stmt_star : COMMA NAME'''] = ''' p[0] = [p[2][0]]'''
actions['''global_stmt_star : global_stmt_star COMMA NAME'''] = ''' p[0] = p[1] + [p[3][0]]'''
actions['''exec_stmt : EXEC expr'''] = ''' p[0] = ast.Exec(p[2], None, None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''exec_stmt : EXEC expr IN test'''] = ''' p[0] = ast.Exec(p[2], p[4], None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''exec_stmt : EXEC expr IN test COMMA test'''] = ''' p[0] = ast.Exec(p[2], p[4], p[6], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''assert_stmt : ASSERT test'''] = ''' p[0] = ast.Assert(p[2], None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''assert_stmt : ASSERT test COMMA test'''] = ''' p[0] = ast.Assert(p[2], p[4], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''compound_stmt : if_stmt'''] = ''' p[0] = [p[1]]'''
actions['''compound_stmt : while_stmt'''] = ''' p[0] = [p[1]]'''
actions['''compound_stmt : for_stmt'''] = ''' p[0] = [p[1]]'''
actions['''compound_stmt : try_stmt'''] = ''' p[0] = [p[1]]'''
actions['''compound_stmt : with_stmt'''] = ''' p[0] = [p[1]]'''
actions['''compound_stmt : funcdef'''] = ''' p[0] = [p[1]]'''
actions['''compound_stmt : classdef'''] = ''' p[0] = [p[1]]'''
actions['''compound_stmt : decorated'''] = ''' p[0] = [p[1]]'''
actions['''if_stmt : IF test COLON suite'''] = ''' p[0] = ast.If(p[2], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''if_stmt : IF test COLON suite ELSE COLON suite'''] = ''' p[0] = ast.If(p[2], p[4], p[7], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''if_stmt : IF test COLON suite if_stmt_star'''] = ''' p[0] = ast.If(p[2], p[4], [p[5]], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''if_stmt : IF test COLON suite if_stmt_star ELSE COLON suite'''] = ''' last = p[5]
while len(last.orelse) > 0:
last = last.orelse[0]
last.orelse.extend(p[8])
p[0] = ast.If(p[2], p[4], [p[5]], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''if_stmt_star : ELIF test COLON suite'''] = ''' p[0] = ast.If(p[2], p[4], [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[2])'''
actions['''if_stmt_star : if_stmt_star ELIF test COLON suite'''] = ''' last = p[1]
while len(last.orelse) > 0:
last = last.orelse[0]
last.orelse.append(ast.If(p[3], p[5], [], rule=inspect.currentframe().f_code.co_name))
inherit_lineno(last.orelse[-1], p[3])
p[0] = p[1]'''
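# Note: "elif" has no AST node of its own; the if_stmt_star actions above nest
# each ELIF clause into the orelse of the previous If, so
#     if a: x
#     elif b: y
#     else: z
# becomes If(a, [x], [If(b, [y], [z])]).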
actions['''while_stmt : WHILE test COLON suite'''] = ''' p[0] = ast.While(p[2], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''while_stmt : WHILE test COLON suite ELSE COLON suite'''] = ''' p[0] = ast.While(p[2], p[4], p[7], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''for_stmt : FOR exprlist IN testlist COLON suite'''] = ''' ctx_to_store(p[2])
p[0] = ast.For(p[2], p[4], p[6], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''for_stmt : FOR exprlist IN testlist COLON suite ELSE COLON suite'''] = ''' ctx_to_store(p[2])
p[0] = ast.For(p[2], p[4], p[6], p[9], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''try_stmt : TRY COLON suite try_stmt_plus'''] = ''' p[0] = ast.TryExcept(p[3], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''try_stmt : TRY COLON suite try_stmt_plus FINALLY COLON suite'''] = ''' p[0] = ast.TryFinally([ast.TryExcept(p[3], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])], p[7], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''try_stmt : TRY COLON suite try_stmt_plus ELSE COLON suite'''] = ''' p[0] = ast.TryExcept(p[3], p[4], p[7], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''try_stmt : TRY COLON suite try_stmt_plus ELSE COLON suite FINALLY COLON suite'''] = ''' p[0] = ast.TryFinally([ast.TryExcept(p[3], p[4], p[7], rule=inspect.currentframe().f_code.co_name, **p[1][1])], p[10], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''try_stmt : TRY COLON suite FINALLY COLON suite'''] = ''' p[0] = ast.TryFinally(p[3], p[6], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''try_stmt_plus : except_clause COLON suite'''] = ''' p[1].body = p[3]
p[0] = [p[1]]'''
actions['''try_stmt_plus : try_stmt_plus except_clause COLON suite'''] = ''' p[2].body = p[4]
p[0] = p[1] + [p[2]]'''
actions['''with_stmt : WITH with_item COLON suite'''] = ''' p[2].body = p[4]
p[0] = p[2]'''
actions['''with_stmt : WITH with_item with_stmt_star COLON suite'''] = ''' p[2].body.append(p[3])
last = p[2]
while len(last.body) > 0:
last = last.body[0]
last.body = p[5]
p[0] = p[2]'''
actions['''with_stmt_star : COMMA with_item'''] = ''' p[0] = p[2]'''
actions['''with_stmt_star : with_stmt_star COMMA with_item'''] = ''' last = p[1]
while len(last.body) > 0:
last = last.body[0]
last.body.append(p[3])
p[0] = p[1]'''
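# Note: Python 2's AST has no multi-item With node, so the actions above chain
# "with a, b: body" into nested nodes, With(a, ..., [With(b, ..., body)]),
# threading the suite down through the innermost .body.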
actions['''with_item : test'''] = ''' p[0] = ast.With(p[1], None, [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''with_item : test AS expr'''] = ''' ctx_to_store(p[3])
p[0] = ast.With(p[1], p[3], [], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''except_clause : EXCEPT'''] = ''' p[0] = ast.ExceptHandler(None, None, [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''except_clause : EXCEPT test'''] = ''' p[0] = ast.ExceptHandler(p[2], None, [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''except_clause : EXCEPT test AS test'''] = ''' ctx_to_store(p[4])
p[0] = ast.ExceptHandler(p[2], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''except_clause : EXCEPT test COMMA test'''] = ''' ctx_to_store(p[4])
p[0] = ast.ExceptHandler(p[2], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''suite : simple_stmt'''] = ''' p[0] = p[1]'''
actions['''suite : NEWLINE INDENT suite_plus DEDENT'''] = ''' p[0] = p[3]'''
actions['''suite_plus : stmt'''] = ''' p[0] = p[1]'''
actions['''suite_plus : suite_plus stmt'''] = ''' p[0] = p[1] + p[2]'''
actions['''testlist_safe : old_test'''] = ''' p[0] = p[1]'''
actions['''testlist_safe : old_test testlist_safe_plus'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist_safe : old_test testlist_safe_plus COMMA'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist_safe_plus : COMMA old_test'''] = ''' p[0] = [p[2]]'''
actions['''testlist_safe_plus : testlist_safe_plus COMMA old_test'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''old_test : or_test'''] = ''' p[0] = p[1]'''
actions['''old_test : old_lambdef'''] = ''' p[0] = p[1]'''
actions['''old_lambdef : LAMBDA COLON old_test'''] = ''' p[0] = ast.Lambda(ast.arguments([], None, None, [], rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''old_lambdef : LAMBDA varargslist COLON old_test'''] = ''' p[0] = ast.Lambda(p[2], p[4], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''test : or_test'''] = ''' p[0] = p[1]'''
actions['''test : or_test IF or_test ELSE test'''] = ''' p[0] = ast.IfExp(p[3], p[1], p[5], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''test : lambdef'''] = ''' p[0] = p[1]'''
actions['''or_test : and_test'''] = ''' p[0] = p[1]'''
actions['''or_test : and_test or_test_star'''] = ''' theor = ast.Or(rule=inspect.currentframe().f_code.co_name)
inherit_lineno(theor, p[2][0])
p[0] = ast.BoolOp(theor, [p[1]] + p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''or_test_star : OR and_test'''] = ''' p[0] = [p[2]]'''
actions['''or_test_star : or_test_star OR and_test'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''and_test : not_test'''] = ''' p[0] = p[1]'''
actions['''and_test : not_test and_test_star'''] = ''' theand = ast.And(rule=inspect.currentframe().f_code.co_name)
inherit_lineno(theand, p[2][0])
p[0] = ast.BoolOp(theand, [p[1]] + p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''and_test_star : AND not_test'''] = ''' p[0] = [p[2]]'''
actions['''and_test_star : and_test_star AND not_test'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''not_test : NOT not_test'''] = ''' thenot = ast.Not(rule=inspect.currentframe().f_code.co_name)
inherit_lineno(thenot, p[2])
p[0] = ast.UnaryOp(thenot, p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''not_test : comparison'''] = ''' p[0] = p[1]'''
actions['''comparison : expr'''] = ''' p[0] = p[1]'''
actions['''comparison : expr comparison_star'''] = ''' ops, exprs = p[2]
p[0] = ast.Compare(p[1], ops, exprs, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''comparison_star : comp_op expr'''] = ''' inherit_lineno(p[1], p[2])
p[0] = ([p[1]], [p[2]])'''
actions['''comparison_star : comparison_star comp_op expr'''] = ''' ops, exprs = p[1]
inherit_lineno(p[2], p[3])
p[0] = (ops + [p[2]], exprs + [p[3]])'''
actions['''comp_op : LESS'''] = ''' p[0] = ast.Lt(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : GREATER'''] = ''' p[0] = ast.Gt(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : EQEQUAL'''] = ''' p[0] = ast.Eq(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : GREATEREQUAL'''] = ''' p[0] = ast.GtE(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : LESSEQUAL'''] = ''' p[0] = ast.LtE(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : NOTEQUAL'''] = ''' p[0] = ast.NotEq(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : IN'''] = ''' p[0] = ast.In(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : NOT IN'''] = ''' p[0] = ast.NotIn(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : IS'''] = ''' p[0] = ast.Is(rule=inspect.currentframe().f_code.co_name)'''
actions['''comp_op : IS NOT'''] = ''' p[0] = ast.IsNot(rule=inspect.currentframe().f_code.co_name)'''
actions['''expr : xor_expr'''] = ''' p[0] = p[1]'''
actions['''expr : xor_expr expr_star'''] = ''' p[0] = unwrap_left_associative([p[1]] + p[2], rule=inspect.currentframe().f_code.co_name, alt=len(p[2]) > 2)'''
actions['''expr_star : VBAR xor_expr'''] = ''' p[0] = [ast.BitOr(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''expr_star : expr_star VBAR xor_expr'''] = ''' p[0] = p[1] + [ast.BitOr(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''xor_expr : and_expr'''] = ''' p[0] = p[1]'''
actions['''xor_expr : and_expr xor_expr_star'''] = ''' p[0] = unwrap_left_associative([p[1]] + p[2], rule=inspect.currentframe().f_code.co_name, alt=len(p[2]) > 2)'''
actions['''xor_expr_star : CIRCUMFLEX and_expr'''] = ''' p[0] = [ast.BitXor(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''xor_expr_star : xor_expr_star CIRCUMFLEX and_expr'''] = ''' p[0] = p[1] + [ast.BitXor(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''and_expr : shift_expr'''] = ''' p[0] = p[1]'''
actions['''and_expr : shift_expr and_expr_star'''] = ''' p[0] = unwrap_left_associative([p[1]] + p[2], rule=inspect.currentframe().f_code.co_name, alt=len(p[2]) > 0)'''
actions['''and_expr_star : AMPER shift_expr'''] = ''' p[0] = [ast.BitAnd(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''and_expr_star : and_expr_star AMPER shift_expr'''] = ''' p[0] = p[1] + [ast.BitAnd(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''shift_expr : arith_expr'''] = ''' p[0] = p[1]'''
actions['''shift_expr : arith_expr shift_expr_star'''] = ''' p[0] = unwrap_left_associative([p[1]] + p[2], rule=inspect.currentframe().f_code.co_name, alt=len(p[2]) > 2)'''
actions['''shift_expr_star : LEFTSHIFT arith_expr'''] = ''' p[0] = [ast.LShift(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''shift_expr_star : RIGHTSHIFT arith_expr'''] = ''' p[0] = [ast.RShift(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''shift_expr_star : shift_expr_star LEFTSHIFT arith_expr'''] = ''' p[0] = p[1] + [ast.LShift(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''shift_expr_star : shift_expr_star RIGHTSHIFT arith_expr'''] = ''' p[0] = p[1] + [ast.RShift(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''arith_expr : term'''] = ''' p[0] = p[1]'''
actions['''arith_expr : term arith_expr_star'''] = ''' p[0] = unwrap_left_associative([p[1]] + p[2], rule=inspect.currentframe().f_code.co_name, alt=len(p[2]) > 2)'''
actions['''arith_expr_star : PLUS term'''] = ''' p[0] = [ast.Add(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''arith_expr_star : MINUS term'''] = ''' p[0] = [ast.Sub(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''arith_expr_star : arith_expr_star PLUS term'''] = ''' p[0] = p[1] + [ast.Add(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''arith_expr_star : arith_expr_star MINUS term'''] = ''' p[0] = p[1] + [ast.Sub(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''term : factor'''] = ''' p[0] = p[1]'''
actions['''term : factor term_star'''] = ''' p[0] = unwrap_left_associative([p[1]] + p[2], rule=inspect.currentframe().f_code.co_name, alt=len(p[2]) > 2)'''
actions['''term_star : STAR factor'''] = ''' p[0] = [ast.Mult(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''term_star : SLASH factor'''] = ''' p[0] = [ast.Div(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''term_star : PERCENT factor'''] = ''' p[0] = [ast.Mod(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''term_star : DOUBLESLASH factor'''] = ''' p[0] = [ast.FloorDiv(rule=inspect.currentframe().f_code.co_name, **p[1][1]), p[2]]'''
actions['''term_star : term_star STAR factor'''] = ''' p[0] = p[1] + [ast.Mult(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''term_star : term_star SLASH factor'''] = ''' p[0] = p[1] + [ast.Div(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''term_star : term_star PERCENT factor'''] = ''' p[0] = p[1] + [ast.Mod(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''term_star : term_star DOUBLESLASH factor'''] = ''' p[0] = p[1] + [ast.FloorDiv(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3]]'''
actions['''factor : PLUS factor'''] = ''' op = ast.UAdd(rule=inspect.currentframe().f_code.co_name, **p[1][1])
p[0] = ast.UnaryOp(op, p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], op)'''
actions['''factor : MINUS factor'''] = ''' if isinstance(p[2], ast.Num) and not hasattr(p[2], "unary"):
p[2].n *= -1
p[0] = p[2]
p[0].unary = True
inherit_lineno(p[0], p[1][1])
else:
op = ast.USub(rule=inspect.currentframe().f_code.co_name, **p[1][1])
p[0] = ast.UnaryOp(op, p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], op)'''
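# Note: the MINUS action above folds a unary minus directly into a numeric
# literal (Num(-5) rather than UnaryOp(USub, Num(5))) and tags the node with a
# .unary attribute so a second minus is not folded again: --5 parses as
# UnaryOp(USub, Num(-5)).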
actions['''factor : TILDE factor'''] = ''' op = ast.Invert(rule=inspect.currentframe().f_code.co_name, **p[1][1])
p[0] = ast.UnaryOp(op, p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], op)'''
actions['''factor : power'''] = ''' p[0] = p[1]'''
actions['''power : atom'''] = ''' p[0] = p[1]'''
actions['''power : atom DOUBLESTAR factor'''] = ''' p[0] = ast.BinOp(p[1], ast.Pow(rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''power : atom power_star'''] = ''' p[0] = unpack_trailer(p[1], p[2])'''
actions['''power : atom power_star DOUBLESTAR factor'''] = ''' p[0] = ast.BinOp(unpack_trailer(p[1], p[2]), ast.Pow(rule=inspect.currentframe().f_code.co_name, **p[3][1]), p[4], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''power_star : trailer'''] = ''' p[0] = [p[1]]'''
actions['''power_star : power_star trailer'''] = ''' p[0] = p[1] + [p[2]]'''
actions['''atom : LPAR RPAR'''] = ''' p[0] = ast.Tuple([], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=True, **p[1][1])'''
actions['''atom : LPAR yield_expr RPAR'''] = ''' p[0] = p[2]
if isinstance(p[0], ast.Tuple):
p[0].paren = True
p[0].alt = p[1][1]'''
actions['''atom : LPAR testlist_comp RPAR'''] = ''' p[0] = p[2]
if isinstance(p[0], ast.Tuple):
p[0].paren = True
p[0].alt = p[1][1]'''
actions['''atom : LSQB RSQB'''] = ''' p[0] = ast.List([], ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''atom : LSQB listmaker RSQB'''] = ''' if isinstance(p[2], ast.ListComp):
p[0] = p[2]
p[0].alt = p[1][1]
else:
p[0] = ast.List(p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''atom : LBRACE RBRACE'''] = ''' p[0] = ast.Dict([], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''atom : LBRACE dictorsetmaker RBRACE'''] = ''' if isinstance(p[2], (ast.SetComp, ast.DictComp)):
p[0] = p[2]
p[0].alt = p[1][1]
else:
keys, values = p[2]
if keys is None:
p[0] = ast.Set(values, rule=inspect.currentframe().f_code.co_name, **p[1][1])
else:
p[0] = ast.Dict(keys, values, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''atom : BACKQUOTE testlist1 BACKQUOTE'''] = ''' p[0] = ast.Repr(p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''atom : NAME'''] = ''' p[0] = ast.Name(p[1][0], ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''atom : NUMBER'''] = ''' p[0] = ast.Num(p[1][0], rule=inspect.currentframe().f_code.co_name, **p[1][2])'''
actions['''atom : atom_plus'''] = ''' p[0] = p[1]'''
actions['''atom_plus : STRING'''] = ''' p[0] = ast.Str(p[1][0], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''atom_plus : atom_plus STRING'''] = ''' p[1].s = p[1].s + p[2][0]
p[0] = p[1]'''
actions['''listmaker : test list_for'''] = ''' p[0] = ast.ListComp(p[1], p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''listmaker : test'''] = ''' p[0] = [p[1]]'''
actions['''listmaker : test COMMA'''] = ''' p[0] = [p[1]]'''
actions['''listmaker : test listmaker_star'''] = ''' p[0] = [p[1]] + p[2]'''
actions['''listmaker : test listmaker_star COMMA'''] = ''' p[0] = [p[1]] + p[2]'''
actions['''listmaker_star : COMMA test'''] = ''' p[0] = [p[2]]'''
actions['''listmaker_star : listmaker_star COMMA test'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''testlist_comp : test comp_for'''] = ''' p[0] = ast.GeneratorExp(p[1], p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''testlist_comp : test'''] = ''' p[0] = p[1]'''
actions['''testlist_comp : test COMMA'''] = ''' p[0] = ast.Tuple([p[1]], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist_comp : test testlist_comp_star'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist_comp : test testlist_comp_star COMMA'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist_comp_star : COMMA test'''] = ''' p[0] = [p[2]]'''
actions['''testlist_comp_star : testlist_comp_star COMMA test'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''lambdef : LAMBDA COLON test'''] = ''' p[0] = ast.Lambda(ast.arguments([], None, None, [], rule=inspect.currentframe().f_code.co_name, **p[2][1]), p[3], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''lambdef : LAMBDA varargslist COLON test'''] = ''' p[0] = ast.Lambda(p[2], p[4], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''trailer : LPAR RPAR'''] = ''' p[0] = ast.Call(None, [], [], None, None, rule=inspect.currentframe().f_code.co_name)'''
actions['''trailer : LPAR arglist RPAR'''] = ''' p[0] = p[2]'''
actions['''trailer : LSQB subscriptlist RSQB'''] = ''' p[0] = ast.Subscript(None, p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name)'''
actions['''trailer : DOT NAME'''] = ''' p[0] = ast.Attribute(None, p[2][0], ast.Load(), rule=inspect.currentframe().f_code.co_name)'''
actions['''subscriptlist : subscript'''] = ''' p[0] = p[1]'''
actions['''subscriptlist : subscript COMMA'''] = ''' if isinstance(p[1], ast.Index):
tup = ast.Tuple([p[1].value], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(tup, p[1].value)
p[0] = ast.Index(tup, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], tup)
else:
p[0] = ast.ExtSlice([p[1]], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''subscriptlist : subscript subscriptlist_star'''] = ''' args = [p[1]] + p[2]
if all(isinstance(x, ast.Index) for x in args):
tup = ast.Tuple([x.value for x in args], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(tup, args[0].value)
p[0] = ast.Index(tup, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], tup)
else:
p[0] = ast.ExtSlice(args, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''subscriptlist : subscript subscriptlist_star COMMA'''] = ''' args = [p[1]] + p[2]
if all(isinstance(x, ast.Index) for x in args):
tup = ast.Tuple([x.value for x in args], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(tup, args[0].value)
p[0] = ast.Index(tup, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], tup)
else:
p[0] = ast.ExtSlice(args, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''subscriptlist_star : COMMA subscript'''] = ''' p[0] = [p[2]]'''
actions['''subscriptlist_star : subscriptlist_star COMMA subscript'''] = ''' p[0] = p[1] + [p[3]]'''
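# Note: a subscript list collapses to Index(Tuple(...)) only when every item is
# a plain index (a[i, j]); as soon as any slice appears (a[i, 1:2]) the actions
# above build an ExtSlice instead.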
actions['''subscript : DOT DOT DOT'''] = ''' p[0] = ast.Ellipsis(rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''subscript : test'''] = ''' p[0] = ast.Index(p[1], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''subscript : COLON'''] = ''' p[0] = ast.Slice(None, None, None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''subscript : COLON sliceop'''] = ''' p[0] = ast.Slice(None, None, p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''subscript : COLON test'''] = ''' p[0] = ast.Slice(None, p[2], None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''subscript : COLON test sliceop'''] = ''' p[0] = ast.Slice(None, p[2], p[3], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''subscript : test COLON'''] = ''' p[0] = ast.Slice(p[1], None, None, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''subscript : test COLON sliceop'''] = ''' p[0] = ast.Slice(p[1], None, p[3], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''subscript : test COLON test'''] = ''' p[0] = ast.Slice(p[1], p[3], None, rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''subscript : test COLON test sliceop'''] = ''' p[0] = ast.Slice(p[1], p[3], p[4], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''sliceop : COLON'''] = ''' p[0] = ast.Name("None", ast.Load(), rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''sliceop : COLON test'''] = ''' p[0] = p[2]'''
actions['''exprlist : expr'''] = ''' p[0] = p[1]'''
actions['''exprlist : expr COMMA'''] = ''' p[0] = ast.Tuple([p[1]], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''exprlist : expr exprlist_star'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''exprlist : expr exprlist_star COMMA'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''exprlist_star : COMMA expr'''] = ''' p[0] = [p[2]]'''
actions['''exprlist_star : exprlist_star COMMA expr'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''testlist : test'''] = ''' p[0] = p[1]'''
actions['''testlist : test COMMA'''] = ''' p[0] = ast.Tuple([p[1]], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist : test testlist_star'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist : test testlist_star COMMA'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist_star : COMMA test'''] = ''' p[0] = [p[2]]'''
actions['''testlist_star : testlist_star COMMA test'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''dictorsetmaker : test COLON test comp_for'''] = ''' p[0] = ast.DictComp(p[1], p[3], p[4], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''dictorsetmaker : test COLON test'''] = ''' p[0] = ([p[1]], [p[3]])'''
actions['''dictorsetmaker : test COLON test COMMA'''] = ''' p[0] = ([p[1]], [p[3]])'''
actions['''dictorsetmaker : test COLON test dictorsetmaker_star'''] = ''' keys, values = p[4]
p[0] = ([p[1]] + keys, [p[3]] + values)'''
actions['''dictorsetmaker : test COLON test dictorsetmaker_star COMMA'''] = ''' keys, values = p[4]
p[0] = ([p[1]] + keys, [p[3]] + values)'''
actions['''dictorsetmaker : test comp_for'''] = ''' p[0] = ast.SetComp(p[1], p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''dictorsetmaker : test'''] = ''' p[0] = (None, [p[1]])'''
actions['''dictorsetmaker : test COMMA'''] = ''' p[0] = (None, [p[1]])'''
actions['''dictorsetmaker : test dictorsetmaker_star2'''] = ''' keys, values = p[2]
p[0] = (keys, [p[1]] + values)'''
actions['''dictorsetmaker : test dictorsetmaker_star2 COMMA'''] = ''' keys, values = p[2]
p[0] = (keys, [p[1]] + values)'''
actions['''dictorsetmaker_star : COMMA test COLON test'''] = ''' p[0] = ([p[2]], [p[4]])'''
actions['''dictorsetmaker_star : dictorsetmaker_star COMMA test COLON test'''] = ''' keys, values = p[1]
p[0] = (keys + [p[3]], values + [p[5]])'''
actions['''dictorsetmaker_star2 : COMMA test'''] = ''' p[0] = (None, [p[2]])'''
actions['''dictorsetmaker_star2 : dictorsetmaker_star2 COMMA test'''] = ''' keys, values = p[1]
p[0] = (keys, values + [p[3]])'''
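# Note: the dictorsetmaker productions hand back a (keys, values) pair, with
# keys=None marking a set display; the LBRACE atom action above turns that into
# ast.Set, while an actual key list becomes ast.Dict.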
actions['''classdef : CLASS NAME COLON suite'''] = ''' p[0] = ast.ClassDef(p[2][0], [], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''classdef : CLASS NAME LPAR RPAR COLON suite'''] = ''' p[0] = ast.ClassDef(p[2][0], [], p[6], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''classdef : CLASS NAME LPAR testlist RPAR COLON suite'''] = ''' if isinstance(p[4], ast.Tuple):
p[0] = ast.ClassDef(p[2][0], p[4].elts, p[7], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])
else:
p[0] = ast.ClassDef(p[2][0], [p[4]], p[7], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''arglist : argument'''] = ''' if notkeyword(p[1]):
p[0] = ast.Call(None, [p[1]], [], None, None, rule=inspect.currentframe().f_code.co_name)
else:
p[0] = ast.Call(None, [], [p[1]], None, None, rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : argument COMMA'''] = ''' if notkeyword(p[1]):
p[0] = ast.Call(None, [p[1]], [], None, None, rule=inspect.currentframe().f_code.co_name)
else:
p[0] = ast.Call(None, [], [p[1]], None, None, rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : STAR test'''] = ''' p[0] = ast.Call(None, [], [], p[2], None, rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : STAR test COMMA DOUBLESTAR test'''] = ''' p[0] = ast.Call(None, [], [], p[2], p[5], rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : STAR test arglist_star'''] = ''' p[0] = ast.Call(None, filter(notkeyword, p[3]), filter(iskeyword, p[3]), p[2], None, rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : STAR test arglist_star COMMA DOUBLESTAR test'''] = ''' p[0] = ast.Call(None, filter(notkeyword, p[3]), filter(iskeyword, p[3]), p[2], p[6], rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : DOUBLESTAR test'''] = ''' p[0] = ast.Call(None, [], [], None, p[2], rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : arglist_star2 argument'''] = ''' args = p[1] + [p[2]]
p[0] = ast.Call(None, filter(notkeyword, args), filter(iskeyword, args), None, None, rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : arglist_star2 argument COMMA'''] = ''' args = p[1] + [p[2]]
p[0] = ast.Call(None, filter(notkeyword, args), filter(iskeyword, args), None, None, rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : arglist_star2 STAR test'''] = ''' p[0] = ast.Call(None, filter(notkeyword, p[1]), filter(iskeyword, p[1]), p[3], None, rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : arglist_star2 STAR test COMMA DOUBLESTAR test'''] = ''' p[0] = ast.Call(None, filter(notkeyword, p[1]), filter(iskeyword, p[1]), p[3], p[6], rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : arglist_star2 STAR test arglist_star3'''] = ''' args = p[1] + p[4]
p[0] = ast.Call(None, filter(notkeyword, args), filter(iskeyword, args), p[3], None, rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : arglist_star2 STAR test arglist_star3 COMMA DOUBLESTAR test'''] = ''' args = p[1] + p[4]
p[0] = ast.Call(None, filter(notkeyword, args), filter(iskeyword, args), p[3], p[7], rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist : arglist_star2 DOUBLESTAR test'''] = ''' p[0] = ast.Call(None, filter(notkeyword, p[1]), filter(iskeyword, p[1]), None, p[3], rule=inspect.currentframe().f_code.co_name)'''
actions['''arglist_star : COMMA argument'''] = ''' p[0] = [p[2]]'''
actions['''arglist_star : arglist_star COMMA argument'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''arglist_star3 : COMMA argument'''] = ''' p[0] = [p[2]]'''
actions['''arglist_star3 : arglist_star3 COMMA argument'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''arglist_star2 : argument COMMA'''] = ''' p[0] = [p[1]]'''
actions['''arglist_star2 : arglist_star2 argument COMMA'''] = ''' p[0] = p[1] + [p[2]]'''
actions['''argument : test'''] = ''' p[0] = p[1]'''
actions['''argument : test comp_for'''] = ''' p[0] = ast.GeneratorExp(p[1], p[2], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''argument : test EQUAL test'''] = ''' p[0] = ast.keyword(p[1].id, p[3], rule=inspect.currentframe().f_code.co_name)
inherit_lineno(p[0], p[1])'''
actions['''list_iter : list_for'''] = ''' p[0] = ([], p[1])'''
actions['''list_iter : list_if'''] = ''' p[0] = p[1]'''
actions['''list_for : FOR exprlist IN testlist_safe'''] = ''' ctx_to_store(p[2])
p[0] = [ast.comprehension(p[2], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])]'''
actions['''list_for : FOR exprlist IN testlist_safe list_iter'''] = ''' ctx_to_store(p[2])
ifs, iters = p[5]
p[0] = [ast.comprehension(p[2], p[4], ifs, rule=inspect.currentframe().f_code.co_name, **p[1][1])] + iters'''
actions['''list_if : IF old_test'''] = ''' p[0] = ([p[2]], [])'''
actions['''list_if : IF old_test list_iter'''] = ''' ifs, iters = p[3]
p[0] = ([p[2]] + ifs, iters)'''
actions['''comp_iter : comp_for'''] = ''' p[0] = ([], p[1])'''
actions['''comp_iter : comp_if'''] = ''' p[0] = p[1]'''
actions['''comp_for : FOR exprlist IN or_test'''] = ''' ctx_to_store(p[2])
p[0] = [ast.comprehension(p[2], p[4], [], rule=inspect.currentframe().f_code.co_name, **p[1][1])]'''
actions['''comp_for : FOR exprlist IN or_test comp_iter'''] = ''' ctx_to_store(p[2])
ifs, iters = p[5]
p[0] = [ast.comprehension(p[2], p[4], ifs, rule=inspect.currentframe().f_code.co_name, **p[1][1])] + iters'''
actions['''comp_if : IF old_test'''] = ''' p[0] = ([p[2]], [])'''
actions['''comp_if : IF old_test comp_iter'''] = ''' ifs, iters = p[3]
p[0] = ([p[2]] + ifs, iters)'''
actions['''testlist1 : test'''] = ''' p[0] = p[1]'''
actions['''testlist1 : test testlist1_star'''] = ''' p[0] = ast.Tuple([p[1]] + p[2], ast.Load(), rule=inspect.currentframe().f_code.co_name, paren=False)
inherit_lineno(p[0], p[1])'''
actions['''testlist1_star : COMMA test'''] = ''' p[0] = [p[2]]'''
actions['''testlist1_star : testlist1_star COMMA test'''] = ''' p[0] = p[1] + [p[3]]'''
actions['''encoding_decl : NAME'''] = ''' p[0] = p[1]'''
actions['''yield_expr : YIELD'''] = ''' p[0] = ast.Yield(None, rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
actions['''yield_expr : YIELD testlist'''] = ''' p[0] = ast.Yield(p[2], rule=inspect.currentframe().f_code.co_name, **p[1][1])'''
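
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original table: one way a
# rule -> action-source mapping like `actions` above could be compiled into
# the p_* handler functions that PLY's yacc expects. The helper name
# `build_parser_functions` is hypothetical; PLY itself only requires
# module-level functions whose docstring is the grammar production and whose
# body fills in p[0].
# ---------------------------------------------------------------------------
def build_parser_functions(actions, namespace):
    # `namespace` must already contain everything the action sources use:
    # ast, inspect, ctx_to_store, inherit_lineno, unwrap_left_associative, ...
    for index, (rule, body) in enumerate(actions.items()):
        lines = body.split("\n")
        head, tail = lines[0].lstrip(), lines[1:]
        if tail:
            # Continuation lines keep the original file's column, so dedent
            # by their common indent to preserve nested if/while blocks.
            pad = min(len(ln) - len(ln.lstrip()) for ln in tail if ln.strip())
            tail = [ln[pad:] for ln in tail]
        body_src = "\n".join("    " + ln for ln in [head] + tail)
        source = "def p_action_%d(p):\n    '''%s'''\n%s\n" % (index, rule, body_src)
        exec(source, namespace)  # defines p_action_<index> in `namespace`

# Hypothetical usage: build_parser_functions(actions, globals()), then hand
# this module to ply.yacc.yacc() to generate the parser.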
| 82.03592 | 278 | 0.592483 | 8,891 | 57,097 | 3.659318 | 0.032617 | 0.030367 | 0.178147 | 0.185892 | 0.902597 | 0.875027 | 0.835101 | 0.783218 | 0.720824 | 0.687106 | 0 | 0.034882 | 0.143423 | 57,097 | 695 | 279 | 82.153957 | 0.630347 | 0.000578 | 0 | 0.312046 | 0 | 0.339623 | 0.824766 | 0.231507 | 0 | 0 | 0 | 0 | 0.004354 | 1 | 0 | false | 0.002903 | 0.04209 | 0 | 0.04209 | 0.015965 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
2ccb014fc1ba28555d34168d881760e003be5657 | 6,407 | py | Python | loldib/getratings/models/NA/na_quinn/na_quinn_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null | loldib/getratings/models/NA/na_quinn/na_quinn_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null | loldib/getratings/models/NA/na_quinn/na_quinn_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | ["Apache-2.0"] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Quinn_Jng_Aatrox(Ratings):
pass
class NA_Quinn_Jng_Ahri(Ratings):
pass
class NA_Quinn_Jng_Akali(Ratings):
pass
class NA_Quinn_Jng_Alistar(Ratings):
pass
class NA_Quinn_Jng_Amumu(Ratings):
pass
class NA_Quinn_Jng_Anivia(Ratings):
pass
class NA_Quinn_Jng_Annie(Ratings):
pass
class NA_Quinn_Jng_Ashe(Ratings):
pass
class NA_Quinn_Jng_AurelionSol(Ratings):
pass
class NA_Quinn_Jng_Azir(Ratings):
pass
class NA_Quinn_Jng_Bard(Ratings):
pass
class NA_Quinn_Jng_Blitzcrank(Ratings):
pass
class NA_Quinn_Jng_Brand(Ratings):
pass
class NA_Quinn_Jng_Braum(Ratings):
pass
class NA_Quinn_Jng_Caitlyn(Ratings):
pass
class NA_Quinn_Jng_Camille(Ratings):
pass
class NA_Quinn_Jng_Cassiopeia(Ratings):
pass
class NA_Quinn_Jng_Chogath(Ratings):
pass
class NA_Quinn_Jng_Corki(Ratings):
pass
class NA_Quinn_Jng_Darius(Ratings):
pass
class NA_Quinn_Jng_Diana(Ratings):
pass
class NA_Quinn_Jng_Draven(Ratings):
pass
class NA_Quinn_Jng_DrMundo(Ratings):
pass
class NA_Quinn_Jng_Ekko(Ratings):
pass
class NA_Quinn_Jng_Elise(Ratings):
pass
class NA_Quinn_Jng_Evelynn(Ratings):
pass
class NA_Quinn_Jng_Ezreal(Ratings):
pass
class NA_Quinn_Jng_Fiddlesticks(Ratings):
pass
class NA_Quinn_Jng_Fiora(Ratings):
pass
class NA_Quinn_Jng_Fizz(Ratings):
pass
class NA_Quinn_Jng_Galio(Ratings):
pass
class NA_Quinn_Jng_Gangplank(Ratings):
pass
class NA_Quinn_Jng_Garen(Ratings):
pass
class NA_Quinn_Jng_Gnar(Ratings):
pass
class NA_Quinn_Jng_Gragas(Ratings):
pass
class NA_Quinn_Jng_Graves(Ratings):
pass
class NA_Quinn_Jng_Hecarim(Ratings):
pass
class NA_Quinn_Jng_Heimerdinger(Ratings):
pass
class NA_Quinn_Jng_Illaoi(Ratings):
pass
class NA_Quinn_Jng_Irelia(Ratings):
pass
class NA_Quinn_Jng_Ivern(Ratings):
pass
class NA_Quinn_Jng_Janna(Ratings):
pass
class NA_Quinn_Jng_JarvanIV(Ratings):
pass
class NA_Quinn_Jng_Jax(Ratings):
pass
class NA_Quinn_Jng_Jayce(Ratings):
pass
class NA_Quinn_Jng_Jhin(Ratings):
pass
class NA_Quinn_Jng_Jinx(Ratings):
pass
class NA_Quinn_Jng_Kalista(Ratings):
pass
class NA_Quinn_Jng_Karma(Ratings):
pass
class NA_Quinn_Jng_Karthus(Ratings):
pass
class NA_Quinn_Jng_Kassadin(Ratings):
pass
class NA_Quinn_Jng_Katarina(Ratings):
pass
class NA_Quinn_Jng_Kayle(Ratings):
pass
class NA_Quinn_Jng_Kayn(Ratings):
pass
class NA_Quinn_Jng_Kennen(Ratings):
pass
class NA_Quinn_Jng_Khazix(Ratings):
pass
class NA_Quinn_Jng_Kindred(Ratings):
pass
class NA_Quinn_Jng_Kled(Ratings):
pass
class NA_Quinn_Jng_KogMaw(Ratings):
pass
class NA_Quinn_Jng_Leblanc(Ratings):
pass
class NA_Quinn_Jng_LeeSin(Ratings):
pass
class NA_Quinn_Jng_Leona(Ratings):
pass
class NA_Quinn_Jng_Lissandra(Ratings):
pass
class NA_Quinn_Jng_Lucian(Ratings):
pass
class NA_Quinn_Jng_Lulu(Ratings):
pass
class NA_Quinn_Jng_Lux(Ratings):
pass
class NA_Quinn_Jng_Malphite(Ratings):
pass
class NA_Quinn_Jng_Malzahar(Ratings):
pass
class NA_Quinn_Jng_Maokai(Ratings):
pass
class NA_Quinn_Jng_MasterYi(Ratings):
pass
class NA_Quinn_Jng_MissFortune(Ratings):
pass
class NA_Quinn_Jng_MonkeyKing(Ratings):
pass
class NA_Quinn_Jng_Mordekaiser(Ratings):
pass
class NA_Quinn_Jng_Morgana(Ratings):
pass
class NA_Quinn_Jng_Nami(Ratings):
pass
class NA_Quinn_Jng_Nasus(Ratings):
pass
class NA_Quinn_Jng_Nautilus(Ratings):
pass
class NA_Quinn_Jng_Nidalee(Ratings):
pass
class NA_Quinn_Jng_Nocturne(Ratings):
pass
class NA_Quinn_Jng_Nunu(Ratings):
pass
class NA_Quinn_Jng_Olaf(Ratings):
pass
class NA_Quinn_Jng_Orianna(Ratings):
pass
class NA_Quinn_Jng_Ornn(Ratings):
pass
class NA_Quinn_Jng_Pantheon(Ratings):
pass
class NA_Quinn_Jng_Poppy(Ratings):
pass
class NA_Quinn_Jng_Quinn(Ratings):
pass
class NA_Quinn_Jng_Rakan(Ratings):
pass
class NA_Quinn_Jng_Rammus(Ratings):
pass
class NA_Quinn_Jng_RekSai(Ratings):
pass
class NA_Quinn_Jng_Renekton(Ratings):
pass
class NA_Quinn_Jng_Rengar(Ratings):
pass
class NA_Quinn_Jng_Riven(Ratings):
pass
class NA_Quinn_Jng_Rumble(Ratings):
pass
class NA_Quinn_Jng_Ryze(Ratings):
pass
class NA_Quinn_Jng_Sejuani(Ratings):
pass
class NA_Quinn_Jng_Shaco(Ratings):
pass
class NA_Quinn_Jng_Shen(Ratings):
pass
class NA_Quinn_Jng_Shyvana(Ratings):
pass
class NA_Quinn_Jng_Singed(Ratings):
pass
class NA_Quinn_Jng_Sion(Ratings):
pass
class NA_Quinn_Jng_Sivir(Ratings):
pass
class NA_Quinn_Jng_Skarner(Ratings):
pass
class NA_Quinn_Jng_Sona(Ratings):
pass
class NA_Quinn_Jng_Soraka(Ratings):
pass
class NA_Quinn_Jng_Swain(Ratings):
pass
class NA_Quinn_Jng_Syndra(Ratings):
pass
class NA_Quinn_Jng_TahmKench(Ratings):
pass
class NA_Quinn_Jng_Taliyah(Ratings):
pass
class NA_Quinn_Jng_Talon(Ratings):
pass
class NA_Quinn_Jng_Taric(Ratings):
pass
class NA_Quinn_Jng_Teemo(Ratings):
pass
class NA_Quinn_Jng_Thresh(Ratings):
pass
class NA_Quinn_Jng_Tristana(Ratings):
pass
class NA_Quinn_Jng_Trundle(Ratings):
pass
class NA_Quinn_Jng_Tryndamere(Ratings):
pass
class NA_Quinn_Jng_TwistedFate(Ratings):
pass
class NA_Quinn_Jng_Twitch(Ratings):
pass
class NA_Quinn_Jng_Udyr(Ratings):
pass
class NA_Quinn_Jng_Urgot(Ratings):
pass
class NA_Quinn_Jng_Varus(Ratings):
pass
class NA_Quinn_Jng_Vayne(Ratings):
pass
class NA_Quinn_Jng_Veigar(Ratings):
pass
class NA_Quinn_Jng_Velkoz(Ratings):
pass
class NA_Quinn_Jng_Vi(Ratings):
pass
class NA_Quinn_Jng_Viktor(Ratings):
pass
class NA_Quinn_Jng_Vladimir(Ratings):
pass
class NA_Quinn_Jng_Volibear(Ratings):
pass
class NA_Quinn_Jng_Warwick(Ratings):
pass
class NA_Quinn_Jng_Xayah(Ratings):
pass
class NA_Quinn_Jng_Xerath(Ratings):
pass
class NA_Quinn_Jng_XinZhao(Ratings):
pass
class NA_Quinn_Jng_Yasuo(Ratings):
pass
class NA_Quinn_Jng_Yorick(Ratings):
pass
class NA_Quinn_Jng_Zac(Ratings):
pass
class NA_Quinn_Jng_Zed(Ratings):
pass
class NA_Quinn_Jng_Ziggs(Ratings):
pass
class NA_Quinn_Jng_Zilean(Ratings):
pass
class NA_Quinn_Jng_Zyra(Ratings):
pass
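
# Illustrative sketch, not part of the original module: every class above is an
# empty Ratings subclass named NA_Quinn_Jng_<OpponentChampion>, so the class
# for a given opponent can be fetched dynamically instead of being referenced
# by hand. `ratings_class_for` is a hypothetical helper.
def ratings_class_for(opponent_name):
    # e.g. ratings_class_for("Ahri") returns NA_Quinn_Jng_Ahri
    return globals()["NA_Quinn_Jng_%s" % opponent_name]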
| 15.364508 | 46 | 0.761667 | 972 | 6,407 | 4.59465 | 0.151235 | 0.216301 | 0.370802 | 0.463502 | 0.797582 | 0.797582 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173404 | 6,407 | 416 | 47 | 15.401442 | 0.843278 | 0 | 0 | 0.498195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.498195 | 0.00361 | 0 | 0.501805 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
e2f0e638e030fe54a88224133f9d5efc74321f1c | 126 | py | Python | codenode/__init__.py | 0xf0f/codenode | fa36ba5e2eeb42e95c8fc33afd4f1bf131ba6d9b | ["MIT"] | 3 | 2019-06-27T04:57:37.000Z | 2019-06-27T11:29:33.000Z | codenode/__init__.py | 0xf0f/codenode | fa36ba5e2eeb42e95c8fc33afd4f1bf131ba6d9b | ["MIT"] | null | null | null | codenode/__init__.py | 0xf0f/codenode | fa36ba5e2eeb42e95c8fc33afd4f1bf131ba6d9b | ["MIT"] | null | null | null |
from .base import CodeNode
from .base import Line
from .base import EmptyLines
from .base import File
from .base import Block
| 21 | 28 | 0.801587 | 20 | 126 | 5.05 | 0.4 | 0.39604 | 0.693069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.15873 | 126 | 5 | 29 | 25.2 | 0.95283 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
3925f7fb63a78997fb6e053972e16547e5ea8f78 | 27 | py | Python | lab1/lab1/models/__init__.py | ZerocksX/Service-Oriented-Computing-2019 | eac6b0e9a40eed76b452f6524fd899e7107b0f69 | ["Apache-2.0"] | null | null | null | lab1/lab1/models/__init__.py | ZerocksX/Service-Oriented-Computing-2019 | eac6b0e9a40eed76b452f6524fd899e7107b0f69 | ["Apache-2.0"] | null | null | null | lab1/lab1/models/__init__.py | ZerocksX/Service-Oriented-Computing-2019 | eac6b0e9a40eed76b452f6524fd899e7107b0f69 | ["Apache-2.0"] | null | null | null |
def user():
return None
| 13.5 | 15 | 0.62963 | 4 | 27 | 4.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.259259 | 27 | 2 | 15 | 13.5 | 0.85 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
1a8193846fff6d9d565706505c3c7635537924dc | 32,559 | py | Python | interface/new/SysTrayIcon.py | AsiganTheSunk/python-torrent-scrapper | 30f27962e795840b82d47398e05664429829ff2b | ["Apache-2.0"] | 5 | 2018-05-19T06:18:01.000Z | 2020-01-14T23:17:30.000Z | interface/new/SysTrayIcon.py | AsiganTheSunk/python-torrent-scrapper | 30f27962e795840b82d47398e05664429829ff2b | ["Apache-2.0"] | 9 | 2018-05-24T01:02:46.000Z | 2020-02-13T22:35:43.000Z | interface/new/SysTrayIcon.py | AsiganTheSunk/python-torrent-scrapper | 30f27962e795840b82d47398e05664429829ff2b | ["Apache-2.0"] | null | null | null |
<pre><span class="syntax1">#</span><span class="syntax1">!/usr/bin/env</span><span class="syntax1"> </span><span class="syntax1">python</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Module</span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1">:</span><span class="syntax1"> </span><span class="syntax1">SysTrayIcon.py</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Synopsis</span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1">:</span><span class="syntax1"> </span><span class="syntax1">Windows</span><span class="syntax1"> </span><span class="syntax1">System</span><span class="syntax1"> </span><span class="syntax1">tray</span><span class="syntax1"> </span><span class="syntax1">icon.</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Programmer</span><span class="syntax1"> </span><span class="syntax1">:</span><span class="syntax1"> </span><span class="syntax1">Simon</span><span class="syntax1"> </span><span class="syntax1">Brunning</span><span class="syntax1"> </span><span class="syntax1">-</span><span class="syntax1"> </span><span class="syntax1">simon@brunningonline.net</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Date</span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1">:</span><span class="syntax1"> </span><span class="syntax1">11</span><span class="syntax1"> </span><span class="syntax1">April</span><span class="syntax1"> </span><span class="syntax1">2005</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Notes</span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1">:</span><span class="syntax1"> </span><span class="syntax1">Based</span><span class="syntax1"> </span><span class="syntax1">on</span><span class="syntax1"> </span><span class="syntax1">(i.e.</span><span class="syntax1"> </span><span class="syntax1">ripped</span><span class="syntax1"> </span><span class="syntax1">off</span><span class="syntax1"> </span><span class="syntax1">from)</span><span class="syntax1"> </span><span class="syntax1">Mark</span><span class="syntax1"> </span><span class="syntax1">Hammond's</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1">win32gui_taskbar.py</span><span class="syntax1"> </span><span class="syntax1">and</span><span class="syntax1"> </span><span class="syntax1">win32gui_menu.py</span><span class="syntax1"> </span><span class="syntax1">demos</span><span class="syntax1"> </span><span class="syntax1">from</span><span class="syntax1"> </span><span class="syntax1">PyWin32</span>
<span class="syntax14">'''</span><span class="syntax14">TODO</span>
<span class="syntax14">For</span><span class="syntax14"> </span><span class="syntax14">now</span><span class="syntax14">,</span><span class="syntax14"> </span><span class="syntax14">the</span><span class="syntax14"> </span><span class="syntax14">demo</span><span class="syntax14"> </span><span class="syntax14">at</span><span class="syntax14"> </span><span class="syntax14">the</span><span class="syntax14"> </span><span class="syntax14">bottom</span><span class="syntax14"> </span><span class="syntax14">shows</span><span class="syntax14"> </span><span class="syntax14">how</span><span class="syntax14"> </span><span class="syntax14">to</span><span class="syntax14"> </span><span class="syntax14">use</span><span class="syntax14"> </span><span class="syntax14">it</span><span class="syntax14">.</span><span class="syntax14">.</span><span class="syntax14">.</span><span class="syntax14">'''</span>
<span class="syntax8">import</span> os
<span class="syntax8">import</span> sys
<span class="syntax8">import</span> win32api
<span class="syntax8">import</span> win32con
<span class="syntax8">import</span> win32gui_struct
<span class="syntax8">try</span>:
<span class="syntax8">import</span> winxpgui <span class="syntax8">as</span> win32gui
<span class="syntax8">except</span> <span class="syntax10">ImportError</span>:
<span class="syntax8">import</span> win32gui
<span class="syntax8">class</span> <span class="syntax6">SysTrayIcon</span>(<span class="syntax9">object</span>):
<span class="syntax14">'''</span><span class="syntax14">TODO</span><span class="syntax14">'''</span>
QUIT <span class="syntax18">=</span> <span class="syntax13">'</span><span class="syntax13">QUIT</span><span class="syntax13">'</span>
SPECIAL_ACTIONS <span class="syntax18">=</span> [QUIT]
FIRST_ID <span class="syntax18">=</span> <span class="syntax5">1023</span>
<span class="syntax8">def</span> <span class="syntax10">__init__</span>(self,
icon,
hover_text,
menu_options,
on_quit<span class="syntax18">=</span><span class="syntax10">None</span>,
default_menu_index<span class="syntax18">=</span><span class="syntax10">None</span>,
window_class_name<span class="syntax18">=</span><span class="syntax10">None</span>,):
self.icon <span class="syntax18">=</span> icon
self.hover_text <span class="syntax18">=</span> hover_text
self.on_quit <span class="syntax18">=</span> on_quit
menu_options <span class="syntax18">=</span> menu_options <span class="syntax18">+</span> ((<span class="syntax13">'</span><span class="syntax13">Quit</span><span class="syntax13">'</span>, <span class="syntax10">None</span>, self.QUIT),)
self._next_action_id <span class="syntax18">=</span> self.FIRST_ID
self.menu_actions_by_id <span class="syntax18">=</span> <span class="syntax9">set</span>()
self.menu_options <span class="syntax18">=</span> self.<span class="syntax6">_add_ids_to_menu_options</span>(<span class="syntax9">list</span>(menu_options))
self.menu_actions_by_id <span class="syntax18">=</span> <span class="syntax9">dict</span>(self.menu_actions_by_id)
<span class="syntax8">del</span> self._next_action_id
self.default_menu_index <span class="syntax18">=</span> (default_menu_index <span class="syntax8">or</span> <span class="syntax5">0</span>)
self.window_class_name <span class="syntax18">=</span> window_class_name <span class="syntax8">or</span> <span class="syntax13">"</span><span class="syntax13">SysTrayIconPy</span><span class="syntax13">"</span>
message_map <span class="syntax18">=</span> {win32gui.<span class="syntax6">RegisterWindowMessage</span>(<span class="syntax13">"</span><span class="syntax13">TaskbarCreated</span><span class="syntax13">"</span>): self.restart,
win32con.WM_DESTROY: self.destroy,
win32con.WM_COMMAND: self.command,
win32con.WM_USER<span class="syntax18">+</span><span class="syntax5">20</span> : self.notify,}
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Register</span><span class="syntax1"> </span><span class="syntax1">the</span><span class="syntax1"> </span><span class="syntax1">Window</span><span class="syntax1"> </span><span class="syntax1">class.</span>
window_class <span class="syntax18">=</span> win32gui.<span class="syntax6">WNDCLASS</span>()
hinst <span class="syntax18">=</span> window_class.hInstance <span class="syntax18">=</span> win32gui.<span class="syntax6">GetModuleHandle</span>(<span class="syntax10">None</span>)
window_class.lpszClassName <span class="syntax18">=</span> self.window_class_name
window_class.style <span class="syntax18">=</span> win32con.CS_VREDRAW <span class="syntax18">|</span> win32con.CS_HREDRAW;
window_class.hCursor <span class="syntax18">=</span> win32gui.<span class="syntax6">LoadCursor</span>(<span class="syntax5">0</span>, win32con.IDC_ARROW)
window_class.hbrBackground <span class="syntax18">=</span> win32con.COLOR_WINDOW
window_class.lpfnWndProc <span class="syntax18">=</span> message_map <span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">could</span><span class="syntax1"> </span><span class="syntax1">also</span><span class="syntax1"> </span><span class="syntax1">specify</span><span class="syntax1"> </span><span class="syntax1">a</span><span class="syntax1"> </span><span class="syntax1">wndproc.</span>
classAtom <span class="syntax18">=</span> win32gui.<span class="syntax6">RegisterClass</span>(window_class)
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Create</span><span class="syntax1"> </span><span class="syntax1">the</span><span class="syntax1"> </span><span class="syntax1">Window.</span>
style <span class="syntax18">=</span> win32con.WS_OVERLAPPED <span class="syntax18">|</span> win32con.WS_SYSMENU
self.hwnd <span class="syntax18">=</span> win32gui.<span class="syntax6">CreateWindow</span>(classAtom,
self.window_class_name,
style,
<span class="syntax5">0</span>,
<span class="syntax5">0</span>,
win32con.CW_USEDEFAULT,
win32con.CW_USEDEFAULT,
<span class="syntax5">0</span>,
<span class="syntax5">0</span>,
hinst,
<span class="syntax10">None</span>)
win32gui.<span class="syntax6">UpdateWindow</span>(self.hwnd)
self.notify_id <span class="syntax18">=</span> <span class="syntax10">None</span>
self.<span class="syntax6">refresh_icon</span>()
win32gui.<span class="syntax6">PumpMessages</span>()
<span class="syntax8">def</span> <span class="syntax6">_add_ids_to_menu_options</span>(self, menu_options):
result <span class="syntax18">=</span> []
<span class="syntax8">for</span> menu_option <span class="syntax8">in</span> menu_options:
option_text, option_icon, option_action <span class="syntax18">=</span> menu_option
<span class="syntax8">if</span> <span class="syntax9">callable</span>(option_action) <span class="syntax8">or</span> option_action <span class="syntax8">in</span> self.SPECIAL_ACTIONS:
self.menu_actions_by_id.<span class="syntax6">add</span>((self._next_action_id, option_action))
result.<span class="syntax6">append</span>(menu_option <span class="syntax18">+</span> (self._next_action_id,))
<span class="syntax8">elif</span> <span class="syntax6">non_string_iterable</span>(option_action):
result.<span class="syntax6">append</span>((option_text,
option_icon,
self.<span class="syntax6">_add_ids_to_menu_options</span>(option_action),
self._next_action_id))
<span class="syntax8">else</span>:
<span class="syntax8">print</span> <span class="syntax13">'</span><span class="syntax13">Unknown</span><span class="syntax13"> </span><span class="syntax13">item</span><span class="syntax13">'</span>, option_text, option_icon, option_action
self._next_action_id <span class="syntax18">+</span><span class="syntax18">=</span> <span class="syntax5">1</span>
<span class="syntax8">return</span> result
<span class="syntax8">def</span> <span class="syntax6">refresh_icon</span>(self):
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Try</span><span class="syntax1"> </span><span class="syntax1">and</span><span class="syntax1"> </span><span class="syntax1">find</span><span class="syntax1"> </span><span class="syntax1">a</span><span class="syntax1"> </span><span class="syntax1">custom</span><span class="syntax1"> </span><span class="syntax1">icon</span>
hinst <span class="syntax18">=</span> win32gui.<span class="syntax6">GetModuleHandle</span>(<span class="syntax10">None</span>)
<span class="syntax8">if</span> os.path.<span class="syntax6">isfile</span>(self.icon):
icon_flags <span class="syntax18">=</span> win32con.LR_LOADFROMFILE <span class="syntax18">|</span> win32con.LR_DEFAULTSIZE
hicon <span class="syntax18">=</span> win32gui.<span class="syntax6">LoadImage</span>(hinst,
self.icon,
win32con.IMAGE_ICON,
<span class="syntax5">0</span>,
<span class="syntax5">0</span>,
icon_flags)
<span class="syntax8">else</span>:
<span class="syntax8">print</span> <span class="syntax13">"</span><span class="syntax13">Can</span><span class="syntax13">'</span><span class="syntax13">t</span><span class="syntax13"> </span><span class="syntax13">find</span><span class="syntax13"> </span><span class="syntax13">icon</span><span class="syntax13"> </span><span class="syntax13">file</span><span class="syntax13"> </span><span class="syntax13">-</span><span class="syntax13"> </span><span class="syntax13">using</span><span class="syntax13"> </span><span class="syntax13">default</span><span class="syntax13">.</span><span class="syntax13">"</span>
hicon <span class="syntax18">=</span> win32gui.<span class="syntax6">LoadIcon</span>(<span class="syntax5">0</span>, win32con.IDI_APPLICATION)
<span class="syntax8">if</span> self.notify_id: message <span class="syntax18">=</span> win32gui.NIM_MODIFY
<span class="syntax8">else</span>: message <span class="syntax18">=</span> win32gui.NIM_ADD
self.notify_id <span class="syntax18">=</span> (self.hwnd,
<span class="syntax5">0</span>,
win32gui.NIF_ICON <span class="syntax18">|</span> win32gui.NIF_MESSAGE <span class="syntax18">|</span> win32gui.NIF_TIP,
win32con.WM_USER<span class="syntax18">+</span><span class="syntax5">20</span>,
hicon,
self.hover_text)
win32gui.<span class="syntax6">Shell_NotifyIcon</span>(message, self.notify_id)
<span class="syntax8">def</span> <span class="syntax6">restart</span>(self, hwnd, msg, wparam, lparam):
self.<span class="syntax6">refresh_icon</span>()
<span class="syntax8">def</span> <span class="syntax6">destroy</span>(self, hwnd, msg, wparam, lparam):
<span class="syntax8">if</span> self.on_quit: self.<span class="syntax6">on_quit</span>(self)
nid <span class="syntax18">=</span> (self.hwnd, <span class="syntax5">0</span>)
win32gui.<span class="syntax6">Shell_NotifyIcon</span>(win32gui.NIM_DELETE, nid)
win32gui.<span class="syntax6">PostQuitMessage</span>(<span class="syntax5">0</span>) <span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Terminate</span><span class="syntax1"> </span><span class="syntax1">the</span><span class="syntax1"> </span><span class="syntax1">app.</span>
<span class="syntax8">def</span> <span class="syntax6">notify</span>(self, hwnd, msg, wparam, lparam):
<span class="syntax8">if</span> lparam<span class="syntax18">=</span><span class="syntax18">=</span>win32con.WM_LBUTTONDBLCLK:
self.<span class="syntax6">execute_menu_option</span>(self.default_menu_index <span class="syntax18">+</span> self.FIRST_ID)
<span class="syntax8">elif</span> lparam<span class="syntax18">=</span><span class="syntax18">=</span>win32con.WM_RBUTTONUP:
self.<span class="syntax6">show_menu</span>()
<span class="syntax8">elif</span> lparam<span class="syntax18">=</span><span class="syntax18">=</span>win32con.WM_LBUTTONUP:
<span class="syntax8">pass</span>
<span class="syntax8">return</span> <span class="syntax10">True</span>
<span class="syntax8">def</span> <span class="syntax6">show_menu</span>(self):
menu <span class="syntax18">=</span> win32gui.<span class="syntax6">CreatePopupMenu</span>()
self.<span class="syntax6">create_menu</span>(menu, self.menu_options)
<span class="syntax1">#</span><span class="syntax1">win32gui.SetMenuDefaultItem(menu,</span><span class="syntax1"> </span><span class="syntax1">1000,</span><span class="syntax1"> </span><span class="syntax1">0)</span>
pos <span class="syntax18">=</span> win32gui.<span class="syntax6">GetCursorPos</span>()
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">See</span><span class="syntax1"> </span><span class="syntax1">http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/menus_0hdi.asp</span>
win32gui.<span class="syntax6">SetForegroundWindow</span>(self.hwnd)
win32gui.<span class="syntax6">TrackPopupMenu</span>(menu,
win32con.TPM_LEFTALIGN,
pos[<span class="syntax5">0</span>],
pos[<span class="syntax5">1</span>],
<span class="syntax5">0</span>,
self.hwnd,
<span class="syntax10">None</span>)
win32gui.<span class="syntax6">PostMessage</span>(self.hwnd, win32con.WM_NULL, <span class="syntax5">0</span>, <span class="syntax5">0</span>)
<span class="syntax8">def</span> <span class="syntax6">create_menu</span>(self, menu, menu_options):
<span class="syntax8">for</span> option_text, option_icon, option_action, option_id <span class="syntax8">in</span> menu_options[::<span class="syntax18">-</span><span class="syntax5">1</span>]:
<span class="syntax8">if</span> option_icon:
option_icon <span class="syntax18">=</span> self.<span class="syntax6">prep_menu_icon</span>(option_icon)
<span class="syntax8">if</span> option_id <span class="syntax8">in</span> self.menu_actions_by_id:
item, extras <span class="syntax18">=</span> win32gui_struct.<span class="syntax6">PackMENUITEMINFO</span>(text<span class="syntax18">=</span>option_text,
hbmpItem<span class="syntax18">=</span>option_icon,
wID<span class="syntax18">=</span>option_id)
win32gui.<span class="syntax6">InsertMenuItem</span>(menu, <span class="syntax5">0</span>, <span class="syntax5">1</span>, item)
<span class="syntax8">else</span>:
submenu <span class="syntax18">=</span> win32gui.<span class="syntax6">CreatePopupMenu</span>()
self.<span class="syntax6">create_menu</span>(submenu, option_action)
item, extras <span class="syntax18">=</span> win32gui_struct.<span class="syntax6">PackMENUITEMINFO</span>(text<span class="syntax18">=</span>option_text,
hbmpItem<span class="syntax18">=</span>option_icon,
hSubMenu<span class="syntax18">=</span>submenu)
win32gui.<span class="syntax6">InsertMenuItem</span>(menu, <span class="syntax5">0</span>, <span class="syntax5">1</span>, item)
<span class="syntax8">def</span> <span class="syntax6">prep_menu_icon</span>(self, icon):
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">First</span><span class="syntax1"> </span><span class="syntax1">load</span><span class="syntax1"> </span><span class="syntax1">the</span><span class="syntax1"> </span><span class="syntax1">icon.</span>
ico_x <span class="syntax18">=</span> win32api.<span class="syntax6">GetSystemMetrics</span>(win32con.SM_CXSMICON)
ico_y <span class="syntax18">=</span> win32api.<span class="syntax6">GetSystemMetrics</span>(win32con.SM_CYSMICON)
hicon <span class="syntax18">=</span> win32gui.<span class="syntax6">LoadImage</span>(<span class="syntax5">0</span>, icon, win32con.IMAGE_ICON, ico_x, ico_y, win32con.LR_LOADFROMFILE)
hdcBitmap <span class="syntax18">=</span> win32gui.<span class="syntax6">CreateCompatibleDC</span>(<span class="syntax5">0</span>)
hdcScreen <span class="syntax18">=</span> win32gui.<span class="syntax6">GetDC</span>(<span class="syntax5">0</span>)
hbm <span class="syntax18">=</span> win32gui.<span class="syntax6">CreateCompatibleBitmap</span>(hdcScreen, ico_x, ico_y)
hbmOld <span class="syntax18">=</span> win32gui.<span class="syntax6">SelectObject</span>(hdcBitmap, hbm)
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Fill</span><span class="syntax1"> </span><span class="syntax1">the</span><span class="syntax1"> </span><span class="syntax1">background.</span>
brush <span class="syntax18">=</span> win32gui.<span class="syntax6">GetSysColorBrush</span>(win32con.COLOR_MENU)
win32gui.<span class="syntax6">FillRect</span>(hdcBitmap, (<span class="syntax5">0</span>, <span class="syntax5">0</span>, <span class="syntax5">16</span>, <span class="syntax5">16</span>), brush)
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">unclear</span><span class="syntax1"> </span><span class="syntax1">if</span><span class="syntax1"> </span><span class="syntax1">brush</span><span class="syntax1"> </span><span class="syntax1">needs</span><span class="syntax1"> </span><span class="syntax1">to</span><span class="syntax1"> </span><span class="syntax1">be</span><span class="syntax1"> </span><span class="syntax1">feed.</span><span class="syntax1"> </span><span class="syntax1"> </span><span class="syntax1">Best</span><span class="syntax1"> </span><span class="syntax1">clue</span><span class="syntax1"> </span><span class="syntax1">I</span><span class="syntax1"> </span><span class="syntax1">can</span><span class="syntax1"> </span><span class="syntax1">find</span><span class="syntax1"> </span><span class="syntax1">is:</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">"GetSysColorBrush</span><span class="syntax1"> </span><span class="syntax1">returns</span><span class="syntax1"> </span><span class="syntax1">a</span><span class="syntax1"> </span><span class="syntax1">cached</span><span class="syntax1"> </span><span class="syntax1">brush</span><span class="syntax1"> </span><span class="syntax1">instead</span><span class="syntax1"> </span><span class="syntax1">of</span><span class="syntax1"> </span><span class="syntax1">allocating</span><span class="syntax1"> </span><span class="syntax1">a</span><span class="syntax1"> </span><span class="syntax1">new</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">one."</span><span class="syntax1"> </span><span class="syntax1">-</span><span class="syntax1"> </span><span class="syntax1">implies</span><span class="syntax1"> </span><span class="syntax1">no</span><span class="syntax1"> </span><span class="syntax1">DeleteObject</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">draw</span><span class="syntax1"> </span><span class="syntax1">the</span><span class="syntax1"> </span><span class="syntax1">icon</span>
win32gui.<span class="syntax6">DrawIconEx</span>(hdcBitmap, <span class="syntax5">0</span>, <span class="syntax5">0</span>, hicon, ico_x, ico_y, <span class="syntax5">0</span>, <span class="syntax5">0</span>, win32con.DI_NORMAL)
win32gui.<span class="syntax6">SelectObject</span>(hdcBitmap, hbmOld)
win32gui.<span class="syntax6">DeleteDC</span>(hdcBitmap)
<span class="syntax8">return</span> hbm
<span class="syntax8">def</span> <span class="syntax6">command</span>(self, hwnd, msg, wparam, lparam):
<span class="syntax9">id</span> <span class="syntax18">=</span> win32gui.<span class="syntax6">LOWORD</span>(wparam)
self.<span class="syntax6">execute_menu_option</span>(<span class="syntax9">id</span>)
<span class="syntax8">def</span> <span class="syntax6">execute_menu_option</span>(self, <span class="syntax9">id</span>):
menu_action <span class="syntax18">=</span> self.menu_actions_by_id[<span class="syntax9">id</span>]
<span class="syntax8">if</span> menu_action <span class="syntax18">=</span><span class="syntax18">=</span> self.QUIT:
win32gui.<span class="syntax6">DestroyWindow</span>(self.hwnd)
<span class="syntax8">else</span>:
<span class="syntax6">menu_action</span>(self)
<span class="syntax8">def</span> <span class="syntax6">non_string_iterable</span>(obj):
<span class="syntax8">try</span>:
<span class="syntax9">iter</span>(obj)
<span class="syntax8">except</span> <span class="syntax10">TypeError</span>:
<span class="syntax8">return</span> <span class="syntax10">False</span>
<span class="syntax8">else</span>:
<span class="syntax8">return</span> <span class="syntax8">not</span> <span class="syntax9">isinstance</span>(obj, basestring)
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">Minimal</span><span class="syntax1"> </span><span class="syntax1">self</span><span class="syntax1"> </span><span class="syntax1">test.</span><span class="syntax1"> </span><span class="syntax1">You'll</span><span class="syntax1"> </span><span class="syntax1">need</span><span class="syntax1"> </span><span class="syntax1">a</span><span class="syntax1"> </span><span class="syntax1">bunch</span><span class="syntax1"> </span><span class="syntax1">of</span><span class="syntax1"> </span><span class="syntax1">ICO</span><span class="syntax1"> </span><span class="syntax1">files</span><span class="syntax1"> </span><span class="syntax1">in</span><span class="syntax1"> </span><span class="syntax1">the</span><span class="syntax1"> </span><span class="syntax1">current</span><span class="syntax1"> </span><span class="syntax1">working</span>
<span class="syntax1">#</span><span class="syntax1"> </span><span class="syntax1">directory</span><span class="syntax1"> </span><span class="syntax1">in</span><span class="syntax1"> </span><span class="syntax1">order</span><span class="syntax1"> </span><span class="syntax1">for</span><span class="syntax1"> </span><span class="syntax1">this</span><span class="syntax1"> </span><span class="syntax1">to</span><span class="syntax1"> </span><span class="syntax1">work...</span>
<span class="syntax8">if</span> <span class="syntax10">__name__</span> <span class="syntax18">=</span><span class="syntax18">=</span> <span class="syntax13">'</span><span class="syntax13">__main__</span><span class="syntax13">'</span>:
<span class="syntax8">import</span> itertools, glob
icons <span class="syntax18">=</span> itertools.<span class="syntax6">cycle</span>(glob.<span class="syntax6">glob</span>(<span class="syntax13">'</span><span class="syntax13">*</span><span class="syntax13">.</span><span class="syntax13">ico</span><span class="syntax13">'</span>))
hover_text <span class="syntax18">=</span> <span class="syntax13">"</span><span class="syntax13">SysTrayIcon</span><span class="syntax13">.</span><span class="syntax13">py</span><span class="syntax13"> </span><span class="syntax13">Demo</span><span class="syntax13">"</span>
<span class="syntax8">def</span> <span class="syntax6">hello</span>(sysTrayIcon): <span class="syntax8">print</span> <span class="syntax13">"</span><span class="syntax13">Hello</span><span class="syntax13"> </span><span class="syntax13">World</span><span class="syntax13">.</span><span class="syntax13">"</span>
<span class="syntax8">def</span> <span class="syntax6">simon</span>(sysTrayIcon): <span class="syntax8">print</span> <span class="syntax13">"</span><span class="syntax13">Hello</span><span class="syntax13"> </span><span class="syntax13">Simon</span><span class="syntax13">.</span><span class="syntax13">"</span>
<span class="syntax8">def</span> <span class="syntax6">switch_icon</span>(sysTrayIcon):
sysTrayIcon.icon <span class="syntax18">=</span> icons.<span class="syntax6">next</span>()
sysTrayIcon.<span class="syntax6">refresh_icon</span>()
menu_options <span class="syntax18">=</span> ((<span class="syntax13">'</span><span class="syntax13">Say</span><span class="syntax13"> </span><span class="syntax13">Hello</span><span class="syntax13">'</span>, icons.<span class="syntax6">next</span>(), hello),
(<span class="syntax13">'</span><span class="syntax13">Switch</span><span class="syntax13"> </span><span class="syntax13">Icon</span><span class="syntax13">'</span>, <span class="syntax10">None</span>, switch_icon),
(<span class="syntax13">'</span><span class="syntax13">A</span><span class="syntax13"> </span><span class="syntax13">sub</span><span class="syntax13">-</span><span class="syntax13">menu</span><span class="syntax13">'</span>, icons.<span class="syntax6">next</span>(), ((<span class="syntax13">'</span><span class="syntax13">Say</span><span class="syntax13"> </span><span class="syntax13">Hello</span><span class="syntax13"> </span><span class="syntax13">to</span><span class="syntax13"> </span><span class="syntax13">Simon</span><span class="syntax13">'</span>, icons.<span class="syntax6">next</span>(), simon),
(<span class="syntax13">'</span><span class="syntax13">Switch</span><span class="syntax13"> </span><span class="syntax13">Icon</span><span class="syntax13">'</span>, icons.<span class="syntax6">next</span>(), switch_icon),
))
)
<span class="syntax8">def</span> <span class="syntax6">bye</span>(sysTrayIcon): <span class="syntax8">print</span> <span class="syntax13">'</span><span class="syntax13">Bye</span><span class="syntax13">,</span><span class="syntax13"> </span><span class="syntax13">then</span><span class="syntax13">.</span><span class="syntax13">'</span>
<span class="syntax6">SysTrayIcon</span>(icons.<span class="syntax6">next</span>(), hover_text, menu_options, on_quit<span class="syntax18">=</span>bye, default_menu_index<span class="syntax18">=</span><span class="syntax5">1</span>)
| 103.03481
| 917
| 0.649344
| 4,100
| 32,559
| 5.099024
| 0.087805
| 0.316847
| 0.32273
| 0.274562
| 0.843873
| 0.806563
| 0.748493
| 0.689515
| 0.509901
| 0.468765
| 0
| 0.046944
| 0.139654
| 32,559
| 316
| 918
| 103.03481
| 0.699379
| 0.281028
| 0
| 0.25
| 0
| 0.03169
| 0.253969
| 0.063845
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.003521
| 0.03169
| null | null | 0.017606
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 8 | 46d1d36ab2ad16a0e971256427767b052d25d460 | 96 | py | Python
| utest/world.py | guiyangwu/tst | 92b3f9cd30984cc0714eec1fd7d5183bbe44b6d0 | ["ECL-2.0", "Apache-2.0"] | null | null | null
| utest/world.py | guiyangwu/tst | 92b3f9cd30984cc0714eec1fd7d5183bbe44b6d0 | ["ECL-2.0", "Apache-2.0"] | 7 | 2016-10-29T23:54:04.000Z | 2016-11-30T14:07:08.000Z
| utest/world.py | guiyangwu/tst | 92b3f9cd30984cc0714eec1fd7d5183bbe44b6d0 | ["ECL-2.0", "Apache-2.0"] | null | null | null |
import logging

def print_world(*args, **kargs):
    logging.debug("print_world, I'm a keyword.")
| 19.2
| 45
| 0.71875
| 15
| 96
| 4.466667
| 0.8
| 0.298507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 96
| 4
| 46
| 24
| 0.797619
| 0
| 0
| 0
| 0
| 0
| 0.284211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 1
| 0 | 8 | 46f05045317830eb1dcde53fd6008d859ea532e2 | 4,898 | py | Python
| test/runtime/operators_test/concat_test.py | steerapi/webdnn | 1df51cc094e5a528cfd3452c264905708eadb491 | ["MIT"] | 1 | 2021-04-09T15:55:35.000Z | 2021-04-09T15:55:35.000Z
| test/runtime/operators_test/concat_test.py | steerapi/webdnn | 1df51cc094e5a528cfd3452c264905708eadb491 | ["MIT"] | null | null | null
| test/runtime/operators_test/concat_test.py | steerapi/webdnn | 1df51cc094e5a528cfd3452c264905708eadb491 | ["MIT"] | null | null | null |
import numpy as np

from test.util import generate_kernel_test_case
from webdnn.graph.axis import Axis
from webdnn.graph.graph import Graph
from webdnn.graph.operators.concat import Concat
from webdnn.graph.order import OrderNHWC, OrderCNHW, OrderCHWN, OrderNCHW, OrderNC
from webdnn.graph.variable import Variable


def test_2d():
    vx1 = np.random.rand(2, 3)
    vx2 = np.random.rand(2, 3)
    vx3 = np.random.rand(2, 3)
    vx4 = np.random.rand(2, 3)
    vy = np.concatenate((vx1, vx2, vx3, vx4), 0)

    x1 = Variable(vx1.shape, order=OrderNC)
    x2 = Variable(vx2.shape, order=OrderNC)
    x3 = Variable(vx3.shape, order=OrderNC)
    x4 = Variable(vx4.shape, order=OrderNC)
    y, = Concat(None, axis=Axis.N)(x1, x2, x3, x4)

    generate_kernel_test_case(
        description=f"concat_2d",
        graph=Graph([x1, x2, x3, x4], [y]),
        inputs={
            x1: vx1,
            x2: vx2,
            x3: vx3,
            x4: vx4
        },
        expected={y: vy}
    )


def test_2d_odd():
    vx1 = np.random.rand(2, 3)
    vx2 = np.random.rand(2, 3)
    vx3 = np.random.rand(2, 3)
    vx4 = np.random.rand(2, 3)
    vx5 = np.random.rand(2, 3)
    vy = np.concatenate((vx1, vx2, vx3, vx4, vx5), 0)

    x1 = Variable(vx1.shape, order=OrderNC)
    x2 = Variable(vx2.shape, order=OrderNC)
    x3 = Variable(vx3.shape, order=OrderNC)
    x4 = Variable(vx4.shape, order=OrderNC)
    x5 = Variable(vx5.shape, order=OrderNC)
    y, = Concat(None, axis=Axis.N)(x1, x2, x3, x4, x5)

    generate_kernel_test_case(
        description=f"concat_2d_odd",
        graph=Graph([x1, x2, x3, x4, x5], [y]),
        inputs={
            x1: vx1,
            x2: vx2,
            x3: vx3,
            x4: vx4,
            x5: vx5
        },
        expected={y: vy}
    )


def test_major_axis():
    vx1 = np.random.rand(2, 3, 4, 5)
    vx2 = np.random.rand(2, 3, 4, 5)
    vx3 = np.random.rand(2, 3, 4, 5)
    vx4 = np.random.rand(2, 3, 4, 5)
    vy = np.concatenate((vx1, vx2, vx3, vx4), 0)

    x1 = Variable(vx1.shape, order=OrderNHWC)
    x2 = Variable(vx2.shape, order=OrderNHWC)
    x3 = Variable(vx3.shape, order=OrderNHWC)
    x4 = Variable(vx4.shape, order=OrderNHWC)
    y, = Concat(None, axis=Axis.N)(x1, x2, x3, x4)

    generate_kernel_test_case(
        description=f"concat_in_major_axis",
        graph=Graph([x1, x2, x3, x4], [y]),
        inputs={
            x1: vx1,
            x2: vx2,
            x3: vx3,
            x4: vx4
        },
        expected={y: vy}
    )


def test_minor_axis():
    vx1 = np.random.rand(2, 3, 4, 5)
    vx2 = np.random.rand(2, 3, 4, 5)
    vx3 = np.random.rand(2, 3, 4, 5)
    vx4 = np.random.rand(2, 3, 4, 5)
    vy = np.concatenate((vx1, vx2, vx3, vx4), 3)

    x1 = Variable(vx1.shape, order=OrderNHWC)
    x2 = Variable(vx2.shape, order=OrderNHWC)
    x3 = Variable(vx3.shape, order=OrderNHWC)
    x4 = Variable(vx4.shape, order=OrderNHWC)
    y, = Concat(None, axis=Axis.C)(x1, x2, x3, x4)

    generate_kernel_test_case(
        description=f"concat_in_minor_axis",
        graph=Graph([x1, x2, x3, x4], [y]),
        inputs={
            x1: vx1,
            x2: vx2,
            x3: vx3,
            x4: vx4
        },
        expected={y: vy}
    )


def test_middle_axis():
    vx1 = np.random.rand(2, 3, 4, 5)
    vx2 = np.random.rand(2, 3, 4, 5)
    vx3 = np.random.rand(2, 3, 4, 5)
    vx4 = np.random.rand(2, 3, 4, 5)
    vy = np.concatenate((vx1, vx2, vx3, vx4), 1)

    x1 = Variable(vx1.shape, order=OrderNHWC)
    x2 = Variable(vx2.shape, order=OrderNHWC)
    x3 = Variable(vx3.shape, order=OrderNHWC)
    x4 = Variable(vx4.shape, order=OrderNHWC)
    y, = Concat(None, axis=Axis.H)(x1, x2, x3, x4)

    generate_kernel_test_case(
        description=f"concat_in_middle_axis",
        graph=Graph([x1, x2, x3, x4], [y]),
        inputs={
            x1: vx1,
            x2: vx2,
            x3: vx3,
            x4: vx4
        },
        expected={y: vy}
    )


def test_mix_order():
    vx1 = np.random.rand(2, 3, 4, 5)
    vx2 = np.random.rand(2, 3, 4, 5)
    vx3 = np.random.rand(2, 3, 4, 5)
    vx4 = np.random.rand(2, 3, 4, 5)
    vy = np.concatenate((vx1, vx2, vx3, vx4), 1)

    x1 = Variable(vx1.shape, order=OrderNHWC)
    x2 = Variable(vx2.shape, order=OrderNHWC)
    x3 = Variable(vx3.shape, order=OrderNHWC)
    x4 = Variable(vx4.shape, order=OrderNHWC)

    x2.change_order(OrderCNHW)
    vx2 = np.rollaxis(vx2, 3, 0)

    x3.change_order(OrderCHWN)
    vx3 = np.rollaxis(np.rollaxis(vx3, 3, 0), 1, 4)

    x4.change_order(OrderNCHW)
    vx4 = np.rollaxis(vx4, 3, 1)

    y, = Concat(None, axis=Axis.H)(x1, x2, x3, x4)
    y.change_order(OrderNHWC)

    generate_kernel_test_case(
        description=f"concat_mix_order",
        graph=Graph([x1, x2, x3, x4], [y]),
        inputs={
            x1: vx1,
            x2: vx2,
            x3: vx3,
            x4: vx4
        },
        expected={y: vy}
    )
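The expected values in these tests hinge on which positional axis `np.concatenate` joins along (under OrderNHWC, axis 0 is N, 1 is H, 3 is C). A quick shape check in plain NumPy, independent of webdnn, illustrates why the tests call them major, middle, and minor:

import numpy as np

a = np.random.rand(2, 3, 4, 5)
b = np.random.rand(2, 3, 4, 5)

print(np.concatenate((a, b), 0).shape)  # (4, 3, 4, 5)  - "major" axis N
print(np.concatenate((a, b), 1).shape)  # (2, 6, 4, 5)  - "middle" axis H
print(np.concatenate((a, b), 3).shape)  # (2, 3, 4, 10) - "minor" axis C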
| 27.363128
| 82
| 0.564108
| 723
| 4,898
| 3.75242
| 0.085754
| 0.073719
| 0.110579
| 0.119794
| 0.813491
| 0.805013
| 0.798378
| 0.783634
| 0.767416
| 0.767416
| 0
| 0.088311
| 0.285627
| 4,898
| 178
| 83
| 27.516854
| 0.687053
| 0
| 0
| 0.684564
| 1
| 0
| 0.020212
| 0.004287
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040268
| false
| 0
| 0.04698
| 0
| 0.087248
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | 46f97c98037906d8d0201bb01af5ebaefd56a7d8 | 82 | py | Python
| python/baseline/dy/classify/__init__.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | ["Apache-2.0"] | 2 | 2018-07-06T02:01:12.000Z | 2018-07-06T02:01:14.000Z
| python/baseline/dy/classify/__init__.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | ["Apache-2.0"] | null | null | null
| python/baseline/dy/classify/__init__.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | ["Apache-2.0"] | 3 | 2019-05-27T04:52:21.000Z | 2022-02-15T00:22:53.000Z |
from baseline.dy.classify.train import *
from baseline.dy.classify.model import *
| 27.333333
| 40
| 0.804878
| 12
| 82
| 5.5
| 0.583333
| 0.363636
| 0.424242
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 82
| 2
| 41
| 41
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0 | 8 | 20044b1024a76ca161dae865daf61d4c16669fdc | 1,562 | py | Python
| lxml_xpath_ipaddress/ip4or6.py | jeremyschulman/lxml-xpath-ipaddress | d7e884644bd6fae2ca16f8a297ceb471b67e8035 | ["MIT"] | 1 | 2018-10-03T20:39:16.000Z | 2018-10-03T20:39:16.000Z
| lxml_xpath_ipaddress/ip4or6.py | jeremyschulman/lxmlextipaddress | d7e884644bd6fae2ca16f8a297ceb471b67e8035 | ["MIT"] | null | null | null
| lxml_xpath_ipaddress/ip4or6.py | jeremyschulman/lxmlextipaddress | d7e884644bd6fae2ca16f8a297ceb471b67e8035 | ["MIT"] | null | null | null |
from lxml_xpath_ipaddress.ip4 import *
from lxml_xpath_ipaddress.ip6 import *

# -----------------------------------------------------------------------------------------------------------------
# IP any family
# -----------------------------------------------------------------------------------------------------------------


def is_any_ip(value):
    """
    Determine if this given value is an IP address, an IP network value, or an IP interface value;
    as defined by the ipaddress module; either IPv4 or IPv6.

    Parameters
    ----------
    value : str
        The value to check

    Returns
    -------
    bool
        True if the value is any valid IP thing
        False otherwise
    """
    return is_any_ip4(value) or is_any_ip6(value)


def is_host_ip(value):
    """
    Determine if this given value is an IP address as defined by the ipaddress module;
    either IPv4 or IPv6.

    Parameters
    ----------
    value : str
        The value to check

    Returns
    -------
    bool
        True if the value is any valid IP address
        False otherwise
    """
    return is_host_ip4(value) or is_host_ip6(value)


def is_net_ip(value):
    """
    Determine if this given value is an IP network value, or an IP interface value;
    as defined by the ipaddress module; either IPv4 or IPv6.

    Parameters
    ----------
    value : str
        The value to check

    Returns
    -------
    bool
        True if the value is any valid IP thing
        False otherwise
    """
    return is_net_ip4(value) or is_net_ip6(value)
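The family-specific helpers (`is_any_ip4`, `is_any_ip6`, and friends) come from the star-imported `ip4`/`ip6` modules, whose bodies are not part of this record; the docstrings say they defer to the standard `ipaddress` module. A sketch of the kind of check the v4 variant would perform (an assumption for illustration, not the package's actual code):

import ipaddress

def is_any_ip4(value):
    # Accept an address, network, or interface string, per the docstring above.
    for parse in (ipaddress.IPv4Address, ipaddress.IPv4Network, ipaddress.IPv4Interface):
        try:
            parse(value)
            return True
        except ValueError:
            continue
    return False

print(is_any_ip4("192.0.2.1"))     # True  (address)
print(is_any_ip4("192.0.2.0/24"))  # True  (network)
print(is_any_ip4("example"))       # False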
| 23.313433
| 115
| 0.527529
| 192
| 1,562
| 4.177083
| 0.229167
| 0.037406
| 0.05985
| 0.067332
| 0.741895
| 0.741895
| 0.741895
| 0.741895
| 0.741895
| 0.741895
| 0
| 0.012059
| 0.256722
| 1,562
| 66
| 116
| 23.666667
| 0.678725
| 0.684379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.25
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0 | 7 | 200637bf17769bc9254a5910ebae987b3c13cd84 | 15,909 | py | Python
| sdk/python/pulumi_pagerduty/maintenance_window.py | pulumi/pulumi-pagerduty | 1c08849cda3d5fccf5eb9f615dc004b1f8f90555 | ["ECL-2.0", "Apache-2.0"] | 5 | 2020-05-27T08:18:35.000Z | 2021-07-31T08:40:03.000Z
| sdk/python/pulumi_pagerduty/maintenance_window.py | pulumi/pulumi-pagerduty | 1c08849cda3d5fccf5eb9f615dc004b1f8f90555 | ["ECL-2.0", "Apache-2.0"] | 48 | 2020-05-26T10:59:40.000Z | 2022-03-31T15:41:54.000Z
| sdk/python/pulumi_pagerduty/maintenance_window.py | pulumi/pulumi-pagerduty | 1c08849cda3d5fccf5eb9f615dc004b1f8f90555 | ["ECL-2.0", "Apache-2.0"] | 1 | 2020-05-26T17:51:56.000Z | 2020-05-26T17:51:56.000Z |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities

__all__ = ['MaintenanceWindowArgs', 'MaintenanceWindow']


@pulumi.input_type
class MaintenanceWindowArgs:
    def __init__(__self__, *,
                 end_time: pulumi.Input[str],
                 services: pulumi.Input[Sequence[pulumi.Input[str]]],
                 start_time: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a MaintenanceWindow resource.
        :param pulumi.Input[str] end_time: The maintenance window's end time. This is when the services will start creating incidents again. This date must be in the future and after the `start_time`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] services: A list of service IDs to include in the maintenance window.
        :param pulumi.Input[str] start_time: The maintenance window's start time. This is when the services will stop creating incidents. If this date is in the past, it will be updated to be the current time.
        :param pulumi.Input[str] description: A description for the maintenance window.
        """
        pulumi.set(__self__, "end_time", end_time)
        pulumi.set(__self__, "services", services)
        pulumi.set(__self__, "start_time", start_time)
        if description is None:
            description = 'Managed by Pulumi'
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter(name="endTime")
    def end_time(self) -> pulumi.Input[str]:
        """
        The maintenance window's end time. This is when the services will start creating incidents again. This date must be in the future and after the `start_time`.
        """
        return pulumi.get(self, "end_time")

    @end_time.setter
    def end_time(self, value: pulumi.Input[str]):
        pulumi.set(self, "end_time", value)

    @property
    @pulumi.getter
    def services(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        A list of service IDs to include in the maintenance window.
        """
        return pulumi.get(self, "services")

    @services.setter
    def services(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "services", value)

    @property
    @pulumi.getter(name="startTime")
    def start_time(self) -> pulumi.Input[str]:
        """
        The maintenance window's start time. This is when the services will stop creating incidents. If this date is in the past, it will be updated to be the current time.
        """
        return pulumi.get(self, "start_time")

    @start_time.setter
    def start_time(self, value: pulumi.Input[str]):
        pulumi.set(self, "start_time", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        A description for the maintenance window.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)


@pulumi.input_type
class _MaintenanceWindowState:
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 end_time: Optional[pulumi.Input[str]] = None,
                 services: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 start_time: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering MaintenanceWindow resources.
        :param pulumi.Input[str] description: A description for the maintenance window.
        :param pulumi.Input[str] end_time: The maintenance window's end time. This is when the services will start creating incidents again. This date must be in the future and after the `start_time`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] services: A list of service IDs to include in the maintenance window.
        :param pulumi.Input[str] start_time: The maintenance window's start time. This is when the services will stop creating incidents. If this date is in the past, it will be updated to be the current time.
        """
        if description is None:
            description = 'Managed by Pulumi'
        if description is not None:
            pulumi.set(__self__, "description", description)
        if end_time is not None:
            pulumi.set(__self__, "end_time", end_time)
        if services is not None:
            pulumi.set(__self__, "services", services)
        if start_time is not None:
            pulumi.set(__self__, "start_time", start_time)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        A description for the maintenance window.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="endTime")
    def end_time(self) -> Optional[pulumi.Input[str]]:
        """
        The maintenance window's end time. This is when the services will start creating incidents again. This date must be in the future and after the `start_time`.
        """
        return pulumi.get(self, "end_time")

    @end_time.setter
    def end_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "end_time", value)

    @property
    @pulumi.getter
    def services(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of service IDs to include in the maintenance window.
        """
        return pulumi.get(self, "services")

    @services.setter
    def services(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "services", value)

    @property
    @pulumi.getter(name="startTime")
    def start_time(self) -> Optional[pulumi.Input[str]]:
        """
        The maintenance window's start time. This is when the services will stop creating incidents. If this date is in the past, it will be updated to be the current time.
        """
        return pulumi.get(self, "start_time")

    @start_time.setter
    def start_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "start_time", value)


class MaintenanceWindow(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 end_time: Optional[pulumi.Input[str]] = None,
                 services: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 start_time: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        A [maintenance window](https://developer.pagerduty.com/api-reference/reference/REST/openapiv3.json/paths/~1maintenance_windows/get) is used to temporarily disable one or more services for a set period of time. No incidents will be triggered and no notifications will be received while a service is disabled by a maintenance window.

        Maintenance windows are specified to start at a certain time and end after they have begun. Once started, a maintenance window cannot be deleted; it can only be ended immediately to re-enable the service.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_pagerduty as pagerduty

        example = pagerduty.MaintenanceWindow("example",
            start_time="2015-11-09T20:00:00-05:00",
            end_time="2015-11-09T22:00:00-05:00",
            services=[pagerduty_service["example"]["id"]])
        ```

        ## Import

        Maintenance windows can be imported using the `id`, e.g.

        ```sh
        $ pulumi import pagerduty:index/maintenanceWindow:MaintenanceWindow main PLBP09X
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: A description for the maintenance window.
        :param pulumi.Input[str] end_time: The maintenance window's end time. This is when the services will start creating incidents again. This date must be in the future and after the `start_time`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] services: A list of service IDs to include in the maintenance window.
        :param pulumi.Input[str] start_time: The maintenance window's start time. This is when the services will stop creating incidents. If this date is in the past, it will be updated to be the current time.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: MaintenanceWindowArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        A [maintenance window](https://developer.pagerduty.com/api-reference/reference/REST/openapiv3.json/paths/~1maintenance_windows/get) is used to temporarily disable one or more services for a set period of time. No incidents will be triggered and no notifications will be received while a service is disabled by a maintenance window.

        Maintenance windows are specified to start at a certain time and end after they have begun. Once started, a maintenance window cannot be deleted; it can only be ended immediately to re-enable the service.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_pagerduty as pagerduty

        example = pagerduty.MaintenanceWindow("example",
            start_time="2015-11-09T20:00:00-05:00",
            end_time="2015-11-09T22:00:00-05:00",
            services=[pagerduty_service["example"]["id"]])
        ```

        ## Import

        Maintenance windows can be imported using the `id`, e.g.

        ```sh
        $ pulumi import pagerduty:index/maintenanceWindow:MaintenanceWindow main PLBP09X
        ```

        :param str resource_name: The name of the resource.
        :param MaintenanceWindowArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        resource_args, opts = _utilities.get_resource_args_opts(MaintenanceWindowArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 end_time: Optional[pulumi.Input[str]] = None,
                 services: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 start_time: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = MaintenanceWindowArgs.__new__(MaintenanceWindowArgs)

            if description is None:
                description = 'Managed by Pulumi'
            __props__.__dict__["description"] = description
            if end_time is None and not opts.urn:
                raise TypeError("Missing required property 'end_time'")
            __props__.__dict__["end_time"] = end_time
            if services is None and not opts.urn:
                raise TypeError("Missing required property 'services'")
            __props__.__dict__["services"] = services
            if start_time is None and not opts.urn:
                raise TypeError("Missing required property 'start_time'")
            __props__.__dict__["start_time"] = start_time
        super(MaintenanceWindow, __self__).__init__(
            'pagerduty:index/maintenanceWindow:MaintenanceWindow',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            description: Optional[pulumi.Input[str]] = None,
            end_time: Optional[pulumi.Input[str]] = None,
            services: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            start_time: Optional[pulumi.Input[str]] = None) -> 'MaintenanceWindow':
        """
        Get an existing MaintenanceWindow resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: A description for the maintenance window.
        :param pulumi.Input[str] end_time: The maintenance window's end time. This is when the services will start creating incidents again. This date must be in the future and after the `start_time`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] services: A list of service IDs to include in the maintenance window.
        :param pulumi.Input[str] start_time: The maintenance window's start time. This is when the services will stop creating incidents. If this date is in the past, it will be updated to be the current time.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _MaintenanceWindowState.__new__(_MaintenanceWindowState)

        __props__.__dict__["description"] = description
        __props__.__dict__["end_time"] = end_time
        __props__.__dict__["services"] = services
        __props__.__dict__["start_time"] = start_time
        return MaintenanceWindow(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """
        A description for the maintenance window.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="endTime")
    def end_time(self) -> pulumi.Output[str]:
        """
        The maintenance window's end time. This is when the services will start creating incidents again. This date must be in the future and after the `start_time`.
        """
        return pulumi.get(self, "end_time")

    @property
    @pulumi.getter
    def services(self) -> pulumi.Output[Sequence[str]]:
        """
        A list of service IDs to include in the maintenance window.
        """
        return pulumi.get(self, "services")

    @property
    @pulumi.getter(name="startTime")
    def start_time(self) -> pulumi.Output[str]:
        """
        The maintenance window's start time. This is when the services will stop creating incidents. If this date is in the past, it will be updated to be the current time.
        """
        return pulumi.get(self, "start_time")
| 46.517544
| 339
| 0.658998
| 1,977
| 15,909
| 5.140111
| 0.103187
| 0.07469
| 0.074395
| 0.045463
| 0.817851
| 0.799646
| 0.770911
| 0.740405
| 0.730958
| 0.720724
| 0
| 0.006774
| 0.248413
| 15,909
| 341
| 340
| 46.653959
| 0.843104
| 0.414231
| 0
| 0.615385
| 1
| 0
| 0.094884
| 0.008487
| 0
| 0
| 0
| 0
| 0
| 1
| 0.148352
| false
| 0.005495
| 0.027473
| 0
| 0.263736
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | 646b81124801aa70c370b7af34b8f4be10395117 | 35 | py | Python
| Utils4R/static/__init__.py | ChangxingJiang/Utils4R | e8ef687107f5d444604fb5750c4de99b0faeb722 | ["Apache-2.0"] | null | null | null
| Utils4R/static/__init__.py | ChangxingJiang/Utils4R | e8ef687107f5d444604fb5750c4de99b0faeb722 | ["Apache-2.0"] | null | null | null
| Utils4R/static/__init__.py | ChangxingJiang/Utils4R | e8ef687107f5d444604fb5750c4de99b0faeb722 | ["Apache-2.0"] | null | null | null |
from .user_agent import USER_AGENT
| 17.5
| 34
| 0.857143
| 6
| 35
| 4.666667
| 0.666667
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b378c4a561dba945267cf9c556037c97d2521fb5
| 6,175
|
py
|
Python
|
ext/ANTsPyNet/antspynet/architectures/create_denoising_auto_encoder_super_resolution_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | 2
|
2021-11-16T10:00:33.000Z
|
2021-12-13T02:57:40.000Z
|
ext/ANTsPyNet/antspynet/architectures/create_denoising_auto_encoder_super_resolution_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | null | null | null |
ext/ANTsPyNet/antspynet/architectures/create_denoising_auto_encoder_super_resolution_model.py
|
tsmonteiro/fmri_proc
|
ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1
|
[
"MIT"
] | 1
|
2021-12-13T02:57:27.000Z
|
2021-12-13T02:57:27.000Z
|
from keras.models import Model
from keras.layers import (Input, Average, Add,
                          Conv2D, Conv2DTranspose,
                          Conv3D, Conv3DTranspose)


def create_denoising_auto_encoder_super_resolution_model_2d(input_image_size,
                                                            convolution_kernel_sizes=[(3, 3), (5, 5)],
                                                            number_of_encoding_layers=2,
                                                            number_of_filters=64
                                                           ):
    """
    2-D implementation of the denoising autoencoder image super resolution deep learning architecture.

    Arguments
    ---------
    input_image_size : tuple of length 3
        Used for specifying the input tensor shape. The shape (or dimension) of
        that tensor is the image dimensions followed by the number of channels
        (e.g., red, green, and blue).

    convolution_kernel_sizes : list of 2-d tuples
        specifies the kernel size at each convolution layer. Default values are
        the same as given in the original paper. The length of kernel size list
        must be 1 greater than the tuple length of the number of filters.

    number_of_encoding_layers : integer
        The number of encoding layers.

    number_of_filters : integer
        The number of filters for each encoding layer.

    Returns
    -------
    Keras model
        A 2-D Keras model defining the network.

    Example
    -------
    >>> model = create_denoising_auto_encoder_super_resolution_model_2d((128, 128, 1))
    >>> model.summary()
    """

    inputs = Input(shape = input_image_size)

    outputs = inputs
    encoding_convolution_layers = []
    for i in range(number_of_encoding_layers):
        if i == 0:
            outputs = Conv2D(filters=number_of_filters,
                             kernel_size=convolution_kernel_sizes[0],
                             activation='relu',
                             padding='same')(outputs)
        else:
            layer = Conv2D(filters=number_of_filters,
                           kernel_size=convolution_kernel_sizes[0],
                           activation='relu',
                           padding='same')(outputs)
            encoding_convolution_layers.append(layer)

    outputs = encoding_convolution_layers[-1]
    for i in range(number_of_encoding_layers):
        index = len(encoding_convolution_layers) - i - 1
        deconvolution = Conv2DTranspose(filters=number_of_filters,
                                        kernel_size=convolution_kernel_sizes[0],
                                        padding='same',
                                        activation='relu')(outputs)
        outputs = Add()([encoding_convolution_layers[index], deconvolution])

    number_of_channels = input_image_size[-1]
    outputs = Conv2D(filters=number_of_channels,
                     kernel_size=convolution_kernel_sizes[1],
                     activation='linear',
                     padding='same')(outputs)

    sr_model = Model(inputs=inputs, outputs=outputs)

    return(sr_model)


def create_denoising_auto_encoder_super_resolution_model_3d(input_image_size,
                                                            convolution_kernel_sizes=[(3, 3, 3), (5, 5, 5)],
                                                            number_of_encoding_layers=2,
                                                            number_of_filters=64
                                                           ):
    """
    3-D implementation of the denoising autoencoder image super resolution deep learning architecture.

    Arguments
    ---------
    input_image_size : tuple of length 3
        Used for specifying the input tensor shape. The shape (or dimension) of
        that tensor is the image dimensions followed by the number of channels
        (e.g., red, green, and blue).

    convolution_kernel_sizes : list of 3-d tuples
        specifies the kernel size at each convolution layer. Default values are
        the same as given in the original paper. The length of kernel size list
        must be 1 greater than the tuple length of the number of filters.

    number_of_encoding_layers : integer
        The number of encoding layers.

    number_of_filters : integer
        The number of filters for each encoding layer.

    Returns
    -------
    Keras model
        A 3-D Keras model defining the network.

    Example
    -------
    >>> model = create_denoising_auto_encoder_super_resolution_model_3d((128, 128, 128, 1))
    >>> model.summary()
    """

    inputs = Input(shape = input_image_size)

    outputs = inputs
    encoding_convolution_layers = []
    for i in range(number_of_encoding_layers):
        if i == 0:
            outputs = Conv3D(filters=number_of_filters,
                             kernel_size=convolution_kernel_sizes[0],
                             activation='relu',
                             padding='same')(outputs)
        else:
            layer = Conv3D(filters=number_of_filters,
                           kernel_size=convolution_kernel_sizes[0],
                           activation='relu',
                           padding='same')(outputs)
            encoding_convolution_layers.append(layer)

    outputs = encoding_convolution_layers[-1]
    for i in range(number_of_encoding_layers):
        index = len(encoding_convolution_layers) - i - 1
        deconvolution = Conv3DTranspose(filters=number_of_filters,
                                        kernel_size=convolution_kernel_sizes[0],
                                        padding='same',
                                        activation='relu')(outputs)
        outputs = Add()([encoding_convolution_layers[index], deconvolution])

    number_of_channels = input_image_size[-1]
    outputs = Conv3D(filters=number_of_channels,
                     kernel_size=convolution_kernel_sizes[1],
                     activation='linear',
                     padding='same')(outputs)

    sr_model = Model(inputs=inputs, outputs=outputs)

    return(sr_model)
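The factory functions above only build the graph; compiling and training are left to the caller. A minimal sketch of wiring one up for a denoising task, under the assumption of paired noisy/clean arrays (`x_noisy`/`x_clean` are hypothetical random stand-ins; the compile/fit calls are standard Keras API):

import numpy as np

model = create_denoising_auto_encoder_super_resolution_model_2d((128, 128, 1))
model.compile(optimizer='adam', loss='mse')

# Hypothetical paired data: noisy inputs and clean targets of matching shape.
x_noisy = np.random.rand(8, 128, 128, 1)
x_clean = np.random.rand(8, 128, 128, 1)
model.fit(x_noisy, x_clean, epochs=1, batch_size=4)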
| 38.836478
| 108
| 0.569879
| 645
| 6,175
| 5.220155
| 0.168992
| 0.07128
| 0.06237
| 0.07722
| 0.957529
| 0.953965
| 0.953965
| 0.953965
| 0.899911
| 0.899911
| 0
| 0.018959
| 0.359352
| 6,175
| 158
| 109
| 39.082278
| 0.832154
| 0.310607
| 0
| 0.783784
| 0
| 0
| 0.016798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027027
| false
| 0
| 0.027027
| 0
| 0.054054
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |