hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
8387849f0cbf70433979ec7eedac71b381a0cd7e
3,426
py
Python
ukpsummarizer-be/cplex/python/docplex/docplex/mp/operand.py
avineshpvs/vldb2018-sherlock
5e116f42f44c50bcb289be3c4b4b76e29b238c18
[ "Apache-2.0" ]
2
2019-01-13T08:41:00.000Z
2021-03-27T22:55:10.000Z
ukpsummarizer-be/cplex/python/docplex/docplex/mp/operand.py
AIPHES/vldb2018-sherlock
3746efa35c4c1769cc4aaeb15aeb9453564e1226
[ "Apache-2.0" ]
null
null
null
ukpsummarizer-be/cplex/python/docplex/docplex/mp/operand.py
AIPHES/vldb2018-sherlock
3746efa35c4c1769cc4aaeb15aeb9453564e1226
[ "Apache-2.0" ]
4
2018-11-06T16:12:55.000Z
2019-08-21T13:22:32.000Z
# -------------------------------------------------------------------------- # Source file provided under Apache License, Version 2.0, January 2004, # http://www.apache.org/licenses/ # (c) Copyright IBM Corp. 2015, 2016 # -------------------------------------------------------------------------- # gendoc: ignore from docplex.mp.utils import iter_emptyset, is_number from docplex.mp.constants import ComparisonType class Operand(object): __slots__ = () def get_constant(self): return 0 def is_constant(self): return False # --- basic subscription api def notify_used(self, user): pass def notify_unsubscribed(self, subscriber): pass def is_in_use(self): return False def notify_modified(self, event): pass # --- def keep(self): return self def resolve(self): # used for lazy expansions pass def get_linear_part(self): return self def __le__(self, rhs): return self._model._qfactory.new_xconstraint(lhs=self, rhs=rhs, comparaison_type=ComparisonType.LE) def __eq__(self, rhs): return self._model._qfactory.new_xconstraint(lhs=self, rhs=rhs, comparaison_type=ComparisonType.EQ) def __ge__(self, rhs): return self._model._qfactory.new_xconstraint(lhs=self, rhs=rhs, comparaison_type=ComparisonType.GE) le = __le__ eq = __eq__ ge = __ge__ class LinearOperand(Operand): # no ctor as used in multiple inheritance def unchecked_get_coef(self, dvar): raise NotImplementedError('unchecked_get_coef missing for class: {0}'.format(self.__class__)) # pragma: no cover def iter_variables(self): """ Iterates over all variables in the expression. Returns: iterator: An iterator over all variables present in the operand. """ for v, k in self.iter_terms(): yield v def iter_terms(self): # iterates over alllinear terms, if any return iter_emptyset() iter_sorted_terms = iter_terms def number_of_terms(self): return sum(1 for _ in self.iter_terms()) def size(self): return self.number_of_terms() def iter_quads(self): return iter_emptyset() def is_constant(self): # redefine this for subclasses. 
return False # pragma: no cover def as_variable(self): # return a variable if the expression is actually one variable, else None return None def is_zero(self): return False # no strict comparisons def __lt__(self, e): self.model.unsupported_relational_operator_error(self, "<", e) def __gt__(self, e): self.model.unsupported_relational_operator_error(self, ">", e) def __contains__(self, dvar): """Overloads operator `in` for an expression and a variable. :param: dvar (:class:`docplex.mp.linear.Var`): A decision variable. Returns: Boolean: True if the variable is present in the expression, else False. """ return self.contains_var(dvar) def contains_var(self, dvar): raise NotImplementedError # pragma: no cover def lock_discrete(self): pass def is_discrete_locked(self): return False
27.190476
122
0.597782
398
3,426
4.894472
0.359296
0.056468
0.030801
0.026181
0.189938
0.189938
0.189938
0.189938
0.189938
0.189938
0
0.006885
0.279335
3,426
126
123
27.190476
0.782098
0.277291
0
0.25
0
0
0.019043
0
0
0
0
0
0
1
0.421875
false
0.078125
0.03125
0.25
0.828125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
8395cc826b6713904055364982e4b1af1c00a2b4
7,601
py
Python
newWorld/seleniumDemo/dbshopTestDemo/GoodsManageDemo.py
CypHelp/TestNewWorldDemo
ee6f73df05756f191c1c56250fa290461fdd1b9a
[ "Apache-2.0" ]
null
null
null
newWorld/seleniumDemo/dbshopTestDemo/GoodsManageDemo.py
CypHelp/TestNewWorldDemo
ee6f73df05756f191c1c56250fa290461fdd1b9a
[ "Apache-2.0" ]
null
null
null
newWorld/seleniumDemo/dbshopTestDemo/GoodsManageDemo.py
CypHelp/TestNewWorldDemo
ee6f73df05756f191c1c56250fa290461fdd1b9a
[ "Apache-2.0" ]
null
null
null
# encoding: utf-8 """ @author: yp @software: PyCharm @file: GoodsManageDemo.py @time: 2019/8/1 0001 16:43 """ from AutoTestPlatform.web.WebDriver import Driver driver = Driver() #登录Dbshop driver.get("http://192.168.1.16/DBshop/admin") driver.find_element_by_id_data("user_name", 'admin') driver.find_element_by_id_data("user_passwd", "123456") driver.find_element_by_xpath('//*[@id="admin_login_form"]/button').click() #-------------------------------------------------------------------------------------------- #进入商品管理,添加商品 driver.find_element_by_xpath('/html/body/div[1]/div/ul[1]/li[3]/a').click() driver.find_element_by_xpath('/html/body/div[1]/div/ul[1]/li[3]/ul/li[1]/a').click() driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/p[2]/a[1]').click() #商品基本信息 driver.find_element_by_id_data('goods_name',"ipad ") driver.find_element_by_id_data('goods_extend_name',"mini5") driver.find_element_by_id_data('goods_item',"0551") driver.find_element_by_id_data('goods_price',"6000") driver.find_element_by_id_data('goods_shop_price',"5999") driver.find_element_by_xpath('//*[@id="goods_a"]/div[2]/div[7]/div/table/tbody/tr/td[2]/input').send_keys("5899") driver.find_element_by_id_data("virtual_sales","1000") driver.find_element_by_id_data("goods_weight","15") driver.switch_to_iframe(driver.find_element_by_id("ueditor_0")) driver.find_element_by_xpath('/html/body').send_keys("最实用的ipad,你值得拥有") driver.switch_to_parent_handle() driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button').click() #对商品进行分类 driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/table/tbody/tr[2]/td[9]/a[1]').click() driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[3]/a').click() driver.find_element_by_id('class_id_14').click() driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() #goods库存 driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[5]/a').click() driver.find_element_by_id_data('goods_stock','1000000') 
driver.find_element_by_id_data('goods_out_of_stock_set','250') driver.find_element_by_id_data('goods_cart_buy_min_num','1') driver.find_element_by_id_data('goods_cart_buy_max_num','99') driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() #优惠价格 driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[6]/a').click() driver.find_element_by_id_data('goods_preferential_price',"4999") driver.find_element_by_id_data('goods_preferential_start_time',"2019-08-05 14:25") driver.find_element_by_id_data('goods_preferential_end_time',"2019-08-09 14:25") driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() #销售规格 driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[7]/a').click() driver.find_element_by_id('ff0000').click() driver.find_element_by_id('other1').click() driver.find_element_by_id_data('price_ff0000other1',"6000") driver.find_element_by_id_data('stock_ff0000other1','100') driver.find_element_by_id_data('item_ff0000other1','0551-001') driver.find_element_by_id_data('weight_ff0000other1',"15") driver.find_element_by_xpath('//*[@id="select_goods_color_size_in"]/tbody/tr/td[8]/table/tbody/tr/td[2]/input').send_keys('2999') driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() #商品属性 driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[8]/a').click() driver.find_element_by_id('attribute_group_id').click() driver.find_element_by_xpath('//*[@id="attribute_group_id"]/option[2]').click() driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() #商品标签 driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[9]/a').click() driver.find_element_by_xpath('//*[@id="goods_l"]/div[2]/div[2]/div/label[1]/input').click() driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() #商品自定义 driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[10]/a').click() 
driver.find_element_by_xpath('//*[@id="goods_f"]/div[2]/div[2]/label/input').send_keys('蔡徐坤!!!必备') driver.find_element_by_xpath('//*[@id="goods_f"]/div[2]/div[2]/div/input').send_keys("你其实不止是会'唱跳rap打篮球',ipad给你带来新世界的one piece~") driver.find_element_by_xpath('//*[@id="goods_f"]/div[2]/div[2]/div/label/input').click() driver.find_element_by_xpath('//*[@id="goods_f"]/div[2]/div[3]/label/input').send_keys("22世纪的大佬们!!!") driver.find_element_by_xpath('//*[@id="goods_f"]/div[2]/div[3]/div/input').send_keys("大佬无处不在,因为这是22世纪,拥有ipad,你离大佬只是一步之遥") driver.find_element_by_xpath('//*[@id="goods_f"]/div[2]/div[3]/div/label/input').click() driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() # #关联商品 # # driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[11]/a').click() # driver.find_element_by_xpath('//*[@id="relationgoods_id"]').execute_script('type="visable"') # sleep(2) # driver.find_element_by_id_data('relationgoods_id','2') # # d = dr.find_element_by_xpath('//*[@id="mainImgclass"]/div[2]/input') # # dr.execute_script('arguments[0].removeAttribute(\"style\")', d) # # driver.find_element_by_id('relationgoods_id').set_element_visable('visable') # # driver.find_element_by_id_data('relationgoods_id',"2") # # driver.find_element_by_xpath('//*[@id="relation_goods_keyword"]').send_keys('索尼').key_down(Keys.ENTER) # # driver.find_element_by_id_data('relationgoods_id','2').set_element_visable("type='visable'") # driver.find_element_by_xpath('//*[@id="goods_n"]/div[2]/div[2]/button').click() # driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() # #相关商品 # driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[12]/a').click() # driver.find_element_by_id_data('related_goods_keyword','苹果(Apple) iPhone X 64GB 深空灰色 移动联通电信全网通4G手机') # driver.find_element_by_xpath('//*[@id="goods_e"]/div[2]/div[2]/button').click() # driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() # # 
#组合商品 # driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[13]/a').click() # driver.find_element_by_id_data('combination_goods_keyword','苹果(Apple) iPhone X 64GB 深空灰色 移动联通电信全网通4G手机') # driver.find_element_by_xpath('//*[@id="goods_m"]/div[2]/div[2]/button').click() # driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() # # # # #商品评价 # driver.find_element_by_xpath('/html/body/div[2]/div/div[2]/div/ul/li[14]/a').click() # # driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[1]').click() # # #保存商品 driver.find_element_by_xpath('//*[@id="sticky_navigation_right"]/button[2]').click() #-------------------------------------------------------------------------------------- #商品分类 driver.find_element_by_xpath('/html/body/div[1]/div/ul[1]/li[3]/a').click() driver.find_element_by_xpath('/html/body/div[1]/div/ul[1]/li[3]/ul/li[2]/a') driver.find_element_by_xpath('/html/body/div[1]/div/ul[1]/li[3]/ul/li[2]/ul/li[2]/a').click() #添加侧边信息 driver.find_element_by_xpath('//*[@id="sticky_navigation"]/p[2]/a[1]').click() driver.find_element_by_id_data('frontside_name','我是你得不到的baba') driver.find_element_by_id_data('frontside_url','https://ask.csdn.net/questions/664268') driver.find_element_by_xpath('//*[@id="frontside_class_id"]/option[10]').click() driver.find_element_by_xpath('//*[@id="sticky_navigation"]/div[2]/button').click() #----------------------------------------------------------------------------------------- #退出登录 driver.find_element_by_xpath('/html/body/div[2]/div/div[1]/p[2]/a[2]').click() driver.close()
46.347561
129
0.719774
1,238
7,601
4.089661
0.163166
0.186846
0.220818
0.318981
0.752123
0.734742
0.689512
0.584634
0.508987
0.439068
0
0.0377
0.029864
7,601
163
130
46.631902
0.648902
0.291409
0
0.126761
0
0.211268
0.455418
0.359105
0
0
0
0
0
1
0
false
0.014085
0.014085
0
0.014085
0
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
839600f622fa7dfd1a20501060933e3134f8a05e
313
py
Python
PyCharm/Exercicios/Aula8/ex020.py
fabiodarice/Python
15ec1c7428f138be875111ac98ba38cf2eec1a93
[ "MIT" ]
null
null
null
PyCharm/Exercicios/Aula8/ex020.py
fabiodarice/Python
15ec1c7428f138be875111ac98ba38cf2eec1a93
[ "MIT" ]
null
null
null
PyCharm/Exercicios/Aula8/ex020.py
fabiodarice/Python
15ec1c7428f138be875111ac98ba38cf2eec1a93
[ "MIT" ]
null
null
null
from random import shuffle a1 = input('Digite o nome do aluno 1: ') a2 = input('Digite o nome do aluno 2: ') a3 = input('Digite o nome do aluno 3: ') a4 = input('Digite o nome do aluno 4: ') alunos = [a1, a2, a3, a4] shuffle(alunos) print('A ordem de apresentação será a seguinte \033[34m{}\033[m'.format(alunos))
39.125
80
0.683706
57
313
3.754386
0.526316
0.205607
0.224299
0.299065
0.429907
0.429907
0
0
0
0
0
0.07722
0.172524
313
8
80
39.125
0.749035
0
0
0
0
0
0.509554
0
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0.125
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
839654d15b984574fdcebefa4302cc8d6113a801
218
py
Python
apps/software/views.py
ggjersund/personal-website
c9b0095508c44248e1405925077b4b0ada2f411a
[ "MIT" ]
null
null
null
apps/software/views.py
ggjersund/personal-website
c9b0095508c44248e1405925077b4b0ada2f411a
[ "MIT" ]
6
2019-10-23T15:06:00.000Z
2021-09-15T17:52:15.000Z
apps/software/views.py
ggjersund/personal-website
c9b0095508c44248e1405925077b4b0ada2f411a
[ "MIT" ]
null
null
null
""" Software views """ import socket from django.shortcuts import render def frontpage(request): """ Index view """ return render(request, 'software/software.html', {'hostname': socket.gethostname()})
18.166667
88
0.678899
23
218
6.434783
0.73913
0
0
0
0
0
0
0
0
0
0
0
0.174312
218
11
89
19.818182
0.822222
0.114679
0
0
0
0
0.176471
0.129412
0
0
0
0
0
1
0.25
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
4
83c8d24e7ca2750fca13be129b6914e699ca1eda
468
py
Python
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/settings.py
nathfroech/cookiecutter-pypackage
5b435aa734fbf93a600bc2e88aaa644f9e2df825
[ "BSD-3-Clause" ]
null
null
null
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/settings.py
nathfroech/cookiecutter-pypackage
5b435aa734fbf93a600bc2e88aaa644f9e2df825
[ "BSD-3-Clause" ]
null
null
null
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/settings.py
nathfroech/cookiecutter-pypackage
5b435aa734fbf93a600bc2e88aaa644f9e2df825
[ "BSD-3-Clause" ]
null
null
null
""" Global settings for project. May be just some literals, or path-related values. {%- if cookiecutter.use_environment_based_settings %} All environment-based settings should be declared here too. {%- endif %} """ import pathlib {%- if cookiecutter.use_environment_based_settings %} from dotenv import load_dotenv # type: ignore from environs import Env load_dotenv() env = Env() env.read_env() {%- endif %} BASE_DIR = pathlib.Path(__file__).resolve().parent
19.5
59
0.75
63
468
5.349206
0.619048
0.142433
0.21365
0.166172
0.243323
0.243323
0
0
0
0
0
0
0.138889
468
23
60
20.347826
0.836228
0.025641
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.333333
null
null
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
83df53c2b28b5b00d6c2cf99eebbdf55bf1c8eb6
91
py
Python
src/example_validate.py
RichardOkubo/PythonScripts
86090465f739a2fc3f1f8ef22977efd241f97361
[ "MIT" ]
null
null
null
src/example_validate.py
RichardOkubo/PythonScripts
86090465f739a2fc3f1f8ef22977efd241f97361
[ "MIT" ]
null
null
null
src/example_validate.py
RichardOkubo/PythonScripts
86090465f739a2fc3f1f8ef22977efd241f97361
[ "MIT" ]
null
null
null
from validate import validate @validate def sub(x: int, y: int) -> int: return x - y
13
31
0.648352
15
91
3.933333
0.6
0
0
0
0
0
0
0
0
0
0
0
0.241758
91
6
32
15.166667
0.855072
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
83e9694e3f620e7e3bb4a9be03aa41f32793f466
126
py
Python
users/serializer.py
Micahtugi/Awards
b01f787686c920c9a765ea212357dc50b079277e
[ "Unlicense" ]
null
null
null
users/serializer.py
Micahtugi/Awards
b01f787686c920c9a765ea212357dc50b079277e
[ "Unlicense" ]
6
2020-02-12T00:50:19.000Z
2022-01-13T01:23:26.000Z
users/serializer.py
Micahtugi/Awards
b01f787686c920c9a765ea212357dc50b079277e
[ "Unlicense" ]
null
null
null
from rest_framework import serializers from .models import Profile class ProfileSerializer(serializers.ModelSerializer):
25.2
53
0.84127
13
126
8.076923
0.769231
0
0
0
0
0
0
0
0
0
0
0
0.119048
126
5
54
25.2
0.945946
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.666667
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
83fc7817e7a2c289156666833164c4fe86ea21b3
92
py
Python
no_covers/apps.py
qiwiGremL1n/blog
2ed534c0c62d91603f39da6b1c7e421b1cbf4047
[ "MIT" ]
null
null
null
no_covers/apps.py
qiwiGremL1n/blog
2ed534c0c62d91603f39da6b1c7e421b1cbf4047
[ "MIT" ]
null
null
null
no_covers/apps.py
qiwiGremL1n/blog
2ed534c0c62d91603f39da6b1c7e421b1cbf4047
[ "MIT" ]
null
null
null
from django.apps import AppConfig class NoCoversConfig(AppConfig): name = 'no_covers'
15.333333
33
0.76087
11
92
6.272727
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.163043
92
5
34
18.4
0.896104
0
0
0
0
0
0.097826
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f7c5e836dd01c9374182a548fd89ead3dc36d2aa
146
py
Python
api_key.py
apreble21/python-api-challenge
de9c066473a10cb28976aca38c61d703ff1669c0
[ "ADSL" ]
null
null
null
api_key.py
apreble21/python-api-challenge
de9c066473a10cb28976aca38c61d703ff1669c0
[ "ADSL" ]
null
null
null
api_key.py
apreble21/python-api-challenge
de9c066473a10cb28976aca38c61d703ff1669c0
[ "ADSL" ]
null
null
null
# OpenWeatherMap API Key weather_api_key = "601b4c14f4ddb46a0080bbfb5ca51d3e" # Google API Key g_key = "AIzaSyDNUFB01N6sBwZfPznGBiHayHJrON12pYw"
24.333333
52
0.842466
13
146
9.230769
0.615385
0.15
0
0
0
0
0
0
0
0
0
0.167939
0.10274
146
5
53
29.2
0.748092
0.253425
0
0
0
0
0.669811
0.669811
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f7dc475b41f557a908d7125b18175f13969f5b0d
95
py
Python
localusers/apps.py
SentF/henix
960636f4ffa053ef26016a37bd895801ce47c099
[ "Unlicense" ]
null
null
null
localusers/apps.py
SentF/henix
960636f4ffa053ef26016a37bd895801ce47c099
[ "Unlicense" ]
null
null
null
localusers/apps.py
SentF/henix
960636f4ffa053ef26016a37bd895801ce47c099
[ "Unlicense" ]
null
null
null
from django.apps import AppConfig class LocalusersConfig(AppConfig): name = 'localusers'
15.833333
34
0.768421
10
95
7.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.157895
95
5
35
19
0.9125
0
0
0
0
0
0.105263
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f7e4644e320ce710e993c01363469095dcd3a0ed
52
py
Python
code/Dirs.py
bradkav/DarkAxionPortal
5716e0684cf0f7e84f0a4de00a37734deff71d7b
[ "MIT" ]
null
null
null
code/Dirs.py
bradkav/DarkAxionPortal
5716e0684cf0f7e84f0a4de00a37734deff71d7b
[ "MIT" ]
null
null
null
code/Dirs.py
bradkav/DarkAxionPortal
5716e0684cf0f7e84f0a4de00a37734deff71d7b
[ "MIT" ]
null
null
null
axionlimits_dir = "/Users/bradkav/Code/AxionLimits/"
52
52
0.807692
6
52
6.833333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.038462
52
1
52
52
0.82
0
0
0
0
0
0.603774
0.603774
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f7e842d55b300343c8ba53e377bfb9d9cef766fb
3,239
py
Python
fdrtd/plugins/simon/accumulators/accumulator_statistics_bivariate.py
fdrtd/simon
46926200c74f17f48d27a0d7b195f14b293dda4a
[ "MIT" ]
null
null
null
fdrtd/plugins/simon/accumulators/accumulator_statistics_bivariate.py
fdrtd/simon
46926200c74f17f48d27a0d7b195f14b293dda4a
[ "MIT" ]
null
null
null
fdrtd/plugins/simon/accumulators/accumulator_statistics_bivariate.py
fdrtd/simon
46926200c74f17f48d27a0d7b195f14b293dda4a
[ "MIT" ]
null
null
null
import math as _math from fdrtd.plugins.simon.accumulators.accumulator import Accumulator from fdrtd.plugins.simon.accumulators.accumulator_statistics_univariate import AccumulatorStatisticsUnivariate class AccumulatorStatisticsBivariate(Accumulator): def __init__(self, _=None): self.accumulator_x = AccumulatorStatisticsUnivariate() self.accumulator_y = AccumulatorStatisticsUnivariate() self.accumulator_xy = AccumulatorStatisticsUnivariate() def serialize(self): return {'accumulator_x': self.accumulator_x.serialize(), 'accumulator_y': self.accumulator_y.serialize(), 'accumulator_xy': self.accumulator_xy.serialize()} @staticmethod def deserialize(dictionary): accumulator = AccumulatorStatisticsBivariate() accumulator.accumulator_x = AccumulatorStatisticsUnivariate.deserialize(dictionary['accumulator_x']) accumulator.accumulator_y = AccumulatorStatisticsUnivariate.deserialize(dictionary['accumulator_y']) accumulator.accumulator_xy = AccumulatorStatisticsUnivariate.deserialize(dictionary['accumulator_xy']) return accumulator def add(self, other): self.accumulator_x.add(other.accumulator_x) self.accumulator_y.add(other.accumulator_y) self.accumulator_xy.add(other.accumulator_xy) def update(self, data): (x, y) = data self.accumulator_x.update(x) self.accumulator_y.update(y) self.accumulator_xy.update(x*y) def finalize(self): self.accumulator_x.finalize() self.accumulator_y.finalize() self.accumulator_xy.finalize() def encrypt_data_for_upload(self, nonce): return {'accumulator_x': self.accumulator_x.encrypt_data_for_upload(nonce), 'accumulator_y': self.accumulator_y.encrypt_data_for_upload(nonce), 'accumulator_xy': self.accumulator_xy.encrypt_data_for_upload(nonce, power=2)} @staticmethod def decrypt_result_from_download(encrypted, nonce): decryption_powers = {'samples': 0, 'covariance_mle': 2, 'covariance': 2, 'correlation_coefficient': 0, 'regression_slope': 0, 'regression_interceipt': 1, 'regression_slope_only': 0} return nonce.decrypt_dictionary_numerical(encrypted, 
decryption_powers) def get_samples(self): return self.accumulator_xy.get_samples() def get_covariance_mle(self): return self.accumulator_xy.get_mean() - self.accumulator_x.get_mean() * self.accumulator_y.get_mean() def get_covariance(self): return self.get_covariance_mle() / (1.0 - 1.0 / self.accumulator_xy.get_samples()) def get_correlation_coefficient(self): return self.get_covariance() / _math.sqrt(self.accumulator_x.get_variance() * self.accumulator_y.get_variance()) def get_regression_slope(self): return self.get_covariance() / self.accumulator_x.get_variance() def get_regression_interceipt(self): return self.accumulator_y.get_mean() - self.get_regression_slope() * self.accumulator_x.get_mean() def get_regression_slope_only(self): return self.accumulator_xy.get_mean() / self.accumulator_x.calculate_raw_moment(2)
43.77027
120
0.727694
359
3,239
6.256267
0.172702
0.200356
0.078362
0.035619
0.345948
0.179875
0.073909
0.044524
0.044524
0.044524
0
0.004867
0.175363
3,239
73
121
44.369863
0.836016
0
0
0.036364
0
0
0.071627
0.020068
0
0
0
0
0
1
0.272727
false
0
0.054545
0.163636
0.545455
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
f7eeaa099b83aecb7abbc258520807361c03cb88
65
py
Python
src_tf/templates/tf_dataset_template/base/base_model.py
ashishpatel26/finch
bf2958c0f268575e5d51ad08fbc08b151cbea962
[ "MIT" ]
1
2019-02-12T09:22:00.000Z
2019-02-12T09:22:00.000Z
src_tf/templates/tf_dataset_template/base/base_model.py
loopzxl/finch
bf2958c0f268575e5d51ad08fbc08b151cbea962
[ "MIT" ]
null
null
null
src_tf/templates/tf_dataset_template/base/base_model.py
loopzxl/finch
bf2958c0f268575e5d51ad08fbc08b151cbea962
[ "MIT" ]
1
2020-10-15T21:34:17.000Z
2020-10-15T21:34:17.000Z
class BaseModel: def __init__(self): self.ops = {}
10.833333
23
0.553846
7
65
4.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.323077
65
5
24
13
0.727273
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
f7fb97db3220bbc64ca010f0349740e9c79f310a
178
py
Python
bus_system/apps/bus_driver/admin.py
pygabo/bus_system
ffb76d3414e058286799f3df1cb551b26286e7c3
[ "MIT" ]
null
null
null
bus_system/apps/bus_driver/admin.py
pygabo/bus_system
ffb76d3414e058286799f3df1cb551b26286e7c3
[ "MIT" ]
null
null
null
bus_system/apps/bus_driver/admin.py
pygabo/bus_system
ffb76d3414e058286799f3df1cb551b26286e7c3
[ "MIT" ]
null
null
null
# Core Django imports from django.contrib import admin # Imports from my apps from bus_system.apps.bus_driver.models import BusDriverModel admin.site.register(BusDriverModel)
22.25
60
0.825843
25
178
5.8
0.64
0.151724
0
0
0
0
0
0
0
0
0
0
0.117978
178
7
61
25.428571
0.923567
0.230337
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
7913c04ccb3d3a404fd572f5ba2d25d3927e3c07
415
py
Python
WebClassApp/mainpage/serializers.py
jesuscol96/WebClassApp
092bde4cb16f09f3efafc32af904715fae59773a
[ "MIT" ]
null
null
null
WebClassApp/mainpage/serializers.py
jesuscol96/WebClassApp
092bde4cb16f09f3efafc32af904715fae59773a
[ "MIT" ]
null
null
null
WebClassApp/mainpage/serializers.py
jesuscol96/WebClassApp
092bde4cb16f09f3efafc32af904715fae59773a
[ "MIT" ]
null
null
null
from rest_framework import serializers from django.contrib.auth.models import User from .models import * class UserSerializer(serializers.ModelSerializer): class Meta: model= User fields = ['username','first_name','last_name','password','email','is_superuser'] class RolesSerializer(serializers.ModelSerializer): class Meta: model= Roles fields = ['name']
25.9375
89
0.684337
43
415
6.511628
0.604651
0.085714
0.221429
0.25
0.285714
0
0
0
0
0
0
0
0.214458
415
15
90
27.666667
0.858896
0
0
0.181818
0
0
0.14
0
0
0
0
0
0
1
0
false
0.090909
0.272727
0
0.636364
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
7936dd133e3fdbf46aa9c6c09a12e6cbc0c9cc6a
169
py
Python
device_captures/models.py
mohbandy/probr-core
df152c4fe0d0e5e393f11154db38dc56dcefb636
[ "MIT" ]
45
2015-08-11T11:37:46.000Z
2022-03-27T19:27:56.000Z
device_captures/models.py
mohbandy/probr-core
df152c4fe0d0e5e393f11154db38dc56dcefb636
[ "MIT" ]
33
2015-08-11T10:23:44.000Z
2022-03-01T15:57:15.000Z
device_captures/models.py
mohbandy/probr-core
df152c4fe0d0e5e393f11154db38dc56dcefb636
[ "MIT" ]
23
2015-10-06T17:07:54.000Z
2021-11-12T10:47:17.000Z
from captures.models import Capture from django.db import models from devices.models import Device class DeviceCapture(Capture): device = models.ForeignKey(Device)
24.142857
38
0.810651
22
169
6.227273
0.545455
0.175182
0
0
0
0
0
0
0
0
0
0
0.130178
169
7
38
24.142857
0.931973
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.6
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f738c8d7c592d77c0c81025d123c1cecf33946f4
154
py
Python
pysensors/basis/__init__.py
Jimmy-INL/pysensors
62b79a233a551ae01125e20e06fde0c96b4dffd2
[ "MIT" ]
null
null
null
pysensors/basis/__init__.py
Jimmy-INL/pysensors
62b79a233a551ae01125e20e06fde0c96b4dffd2
[ "MIT" ]
null
null
null
pysensors/basis/__init__.py
Jimmy-INL/pysensors
62b79a233a551ae01125e20e06fde0c96b4dffd2
[ "MIT" ]
null
null
null
from ._identity import Identity
from ._random_projection import RandomProjection
from ._svd import SVD

# Public API of the basis subpackage.
__all__ = ["Identity", "SVD", "RandomProjection"]
25.666667
49
0.792208
17
154
6.705882
0.470588
0
0
0
0
0
0
0
0
0
0
0
0.116883
154
5
50
30.8
0.838235
0
0
0
0
0
0.175325
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f7464013eeebcdb4986304f0e5c49ade64cbb57c
113
py
Python
commit_checker/tests/__main__.py
hrome/commit_checker
b212cb9cf6728b6bb9006097fa8211e9a06537b8
[ "MIT" ]
2
2017-11-24T12:28:50.000Z
2018-12-28T10:13:40.000Z
commit_checker/tests/functional/__main__.py
hrome/commit_checker
b212cb9cf6728b6bb9006097fa8211e9a06537b8
[ "MIT" ]
null
null
null
commit_checker/tests/functional/__main__.py
hrome/commit_checker
b212cb9cf6728b6bb9006097fa8211e9a06537b8
[ "MIT" ]
null
null
null
import os

from nose.core import TestProgram

# Run the suite from this package's own directory so relative paths used by
# test discovery resolve correctly regardless of the caller's cwd.
# NOTE(review): nose is unmaintained and broken on modern Python; consider
# migrating to pytest.
os.chdir(os.path.abspath(os.path.dirname(__file__)))
TestProgram()
16.142857
52
0.787611
17
113
5
0.647059
0.141176
0
0
0
0
0
0
0
0
0
0
0.088496
113
6
53
18.833333
0.825243
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
f76a3bf1b294256e0e46b5250fdd278b0773cccd
1,620
py
Python
cohesity_management_sdk/models/environment_list_protection_sources_enum.py
cohesity/management-sdk-python
867d8c0c40dd317cdb017902c895527da7ae31c0
[ "Apache-2.0" ]
18
2019-09-24T17:35:53.000Z
2022-03-25T08:08:47.000Z
cohesity_management_sdk/models/environment_list_protection_sources_enum.py
cohesity/management-sdk-python
867d8c0c40dd317cdb017902c895527da7ae31c0
[ "Apache-2.0" ]
18
2019-03-29T19:32:29.000Z
2022-01-03T23:16:45.000Z
cohesity_management_sdk/models/environment_list_protection_sources_enum.py
cohesity/management-sdk-python
867d8c0c40dd317cdb017902c895527da7ae31c0
[ "Apache-2.0" ]
16
2019-02-27T06:54:12.000Z
2021-11-16T18:10:24.000Z
# -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.


class EnvironmentListProtectionSourcesEnum(object):
    """String constants for the 'environment_ListProtectionSources' enum.

    Each class attribute maps a symbolic name to the exact wire value the
    Cohesity REST API expects when listing protection sources.
    """

    K_VMWARE = 'kVMware'
    KSQL = 'kSQL'
    KVIEW = 'kView'
    KPUPPETEER = 'kPuppeteer'
    KPHYSICAL = 'kPhysical'
    KPURE = 'kPure'
    KNETAPP = 'kNetapp'
    KGENERICNAS = 'kGenericNas'
    K_HYPERV = 'kHyperV'
    KACROPOLIS = 'kAcropolis'
    KAZURE = 'kAzure'
    KKUBERNETES = 'kKubernetes'
    KCASSANDRA = 'kCassandra'
    KMONGODB = 'kMongoDB'
    KCOUCHBASE = 'kCouchbase'
    KHDFS = 'kHdfs'
    KHIVE = 'kHive'
    KHBASE = 'kHBase'
    KUDA = 'kUDA'
22.816901
70
0.638889
159
1,620
6.477987
0.27044
0.15534
0.350485
0.424272
0.046602
0
0
0
0
0
0
0.004255
0.274691
1,620
70
71
23.142857
0.87234
0.598765
0
0
0
0
0.253623
0
0
0
0
0.285714
0
1
0
false
0
0
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
4
f780b53286805d43cf8d3e4449572f04d4dcff75
6,693
py
Python
looking_for_group/rpgcollections/utils.py
andrlik/looking-for-group
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
[ "BSD-3-Clause" ]
null
null
null
looking_for_group/rpgcollections/utils.py
andrlik/looking-for-group
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
[ "BSD-3-Clause" ]
null
null
null
looking_for_group/rpgcollections/utils.py
andrlik/looking-for-group
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
[ "BSD-3-Clause" ]
null
null
null
from django.contrib.contenttypes.models import ContentType

from ..game_catalog import models as catalog_models
from . import models


def get_distinct_games(library):
    """Return the distinct PublishedGames represented by a library's books.

    Games are reached two ways -- via sourcebooks and via published
    modules -- then combined with union() and ordered by title.
    """
    sb_ct = ContentType.objects.get_for_model(catalog_models.SourceBook)
    md_ct = ContentType.objects.get_for_model(catalog_models.PublishedModule)
    # Book -> SourceBook -> edition -> game
    sourcebook_games = catalog_models.PublishedGame.objects.filter(
        id__in=[
            sb.edition.game.pk
            for sb in catalog_models.SourceBook.objects.filter(
                id__in=[
                    b.content_object.pk
                    for b in models.Book.objects.filter(
                        library=library, content_type=sb_ct
                    )
                ]
            ).select_related("edition", "edition__game")
        ]
    ).order_by("title")
    # Book -> PublishedModule -> parent edition -> game
    module_games = catalog_models.PublishedGame.objects.filter(
        id__in=[
            md.parent_game_edition.game.pk
            for md in catalog_models.PublishedModule.objects.filter(
                id__in=[
                    b.content_object.pk
                    for b in models.Book.objects.filter(
                        library=library, content_type=md_ct
                    )
                ]
            ).select_related("parent_game_edition", "parent_game_edition__game")
        ]
    ).order_by("title")
    games = sourcebook_games.union(module_games).order_by("title")
    return games


def get_distinct_editions(library):
    """Return the distinct GameEditions represented by a library's books."""
    sb_ct = ContentType.objects.get_for_model(catalog_models.SourceBook)
    md_ct = ContentType.objects.get_for_model(catalog_models.PublishedModule)
    sourcebook_editions = (
        catalog_models.GameEdition.objects.filter(
            id__in=[
                sb.edition.pk
                for sb in catalog_models.SourceBook.objects.filter(
                    id__in=[
                        b.content_object.pk
                        for b in models.Book.objects.filter(
                            library=library, content_type=sb_ct
                        )
                    ]
                ).select_related("edition")
            ]
        )
        .select_related("game")
        .order_by("game__title", "release_date")
    )
    module_editions = (
        catalog_models.GameEdition.objects.filter(
            id__in=[
                md.parent_game_edition.pk
                for md in catalog_models.PublishedModule.objects.filter(
                    id__in=[
                        b.content_object.pk
                        for b in models.Book.objects.filter(
                            library=library, content_type=md_ct
                        )
                    ]
                ).select_related("parent_game_edition")
            ]
        )
        .select_related("game")
        .order_by("game__title", "release_date")
    )
    editions = sourcebook_editions.union(module_editions).order_by(
        "game__title", "release_date"
    )
    return editions


def get_distinct_systems(library):
    """Return the distinct GameSystems represented by a library's books.

    Systems are gathered from sourcebooks, modules, and books pointing at a
    GameSystem directly; editions without a game system are skipped via the
    isnull filters.
    """
    sb_ct = ContentType.objects.get_for_model(catalog_models.SourceBook)
    md_ct = ContentType.objects.get_for_model(catalog_models.PublishedModule)
    sys_ct = ContentType.objects.get_for_model(catalog_models.GameSystem)
    sourcebook_systems = catalog_models.GameSystem.objects.filter(
        id__in=[
            sb.edition.game_system.pk
            for sb in catalog_models.SourceBook.objects.filter(
                id__in=[
                    b.content_object.pk
                    for b in models.Book.objects.filter(
                        library=library, content_type=sb_ct
                    )
                ],
                edition__game_system__isnull=False,
            ).select_related("edition", "edition__game_system")
        ]
    ).order_by("name", "publication_date")
    module_systems = catalog_models.GameSystem.objects.filter(
        id__in=[
            md.parent_game_edition.game_system.pk
            for md in catalog_models.PublishedModule.objects.filter(
                id__in=[
                    b.content_object.pk
                    for b in models.Book.objects.filter(
                        library=library, content_type=md_ct
                    )
                ],
                parent_game_edition__game_system__isnull=False,
            ).select_related("parent_game_edition", "parent_game_edition__game_system")
        ]
    ).order_by("name", "publication_date")
    # Books can also reference a GameSystem directly.
    system_systems = catalog_models.GameSystem.objects.filter(
        id__in=[
            b.content_object.pk
            for b in models.Book.objects.filter(library=library, content_type=sys_ct)
        ]
    ).order_by("name", "publication_date")
    sb_and_md = sourcebook_systems.union(module_systems)
    systems = sb_and_md.union(system_systems).order_by("name", "publication_date")
    return systems


def get_distinct_publishers(library):
    """Return the distinct GamePublishers represented by a library's books."""
    sb_ct = ContentType.objects.get_for_model(catalog_models.SourceBook)
    md_ct = ContentType.objects.get_for_model(catalog_models.PublishedModule)
    sys_ct = ContentType.objects.get_for_model(catalog_models.GameSystem)
    sourcebook_publishers = catalog_models.GamePublisher.objects.filter(
        id__in=[
            sb.publisher.pk
            for sb in catalog_models.SourceBook.objects.filter(
                id__in=[
                    b.content_object.pk
                    for b in models.Book.objects.filter(
                        library=library, content_type=sb_ct
                    )
                ]
            )
        ]
    ).order_by("name")
    module_publishers = catalog_models.GamePublisher.objects.filter(
        id__in=[
            md.publisher.pk
            for md in catalog_models.PublishedModule.objects.filter(
                id__in=[
                    b.content_object.pk
                    for b in models.Book.objects.filter(
                        library=library, content_type=md_ct
                    )
                ]
            ).select_related("publisher")
        ]
    ).order_by("name")
    # 'sys' shadows the builtin module name here -- harmless in this scope.
    system_publishers = catalog_models.GamePublisher.objects.filter(
        id__in=[
            sys.original_publisher.pk
            for sys in catalog_models.GameSystem.objects.filter(
                id__in=[
                    b.content_object.pk
                    for b in models.Book.objects.filter(
                        library=library, content_type=sys_ct
                    )
                ]
            ).select_related("original_publisher")
        ]
    ).order_by("name")
    sb_and_md = sourcebook_publishers.union(module_publishers)
    publishers = sb_and_md.union(system_publishers).order_by("name")
    return publishers
38.912791
87
0.585089
700
6,693
5.245714
0.085714
0.106209
0.077614
0.087963
0.827342
0.784858
0.775054
0.749183
0.609205
0.56781
0
0
0.332885
6,693
171
88
39.140351
0.822396
0
0
0.493827
0
0
0.057224
0.008516
0
0
0
0
0
1
0.024691
false
0
0.018519
0
0.067901
0
0
0
0
null
0
0
0
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e398c9cb3f7797fc2675aea92c940e9f6259d99d
250
py
Python
image_vision/plugins/visualizers/registry.py
IvanKosik/ImageVision
038b2b3948a16adc4c2abb3bc8c1c32f62aa4319
[ "BSD-3-Clause" ]
null
null
null
image_vision/plugins/visualizers/registry.py
IvanKosik/ImageVision
038b2b3948a16adc4c2abb3bc8c1c32f62aa4319
[ "BSD-3-Clause" ]
null
null
null
image_vision/plugins/visualizers/registry.py
IvanKosik/ImageVision
038b2b3948a16adc4c2abb3bc8c1c32f62aa4319
[ "BSD-3-Clause" ]
null
null
null
from core import Plugin
from extensions.visualizers import DataVisualizerRegistry


class DataVisualizerRegistryPlugin(Plugin):
    """Plugin that owns the application's registry of data visualizers."""

    def __init__(self):
        super().__init__()
        # Registry other components query to find a visualizer for their data.
        self.visualizers_registry = DataVisualizerRegistry()
25
61
0.744
21
250
8.428571
0.666667
0.090395
0
0
0
0
0
0
0
0
0
0
0.192
250
9
62
27.777778
0.876238
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
e3a2355730a5d5adebb58f1e888f6f8df8743d7e
108
py
Python
resource-timing/resources/eventsource.py
meyerweb/wpt
f04261533819893c71289614c03434c06856c13e
[ "BSD-3-Clause" ]
14,668
2015-01-01T01:57:10.000Z
2022-03-31T23:33:32.000Z
resource-timing/resources/eventsource.py
meyerweb/wpt
f04261533819893c71289614c03434c06856c13e
[ "BSD-3-Clause" ]
7,642
2018-05-28T09:38:03.000Z
2022-03-31T20:55:48.000Z
resource-timing/resources/eventsource.py
meyerweb/wpt
f04261533819893c71289614c03434c06856c13e
[ "BSD-3-Clause" ]
5,941
2015-01-02T11:32:21.000Z
2022-03-31T16:35:46.000Z
def main(request, response):
    """wptserve handler: advertise an event stream, return an empty body."""
    header = (b"Content-Type", b"text/event-stream")
    response.headers.set(*header)
    return u""
27
63
0.694444
16
108
4.6875
0.875
0
0
0
0
0
0
0
0
0
0
0
0.138889
108
3
64
36
0.806452
0
0
0
0
0
0.268519
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
e3c0bfd415010b834e3a379c979e37202495e059
233
py
Python
flaskr/models/user_role.py
tuhinpaul/flask-sample-project
fb4cae2d00b7c1e1318f44e477f71ed93ecaed52
[ "MIT" ]
null
null
null
flaskr/models/user_role.py
tuhinpaul/flask-sample-project
fb4cae2d00b7c1e1318f44e477f71ed93ecaed52
[ "MIT" ]
null
null
null
flaskr/models/user_role.py
tuhinpaul/flask-sample-project
fb4cae2d00b7c1e1318f44e477f71ed93ecaed52
[ "MIT" ]
null
null
null
from sqlalchemy import Column, Integer, String, Float
from .base import Base


class UserRole(Base):
    """Row linking a user to a role (user/role assignment table)."""

    __tablename__ = 'UserRole'

    id = Column(Integer, primary_key=True)  # surrogate primary key
    # NOTE(review): userId/roleId carry ids of other tables but declare no
    # ForeignKey constraints -- confirm whether that is intentional.
    userId = Column(Integer)
    roleId = Column(Integer)
23.3
53
0.72103
28
233
5.821429
0.607143
0.319018
0
0
0
0
0
0
0
0
0
0
0.188841
233
9
54
25.888889
0.862434
0
0
0
0
0
0.034335
0
0
0
0
0
0
1
0
false
0
0.285714
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
e3e002bcda1c127e55cf91be255d6c11fb96f6c1
2,948
py
Python
tests/test_stack_yaml.py
AFCYBER-DREAM/piperci-mindflayer
bfa1cce9f48563f34b4fd8cc8b92b54018f11be9
[ "MIT" ]
null
null
null
tests/test_stack_yaml.py
AFCYBER-DREAM/piperci-mindflayer
bfa1cce9f48563f34b4fd8cc8b92b54018f11be9
[ "MIT" ]
2
2019-06-05T15:31:41.000Z
2019-06-07T17:56:12.000Z
tests/test_stack_yaml.py
AFCYBER-DREAM/piperci-mindflayer
bfa1cce9f48563f34b4fd8cc8b92b54018f11be9
[ "MIT" ]
2
2019-05-21T20:33:29.000Z
2019-06-05T13:44:41.000Z
import os


def test_stack_functions(stack_data):
    """The parsed stack.yml must declare at least one function."""
    errMsg = 'Error: \'stack.yml\' file does not contain any functions.'
    assert 'functions' in stack_data, errMsg


def test_git_ignore(stack_dir):
    """The stack directory must ship a .gitignore file."""
    path = os.path.join(stack_dir, '.gitignore')
    errMsg = f'Error: No \'.gitignore\' file found in {stack_dir}.'
    assert os.path.exists(os.path.realpath(path)), errMsg


def test_stack_handlers(stack_function, stack_dir):
    """Each declared function must have a matching handler directory."""
    path = os.path.join(stack_dir, stack_function['handler'])
    errMsg = f'Error: No directory for {stack_function["handler"]}.'
    assert os.path.exists(os.path.realpath(path)), errMsg


def test_stack_handlers_file(stack_function, stack_dir):
    """Each handler directory must contain handler.py."""
    path = os.path.join(stack_dir, stack_function['handler'], 'handler.py')
    errMsg = (f'Error: \'{stack_function["handler"]}\' does not contain '
              '\'handler.py\' file.')
    assert os.path.exists(os.path.realpath(path)), errMsg


def test_stack_handlers_requires(stack_function, stack_dir):
    """Each handler directory must contain requirements.txt."""
    path = os.path.join(stack_dir, stack_function['handler'], 'requirements.txt')
    errMsg = (f'Error: \'{stack_function["handler"]}\' does not contain '
              '\'requirements.txt\' file.')
    assert os.path.exists(os.path.realpath(path)), errMsg


def test_stack_handlers_init(stack_function, stack_dir):
    """Each handler directory must be a package (__init__.py present)."""
    path = os.path.join(stack_dir, stack_function['handler'], '__init__.py')
    errMsg = (f'Error: \'{stack_function["handler"]}\' does not contain '
              '\'__init__.py\' file.')
    assert os.path.exists(os.path.realpath(path)), errMsg


def test_stack_langs(stack_function, stack_dir):
    """template/ must hold a directory for each function's language."""
    path = os.path.join(stack_dir, 'template', stack_function['lang'])
    errMsg = (f'Error: No directory for {stack_function["lang"]} is present in '
              '\'template\' directory.')
    assert os.path.exists(os.path.realpath(path)), errMsg


def test_stack_langs_dockerfile(stack_function, stack_dir):
    """Each language template must include a Dockerfile."""
    path = os.path.join(stack_dir, 'template', stack_function['lang'], 'Dockerfile')
    errMsg = (f'Error: \'template/{stack_function["lang"]}/\' does not contain '
              '\'Dockerfile\'.')
    assert os.path.exists(os.path.realpath(path)), errMsg


def test_stack_langs_requires(stack_function, stack_dir):
    """Each language template must include requirements.txt."""
    path = os.path.join(stack_dir, 'template', stack_function['lang'],
                        'requirements.txt')
    errMsg = (f'Error: \'template/{stack_function["lang"]}/\' does not contain '
              '\'requirements.txt\' file.')
    assert os.path.exists(os.path.realpath(path)), errMsg


def test_only_langs(stack_data, stack_dir):
    """Flag language templates that no declared function actually uses."""
    lang_set = {v['lang'] for k, v in stack_data['functions'].items()}
    dir_set = set(os.listdir(os.path.join(stack_dir, 'template')))
    warnMsg = ('Warning: Unused language templates are present in \'template\' '
               'directory.')
    assert len(dir_set - lang_set) == 0, warnMsg
39.837838
80
0.662144
387
2,948
4.834625
0.149871
0.080171
0.062533
0.072154
0.7814
0.734367
0.72047
0.72047
0.662747
0.645644
0
0.000418
0.187924
2,948
73
81
40.383562
0.781119
0
0
0.403846
0
0
0.213026
0.017639
0
0
0
0
0.192308
1
0.192308
false
0
0.019231
0
0.211538
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e3e7b6608fbc1741aa7a35fb7da3577023ac131b
147
py
Python
fatd/measure/accountability/data/tools.py
AnthropocentricAI/fat-dummy
08fc3e6c55e11f664c541283dde5a1cc6fd40298
[ "BSD-3-Clause" ]
null
null
null
fatd/measure/accountability/data/tools.py
AnthropocentricAI/fat-dummy
08fc3e6c55e11f664c541283dde5a1cc6fd40298
[ "BSD-3-Clause" ]
null
null
null
fatd/measure/accountability/data/tools.py
AnthropocentricAI/fat-dummy
08fc3e6c55e11f664c541283dde5a1cc6fd40298
[ "BSD-3-Clause" ]
null
null
null
import numpy as np


def class_count(data_holder):
    """Return the distinct target labels and how often each one occurs."""
    labels, frequencies = np.unique(data_holder.target, return_counts=True)
    return labels, frequencies
24.5
70
0.761905
22
147
4.909091
0.636364
0.185185
0
0
0
0
0
0
0
0
0
0
0.156463
147
5
71
29.4
0.870968
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
540ad43bc746e83d4ed301ab8b904082adcde0b2
88
py
Python
text.py
Ravindra-14/python-01
189d19b8987eb01ed3b1f6261ad1da24e0d338be
[ "Apache-2.0" ]
null
null
null
text.py
Ravindra-14/python-01
189d19b8987eb01ed3b1f6261ad1da24e0d338be
[ "Apache-2.0" ]
null
null
null
text.py
Ravindra-14/python-01
189d19b8987eb01ed3b1f6261ad1da24e0d338be
[ "Apache-2.0" ]
null
null
null
# Toy branching demo: count is fixed at 1, so the script prints
# "hello" followed by "no".
print("hello")

count = 1
print("yes" if count < 1 else "no")
12.571429
22
0.488636
12
88
3.583333
0.666667
0.27907
0
0
0
0
0
0
0
0
0
0.033898
0.329545
88
6
23
14.666667
0.694915
0
0
0
0
0
0.121951
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
5412ff2bd0dae926fd1fbf97be2d9ce144344885
61
py
Python
django_version.py
Muflhi01/flow-dashboard
993320e2eb0f86d89b9904a3d5415c7479c5918e
[ "MIT" ]
1,623
2017-03-11T11:49:48.000Z
2022-03-30T06:44:11.000Z
django_version.py
Muflhi01/flow-dashboard
993320e2eb0f86d89b9904a3d5415c7479c5918e
[ "MIT" ]
136
2017-03-11T17:08:57.000Z
2022-03-09T21:38:46.000Z
django_version.py
Muflhi01/flow-dashboard
993320e2eb0f86d89b9904a3d5415c7479c5918e
[ "MIT" ]
217
2017-05-06T14:28:36.000Z
2022-03-29T16:56:01.000Z
import os

# Tell Django which settings module to use; this must be set before any
# Django machinery is imported. Note it unconditionally overwrites any value
# already present in the process environment.
os.environ["DJANGO_SETTINGS_MODULE"] = "settings"
15.25
49
0.770492
8
61
5.625
0.75
0
0
0
0
0
0
0
0
0
0
0
0.098361
61
3
50
20.333333
0.818182
0
0
0
0
0
0.5
0.366667
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
54275bb038f023a711df21868a4e7e60e27c8dbb
152
py
Python
PYTHON/Tuples.py
MatheusKlebson/Programming-in-English
36b1746378802cfe169b6138d036fcb9a140eaad
[ "MIT" ]
null
null
null
PYTHON/Tuples.py
MatheusKlebson/Programming-in-English
36b1746378802cfe169b6138d036fcb9a140eaad
[ "MIT" ]
null
null
null
PYTHON/Tuples.py
MatheusKlebson/Programming-in-English
36b1746378802cfe169b6138d036fcb9a140eaad
[ "MIT" ]
null
null
null
# Small demo of converting between tuples and lists.
firt_tuple = (5, 5, 4, 6, 1, 2, 3)  # (sic) original spelling kept: it is the module's public name
new_list = list(firt_tuple)
new_tuple = tuple(new_list)

# Report length and extremes; output order matches the original script.
for shown in (len(firt_tuple), max(new_list), min(new_tuple)):
    print(shown)
19
28
0.736842
31
152
3.354839
0.451613
0.259615
0.230769
0
0
0
0
0
0
0
0
0.05036
0.085526
152
8
29
19
0.697842
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
588a7cff1907ee5869e3abd8472d79cde6f0e56c
165
py
Python
probability/gamma_test2.py
peterhogan/python
bc6764f7794a862ff0d138bad80f1d6313984dcd
[ "MIT" ]
null
null
null
probability/gamma_test2.py
peterhogan/python
bc6764f7794a862ff0d138bad80f1d6313984dcd
[ "MIT" ]
null
null
null
probability/gamma_test2.py
peterhogan/python
bc6764f7794a862ff0d138bad80f1d6313984dcd
[ "MIT" ]
null
null
null
from math import exp, inf

from scipy.integrate import quad


def integrand(t, n, x):
    """Integrand of the generalized exponential integral E_n(x)."""
    return exp(-x * t) / t ** n


def expint(n, x):
    """Compute E_n(x) = integral over t in [1, inf) of exp(-x*t)/t**n.

    Fixes vs. original: 'exp' and the infinite upper limit were undefined
    names, args(n, x) was a (nonexistent) call instead of the args=(n, x)
    keyword quad expects, and the lower limit 0 made the integral diverge
    for n >= 1 -- the E_n definition (and scipy's own quad example)
    integrates from 1.
    """
    return quad(integrand, 1, inf, args=(n, x))[0]


print(expint(2, 1.0))
16.5
45
0.672727
34
165
3.264706
0.558824
0.054054
0.144144
0
0
0
0
0
0
0
0
0.034722
0.127273
165
9
46
18.333333
0.736111
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0.333333
0.833333
0.166667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
589a4e8832a1c82348910df930db979cfb479cc5
6,084
py
Python
src/dewloosh/solid/fem/cells/gen/b3.py
dewloosh/dewloosh-solid
dbd6757ddd1373df870ccd99f5ee791c08d342cb
[ "MIT" ]
null
null
null
src/dewloosh/solid/fem/cells/gen/b3.py
dewloosh/dewloosh-solid
dbd6757ddd1373df870ccd99f5ee791c08d342cb
[ "MIT" ]
null
null
null
src/dewloosh/solid/fem/cells/gen/b3.py
dewloosh/dewloosh-solid
dbd6757ddd1373df870ccd99f5ee791c08d342cb
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
import numpy as np
from numpy import ndarray
from numba import njit, prange

# Controls numba's on-disk caching of the compiled kernels below.
__cache = True


@njit(nogil=True, cache=__cache)
def shape_function_values(x, L):
    """
    Evaluates the shape functions at a point x
    in the range [-1, 1].
    """
    # Rows correspond to the 3 nodes, columns to the 6 DOFs; entries mixing
    # the element length L belong to the rotational/derivative DOFs.
    return np.array([
        [
            0.5 * x * (x - 1),
            x**2 * (0.75 * x + 1.0) * (x - 1)**2,
            x**2 * (0.75 * x + 1.0) * (x - 1)**2,
            0.5 * x * (x - 1),
            -0.125 * L * x**2 * (x - 1)**2 * (x + 1),
            0.125 * L * x**2 * (x - 1)**2 * (x + 1)
        ],
        [
            1.0 - 1.0 * x**2,
            1.0 * (x - 1)**2 * (x + 1)**2,
            1.0 * (x - 1)**2 * (x + 1)**2,
            1.0 - 1.0 * x**2,
            L * x * (-0.5 * x**4 + 1.0 * x**2 - 0.5),
            0.5 * L * x * (x - 1)**2 * (x + 1)**2
        ],
        [
            0.5 * x * (x + 1),
            x**2 * (1.0 - 0.75 * x) * (x + 1)**2,
            x**2 * (1.0 - 0.75 * x) * (x + 1)**2,
            0.5 * x * (x + 1),
            -0.125 * L * x**2 * (x - 1) * (x + 1)**2,
            0.125 * L * x**2 * (x - 1) * (x + 1)**2
        ]
    ])


@njit(nogil=True, cache=__cache)
def shape_function_derivatives_1(x, L):
    """
    Evaluates the first derivatives of the shape functions
    at a point x in the range [-1, 1].
    """
    return np.array([
        [
            1.0 * x - 0.5,
            3.75 * x * (x - 1) * (1.0 * x - 0.533333333333333) * (x + 1),
            3.75 * x * (x - 1) * (1.0 * x - 0.533333333333333) * (x + 1),
            1.0 * x - 0.5,
            L * x * (-0.625 * x**3 + 0.5 * x**2 + 0.375 * x - 0.25),
            0.625 * L * x * (x - 1) * (1.0 * x**2 + 0.2 * x - 0.4)
        ],
        [
            -2.0 * x,
            4.0 * x * (x**2 - 1),
            4.0 * x * (x**2 - 1),
            -2.0 * x,
            L * (-2.5 * x**4 + 3.0 * x**2 - 0.5),
            L * (2.5 * x**4 - 3.0 * x**2 + 0.5)
        ],
        [
            1.0 * x + 0.5,
            -3.75 * x * (x - 1) * (1.0 * x + 0.533333333333333) * (x + 1),
            -3.75 * x * (x - 1) * (1.0 * x + 0.533333333333333) * (x + 1),
            1.0 * x + 0.5,
            0.625 * L * x * (x + 1) * (-1.0 * x**2 + 0.2 * x + 0.4),
            L * x * (0.625 * x**3 + 0.5 * x**2 - 0.375 * x - 0.25)
        ]
    ])


@njit(nogil=True, cache=__cache)
def shape_function_derivatives_2(x, L):
    """
    Evaluates the second derivatives of the shape functions
    at a point x in the range [-1, 1].
    """
    return np.array([
        [
            1.00000000000000,
            15.0 * x**3 - 6.0 * x**2 - 7.5 * x + 2.0,
            15.0 * x**3 - 6.0 * x**2 - 7.5 * x + 2.0,
            1.00000000000000,
            L * (-2.5 * x**3 + 1.5 * x**2 + 0.75 * x - 0.25),
            L * (2.5 * x**3 - 1.5 * x**2 - 0.75 * x + 0.25)
        ],
        [
            -2.00000000000000,
            12.0 * x**2 - 4.0,
            12.0 * x**2 - 4.0,
            -2.00000000000000,
            L * x * (6.0 - 10.0 * x**2),
            L * x * (10.0 * x**2 - 6.0)
        ],
        [
            1.00000000000000,
            -15.0 * x**3 - 6.0 * x**2 + 7.5 * x + 2.0,
            -15.0 * x**3 - 6.0 * x**2 + 7.5 * x + 2.0,
            1.00000000000000,
            L * (-2.5 * x**3 - 1.5 * x**2 + 0.75 * x + 0.25),
            L * (2.5 * x**3 + 1.5 * x**2 - 0.75 * x - 0.25)
        ]
    ])


@njit(nogil=True, cache=__cache)
def shape_function_derivatives_3(x, L):
    """
    Evaluates the third derivatives of the shape functions
    at a point x in the range [-1, 1].
    """
    return np.array([
        [
            0,
            45.0 * x**2 - 12.0 * x - 7.5,
            45.0 * x**2 - 12.0 * x - 7.5,
            0,
            L * (-7.5 * x**2 + 3.0 * x + 0.75),
            L * (7.5 * x**2 - 3.0 * x - 0.75)
        ],
        [
            0,
            24.0 * x,
            24.0 * x,
            0,
            L * (6.0 - 30.0 * x**2),
            L * (30.0 * x**2 - 6.0)
        ],
        [
            0,
            -45.0 * x**2 - 12.0 * x + 7.5,
            -45.0 * x**2 - 12.0 * x + 7.5,
            0,
            L * (-7.5 * x**2 - 3.0 * x + 0.75),
            L * (7.5 * x**2 + 3.0 * x - 0.75)
        ]
    ])


@njit(nogil=True, parallel=True, cache=__cache)
def shape_function_values_bulk(x: ndarray, L: ndarray):
    """
    Evaluates the shape functions at several points
    in the range [-1, 1].

    Parameters
    ----------
    x : 1d numpy float array
        The points of interest in the range [-1, -1]

    Returns
    -------
    numpy float array of shape (nE, nP, nNE, nDOF=6)
    """
    nP = x.shape[0]
    nE = L.shape[0]
    res = np.zeros((nE, nP, 3, 6), dtype=x.dtype)
    # Both loops are prange: elements and points evaluate independently.
    for iE in prange(nE):
        for iP in prange(nP):
            res[iE, iP] = shape_function_values(x[iP], L[iE])
    return res


@njit(nogil=True, cache=__cache)
def shape_function_derivatives(x, L):
    """
    Evaluates the derivatives of the shape functions at a
    point x in the range [-1, 1].

    Parameters
    ----------
    x : float
        The point of interest in the range [-1, -1]
    djac : float
        Determinant of the Jacobi matrix of local-global transformation
        between the master elment and the actual element. Default is 1.0.

    Returns
    -------
    numpy float array of shape (nNE, nDOF=6, 3)
    """
    # Last axis stacks 1st, 2nd, and 3rd derivatives.
    res = np.zeros((3, 6, 3))
    res[:, :, 0] = shape_function_derivatives_1(x, L)
    res[:, :, 1] = shape_function_derivatives_2(x, L)
    res[:, :, 2] = shape_function_derivatives_3(x, L)
    return res


@njit(nogil=True, parallel=True, cache=__cache)
def shape_function_derivatives_bulk(x: ndarray, L: ndarray):
    """
    Evaluates the derivatives of the shape functions at several
    points in the range [-1, 1].

    Returns
    -------
    dshp (nE, nP, nNE, nDOF=6, 3)
    """
    nP = x.shape[0]
    nE = L.shape[0]
    res = np.zeros((nE, nP, 3, 6, 3), dtype=x.dtype)
    for iE in prange(nE):
        for iP in prange(nP):
            res[iE, iP] = shape_function_derivatives(x[iP], L[iE])
    return res
28.834123
74
0.406969
987
6,084
2.45998
0.092199
0.04201
0.025947
0.016474
0.826606
0.799423
0.707578
0.649094
0.614498
0.573311
0
0.177583
0.40023
6,084
210
75
28.971429
0.487805
0.182281
0
0.58156
0
0
0
0
0
0
0
0
0
1
0.049645
false
0
0.021277
0
0.120567
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
544d6ab3c93e8ea09e3212e798c6ae35d747cb10
121
py
Python
maml_trpo/policies/base_policy.py
adityabingi/maml-trpo-metaworld
9247018e72563a5cbf3df9ce7c384aef9812d18b
[ "MIT" ]
null
null
null
maml_trpo/policies/base_policy.py
adityabingi/maml-trpo-metaworld
9247018e72563a5cbf3df9ce7c384aef9812d18b
[ "MIT" ]
null
null
null
maml_trpo/policies/base_policy.py
adityabingi/maml-trpo-metaworld
9247018e72563a5cbf3df9ce7c384aef9812d18b
[ "MIT" ]
null
null
null
import torch
import torch.nn as nn


class BasePolicy(nn.Module):
    """Abstract base class for policies built on torch.nn.Module.

    Fixes vs. original: `def__init__` was missing the space after `def`
    (it parsed as a name, not a method definition) and both methods lacked
    bodies, so the module did not even compile.
    """

    def __init__(self):
        super().__init__()

    def update_params(self):
        """Adapt this policy's parameters; subclasses must implement."""
        raise NotImplementedError
8.066667
28
0.710744
18
121
4.5
0.666667
0.271605
0
0
0
0
0
0
0
0
0
0
0.198347
121
15
29
8.066667
0.835052
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.4
null
null
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
545a7d273c29c5e2b2386ded130c6883b5c4bd98
384
py
Python
hearthstone/simulator/replay/observer.py
JDBumgardner/stone_ground_hearth_battles
9fe095651fab60e8ddbf563f0b9b7f3e723d5f4f
[ "Apache-2.0" ]
20
2020-08-01T03:14:57.000Z
2021-12-19T11:47:50.000Z
hearthstone/simulator/replay/observer.py
JDBumgardner/stone_ground_hearth_battles
9fe095651fab60e8ddbf563f0b9b7f3e723d5f4f
[ "Apache-2.0" ]
48
2020-08-01T03:06:43.000Z
2022-02-27T10:03:47.000Z
hearthstone/simulator/replay/observer.py
JDBumgardner/stone_ground_hearth_battles
9fe095651fab60e8ddbf563f0b9b7f3e723d5f4f
[ "Apache-2.0" ]
3
2020-06-28T01:23:37.000Z
2021-11-11T23:09:36.000Z
from typing import Any

from hearthstone.simulator.agent.actions import Action
from hearthstone.simulator.core.tavern import Tavern

# Observers may return arbitrary annotation payloads; kept loose on purpose.
Annotation = Any


class Observer:
    """No-op callback interface for objects that watch a game's events."""

    def name(self) -> str:
        # Identifier for this observer; base implementation returns None.
        pass

    def on_action(self, tavern: 'Tavern', player: str, action: 'Action') -> Annotation:
        # Invoked after `player` performs `action`; may return an annotation.
        pass

    def on_game_over(self, tavern: 'Tavern') -> Annotation:
        # Invoked once when the game ends.
        pass
21.333333
87
0.690104
48
384
5.458333
0.479167
0.114504
0.183206
0
0
0
0
0
0
0
0
0
0.213542
384
17
88
22.588235
0.86755
0
0
0.272727
0
0
0.046875
0
0
0
0
0
0
1
0.272727
false
0.272727
0.272727
0
0.636364
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
5467aa342ec4b4d507a2adfb89432d7d8d59b569
1,094
py
Python
dunner/preprocess_helper.py
ebegen/Dunner
36e3ab6edb3692a9713cdca02badf45da8153ce8
[ "MIT" ]
null
null
null
dunner/preprocess_helper.py
ebegen/Dunner
36e3ab6edb3692a9713cdca02badf45da8153ce8
[ "MIT" ]
null
null
null
dunner/preprocess_helper.py
ebegen/Dunner
36e3ab6edb3692a9713cdca02badf45da8153ce8
[ "MIT" ]
null
null
null
import numpy as np
import pandas as pd
from multiprocessing import cpu_count, Pool
import dask.dataframe as ddf
import swifter


class PreprocessHelper(object):
    """Helpers for applying row-wise preprocessing functions to DataFrames."""

    def __init__(self):
        pass

    def parallelize(self, data, func):
        """Apply `func` to every row of `data` in parallel.

        Delegates the execution strategy to swifter (earlier revisions used
        multiprocessing.Pool / dask partitions by hand for the same job).

        :param data: pandas DataFrame to process
        :param func: callable applied to each row
        :return: the transformed data
        """
        new_data = data.swifter.apply(lambda row: func(row), axis=1)
        return new_data

    def pipeline(self, function_list=None):
        """Execute the given functions serially.

        Fix: the previous code called .items() on the default None and
        raised AttributeError; an empty/missing mapping is now a no-op.

        :param function_list: mapping of name -> zero-argument callable
        :return: None
        """
        if not function_list:
            return
        for name, func in function_list.items():
            func()
29.567568
111
0.626143
139
1,094
4.791367
0.510791
0.036036
0
0
0
0
0
0
0
0
0
0.005006
0.269653
1,094
36
112
30.388889
0.828536
0.454296
0
0
0
0
0
0
0
0
0
0
0
1
0.214286
false
0.071429
0.357143
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
1
0
1
0
0
4
548157d1356804fbe2837dd6f05690a62424edf1
320
py
Python
dteenergybridge/exceptions.py
kylehendricks/dteenergybridge
26b9b280ca16c7c86e679d5dc30c0faa0cbcf6eb
[ "MIT" ]
2
2018-09-28T01:55:30.000Z
2020-05-26T02:54:46.000Z
dteenergybridge/exceptions.py
kylehendricks/dteenergybridge
26b9b280ca16c7c86e679d5dc30c0faa0cbcf6eb
[ "MIT" ]
3
2019-01-09T19:51:57.000Z
2021-11-15T18:24:11.000Z
dteenergybridge/exceptions.py
kylehendricks/dteenergybridge
26b9b280ca16c7c86e679d5dc30c0faa0cbcf6eb
[ "MIT" ]
null
null
null
"""DTE Energy Bridge Exceptions.""" class DteEnergyBridgeError(Exception): """Base class for all DTE Energy Bridge exceptions""" class InvalidResponseError(DteEnergyBridgeError): """Response from DTE Energy Bridge was invalid""" class InvalidArgumentError(DteEnergyBridgeError): """Invalid argument"""
22.857143
57
0.753125
30
320
8.033333
0.566667
0.112033
0.186722
0.207469
0.248963
0
0
0
0
0
0
0
0.140625
320
13
58
24.615385
0.876364
0.43125
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4
54832c4e5cb7953181c83261c693adc1034cbabd
203
py
Python
backend/clients/api/urls.py
gitdevstar/tikatok
78729028f20eda822d9ef36634685feb69d5a3a5
[ "Apache-2.0" ]
null
null
null
backend/clients/api/urls.py
gitdevstar/tikatok
78729028f20eda822d9ef36634685feb69d5a3a5
[ "Apache-2.0" ]
null
null
null
backend/clients/api/urls.py
gitdevstar/tikatok
78729028f20eda822d9ef36634685feb69d5a3a5
[ "Apache-2.0" ]
null
null
null
from clients.api.views import ClientsViewSet
from rest_framework.routers import DefaultRouter

# Register the clients viewset at the router root; DefaultRouter generates
# the list/detail URL patterns (and the API root view) automatically.
router = DefaultRouter()
router.register(r'', ClientsViewSet, basename='clients')

urlpatterns = router.urls
29
56
0.82266
23
203
7.217391
0.695652
0.228916
0
0
0
0
0
0
0
0
0
0
0.08867
203
6
57
33.833333
0.897297
0
0
0
0
0
0.034483
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
54862f003a93a7d1a8a92841d4d25ad2e37e1c02
1,769
py
Python
src/BSL_IHI/variables.py
juhuntenburg/pipelines
9904065cccb8e316cece5451f595a24774f07bd5
[ "MIT" ]
13
2019-03-10T23:13:06.000Z
2022-02-08T08:49:28.000Z
src/BSL_IHI/variables.py
juhuntenburg/pipelines
9904065cccb8e316cece5451f595a24774f07bd5
[ "MIT" ]
1
2015-03-31T20:42:08.000Z
2015-04-03T23:58:58.000Z
src/BSL_IHI/variables.py
NeuroanatomyAndConnectivity/pipelines
9904065cccb8e316cece5451f595a24774f07bd5
[ "MIT" ]
18
2015-01-08T13:27:40.000Z
2021-06-22T03:35:45.000Z
''' Created on Feb 20, 2013 @author: gorgolewski, steele ''' #import os #subjects = os.listdir("/scr/namibia1/baird/MPI_Project/Neuroimaging_Data/") working_dir = "/scr/alaska1/steele/BSL_IHI/processing/cmt" results_dir = "/scr/alaska1/steele/BSL_IHI/processing/cmt/results" freesurfer_dir = '/scr/alaska1/steele/BSL_IHI/processing/freesurfer/' subjects_M = ['KCDT100819_T1.TRIO', 'JA7T100824_T1.TRIO', '17230.95_20111026_T1.TRIO', 'SJAT_100416_T1.TRIO', 'DM6T100909_T1.TRIO', 'NS5T090217_T1.TRIO', '11530.56_090910_T1.TRIO', '15205.bb_20110818_T1.TRIO', 'BSLT100916__T1.TRIO', 'SF8T100916_T1.TRIO', 'SAST_100421_T1.TRIO', 'SMXT100805_T1.TRIO', 'MN3T090909_T1.TRIO', 'ED2T101126_T1.TRIO', 'LP4T091026_T1.TRIO', 'DS9T101110_T1.TRIO', 'GD4T100909_T1.TRIO', 'AS3T100715_T1.TRIO', 'SL6T101119_T1.TRIO', 'UF1T100824_T1.TRIO', '12522.80_20110818_T1.TRIO', 'GC6T100805_T1.TRIO', '15832.a8_20110616_T1.TRIO', 'KAHT101103_T1.TRIO', '16833.de_20111025_T1.TRIO', 'SCMT101110_T1.TRIO', 'KE5T100909_T1.TRIO', '14841.b6_20111026_T1.TRIO'] subjects_NM= ['MMJT100420_T1.TRIO', 'RMFT100708_T1.TRIO', 'KG6T100708_T1.TRIO', 'GMOT100628_T1.TRIO', 'STCT090817_T1.TRIO', 'DH2T100420_T1.TRIO', '12612.9b_20090318_T1_TRIO', 'RSET090817_T1.TRIO', 'LC7T100629_T1.TRIO', '14102.d1_20111024_T1.TRIO', 'DA5T110620_T1.TRIO', '16687.41_20111025_T1.TRIO', '01212.43_20090617_T1.TRIO', 'BC9T100831_T1.TRIO', 'NC3T090721_T1.TRIO', 'WMCT090817_T1.TRIO', 'WSFT100322_T1.TRIO', 'BSGT081016_T1.TRIO', '10060.70_20111025_T1.TRIO', 'HCBT060321_T1.DTI', '11401.38_111025_T1.TRIO', '10576.44_20091217_T1.TRIO', '15510.c9_20111110_T1.TRIO', 'JR1T090216_T1.TRIO', 'WF5T091110_T1.TRIO', 'WT6T090807_T1.TRIO', 'HN3T090610_T1.TRIO', 'SU3T090819_T1.TRIO']
24.232877
76
0.751837
252
1,769
4.940476
0.464286
0.26506
0.031325
0.045783
0.100402
0.100402
0.100402
0.072289
0.072289
0
0
0.337846
0.081402
1,769
72
77
24.569444
0.428308
0.07801
0
0
0
0
0.773935
0.316862
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
548d396c2b5196bb71f8c11e8c2e71544047cb42
604
py
Python
python/mixer_shortcode/errors.py
AlinGhinoiu/shortcode-oauth
9fc4a44ee3a96ec743bd94efb1715cf1acee0996
[ "MIT" ]
10
2018-09-19T20:13:35.000Z
2020-05-23T22:38:52.000Z
python/mixer_shortcode/errors.py
AlinGhinoiu/shortcode-oauth
9fc4a44ee3a96ec743bd94efb1715cf1acee0996
[ "MIT" ]
7
2018-09-21T18:03:31.000Z
2020-04-25T18:26:56.000Z
python/mixer_shortcode/errors.py
AlinGhinoiu/shortcode-oauth
9fc4a44ee3a96ec743bd94efb1715cf1acee0996
[ "MIT" ]
9
2019-01-27T04:08:22.000Z
2020-01-18T20:43:35.000Z
class ShortCodeError(Exception): """Base exception raised when some unexpected event occurs in the shortcode OAuth flow.""" pass class UnknownShortCodeError(ShortCodeError): """Exception raised when an unknown error happens while running shortcode OAuth. """ pass class ShortCodeAccessDeniedError(ShortCodeError): """Exception raised when the user denies access to the client in shortcode OAuth.""" pass class ShortCodeTimeoutError(ShortCodeError): """Exception raised when the shortcode expires without being accepted.""" pass
26.26087
80
0.710265
62
604
6.919355
0.548387
0.214452
0.177156
0.230769
0.167832
0
0
0
0
0
0
0
0.22351
604
22
81
27.454545
0.914712
0.511589
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
5490c7251251b33877c0edf654cc2f79d064ab79
55
py
Python
Python/Input/Input.py
sachinprabhu007/HackerRank-Solutions
f42d3c1e989b288e42b4674a926d007aa22940a1
[ "MIT" ]
null
null
null
Python/Input/Input.py
sachinprabhu007/HackerRank-Solutions
f42d3c1e989b288e42b4674a926d007aa22940a1
[ "MIT" ]
1
2019-01-16T12:13:29.000Z
2019-01-16T14:57:57.000Z
Python/Input/Input.py
sachinprabhu007/HackerRank-Solutions
f42d3c1e989b288e42b4674a926d007aa22940a1
[ "MIT" ]
null
null
null
x,k = map(int,input().split()) print(k==eval(input()))
27.5
31
0.6
10
55
3.3
0.8
0
0
0
0
0
0
0
0
0
0
0
0.072727
55
2
32
27.5
0.647059
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
54b5c79d63bf4c07865a1e26472a55bfc9ea5e90
399
py
Python
test1/forms.py
djsaeedkhan/django-ip-validator
99af5285e1e0ef74d49aae2dc93693fdbb9b8628
[ "Apache-2.0" ]
null
null
null
test1/forms.py
djsaeedkhan/django-ip-validator
99af5285e1e0ef74d49aae2dc93693fdbb9b8628
[ "Apache-2.0" ]
null
null
null
test1/forms.py
djsaeedkhan/django-ip-validator
99af5285e1e0ef74d49aae2dc93693fdbb9b8628
[ "Apache-2.0" ]
null
null
null
from django import forms from django.core.validators import RegexValidator, EmailValidator, validate_ipv46_address validate_hostname = RegexValidator(regex=r'[a-zA-Z0-9-_]*\.[a-zA-Z]{2,6}') my_validator = RegexValidator(r'[a-zA-Z0-9-_]*\.[a-zA-Z]{2,6}', "Your string should contain letter A in it.") class CacheCheck(forms.Form): ip = forms.CharField(max_length=100,validators=[my_validator])
44.333333
109
0.75188
62
399
4.709677
0.612903
0.041096
0.027397
0.041096
0.089041
0.089041
0.089041
0.089041
0.089041
0.089041
0
0.035714
0.087719
399
8
110
49.875
0.766484
0
0
0
0
0.333333
0.250627
0.145363
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
54c4542000e5d54e1971578a0e73c7e34feca16d
6,860
py
Python
src/model/activation.py
Ziems/OBST
e31f460616d8bc29931f069843e4f94b7f38e260
[ "BSD-2-Clause" ]
15
2021-06-26T10:03:07.000Z
2021-12-04T12:56:36.000Z
src/model/activation.py
Ziems/OBST
e31f460616d8bc29931f069843e4f94b7f38e260
[ "BSD-2-Clause" ]
null
null
null
src/model/activation.py
Ziems/OBST
e31f460616d8bc29931f069843e4f94b7f38e260
[ "BSD-2-Clause" ]
2
2021-06-24T14:15:31.000Z
2021-12-09T16:11:40.000Z
import mesh_tensorflow as mtf import numpy as np import tensorflow as tf from .. import tf_wrapper as tfw from ..dataclass import BlockArgs from ..mtf_wrapper import relu as _relu, multiply, einsum, constant, sigmoid as _sigmoid, tanh as _tanh, softplus from ..utils_core import random_name, scoped tf1 = tf.compat.v1 class MishForward(mtf.Operation): def __init__(self, x: mtf.Tensor): super().__init__([x], name=random_name("mish_forward")) self._outputs = [mtf.Tensor(self, x.shape, x.dtype)] def gradient(self, grad_ys): return MishBackward(self.inputs[0], grad_ys[0]).outputs def lower(self, lowering): mesh_impl = lowering.mesh_impl(self) def slicewise_fn(x): return tfw.multiply(x, tfw.tanh(tfw.softplus(x))) y = mesh_impl.slicewise(slicewise_fn, lowering.tensors[self.inputs[0]]) lowering.set_tensor_lowering(self.outputs[0], y) class MishBackward(mtf.Operation): def __init__(self, x: mtf.Tensor, dy: mtf.Tensor): super().__init__([x, dy], name=random_name("mish_backward")) self._outputs = [mtf.Tensor(self, x.shape, x.dtype)] def lower(self, lowering): mesh_impl = lowering.mesh_impl(self) def slicewise_fn(x, dy): gte = tfw.tanh(tfw.softplus(x)) gte += 1. 
- tfw.square(gte) * x * tfw.sigmoid(x) return tfw.multiply(dy, gte) y = mesh_impl.slicewise(slicewise_fn, lowering.tensors[self.inputs[0]], lowering.tensors[self.inputs[1]]) lowering.set_tensor_lowering(self.outputs[0], y) class SiluForward(mtf.Operation): def __init__(self, x: mtf.Tensor): super().__init__([x], name=random_name("silu_forward")) self._outputs = [mtf.Tensor(self, x.shape, x.dtype)] def gradient(self, grad_ys): return SiluBackward(self.inputs[0], grad_ys[0]).outputs def lower(self, lowering): mesh_impl = lowering.mesh_impl(self) def slicewise_fn(x): return tfw.multiply(x, tfw.sigmoid(x)) y = mesh_impl.slicewise(slicewise_fn, lowering.tensors[self.inputs[0]]) lowering.set_tensor_lowering(self.outputs[0], y) class SiluBackward(mtf.Operation): def __init__(self, x: mtf.Tensor, dy: mtf.Tensor): super().__init__([x, dy], name=random_name("silu_backward")) self._outputs = [mtf.Tensor(self, x.shape, x.dtype)] def lower(self, lowering): mesh_impl = lowering.mesh_impl(self) def slicewise_fn(x, dy): gte = tfw.sigmoid(x) return dy * ((x - 1) * gte + 1) y = mesh_impl.slicewise(slicewise_fn, lowering.tensors[self.inputs[0]], lowering.tensors[self.inputs[1]]) lowering.set_tensor_lowering(self.outputs[0], y) class LeCunTanhForward(mtf.Operation): def __init__(self, x: mtf.Tensor): super().__init__([x], name=random_name("lecun_tanh_forward")) self._outputs = [mtf.Tensor(self, x.shape, x.dtype)] def gradient(self, grad_ys): return LeCunTanhBackward(self.inputs[0], grad_ys[0]).outputs def lower(self, lowering): mesh_impl = lowering.mesh_impl(self) def slicewise_fn(x): return tfw.tanh(x) + x * 0.1 y = mesh_impl.slicewise(slicewise_fn, lowering.tensors[self.inputs[0]]) lowering.set_tensor_lowering(self.outputs[0], y) class LeCunTanhBackward(mtf.Operation): def __init__(self, x: mtf.Tensor, dy: mtf.Tensor): super().__init__([x, dy], name=random_name("lecun_tanh_backward")) self._outputs = [mtf.Tensor(self, x.shape, x.dtype)] def lower(self, lowering): mesh_impl = 
lowering.mesh_impl(self) def slicewise_fn(x, dy): return tfw.multiply(dy, tfw.subtract(1.1, tfw.square(tfw.tanh(x)))) y = mesh_impl.slicewise(slicewise_fn, lowering.tensors[self.inputs[0]], lowering.tensors[self.inputs[1]]) lowering.set_tensor_lowering(self.outputs[0], y) class SoftsignForward(mtf.Operation): def __init__(self, x: mtf.Tensor): super().__init__([x], name=random_name("softsign_forward")) self._outputs = [mtf.Tensor(self, x.shape, x.dtype)] def gradient(self, grad_ys): return SoftsignBackward(self.inputs[0], grad_ys[0]).outputs def lower(self, lowering): mesh_impl = lowering.mesh_impl(self) def slicewise_fn(x): return x / (1. + tfw.abs(x)) y = mesh_impl.slicewise(slicewise_fn, lowering.tensors[self.inputs[0]]) lowering.set_tensor_lowering(self.outputs[0], y) class SoftsignBackward(mtf.Operation): def __init__(self, x: mtf.Tensor, dy: mtf.Tensor): super().__init__([x, dy], name=random_name("softsign_backward")) self._outputs = [mtf.Tensor(self, x.shape, x.dtype)] def lower(self, lowering): mesh_impl = lowering.mesh_impl(self) def slicewise_fn(x, dy): return dy / tfw.square(1. 
+ tfw.abs(x)) y = mesh_impl.slicewise(slicewise_fn, lowering.tensors[self.inputs[0]], lowering.tensors[self.inputs[1]]) lowering.set_tensor_lowering(self.outputs[0], y) def _output0(op): if not issubclass(op, mtf.Operation): raise ValueError def _wrapped(args: BlockArgs): return op(args.tensor).outputs[0] return _wrapped def _gelu(params, tensor: mtf.Tensor): return einsum([tensor, _tanh(einsum([tensor, tensor, tensor, constant(params, 0.044715)], output_shape=tensor.shape) + tensor * np.sqrt(2 / np.pi)) + 1.0, constant(params, 0.5)], output_shape=tensor.shape) def gelu(args: BlockArgs): return scoped("gelu", _gelu, args.params, args.tensor) def relu(args: BlockArgs): return _relu(args.tensor) def sigmoid(args: BlockArgs): return _sigmoid(args.tensor) def tanh(args: BlockArgs): return _tanh(args.tensor) def _mtf_mish(tensor: mtf.Tensor): return multiply(_tanh(softplus(tensor)), tensor) def mtf_mish(args: BlockArgs): return scoped("mtf_mish", _mtf_mish, args.tensor) ACTIVATIONS = {'relu': relu, 'sigmoid': sigmoid, 'tanh': tanh, 'gelu': gelu, 'lecun_tanh': _output0(LeCunTanhForward), 'silu': _output0(SiluForward), 'mish': _output0(MishForward), "mtf_mish": mtf_mish, 'softsign': _output0(SoftsignForward) } def activate(args: BlockArgs) -> mtf.Tensor: """ Call activation function on mtf.Tensor. """ for fn_name in args: if fn_name not in ACTIVATIONS: continue return scoped(fn_name, ACTIVATIONS[fn_name], args) print(f'No activation function found for "{args.name_extras}". Falling back to identity. ' f'Known functions: {list(ACTIVATIONS.keys())}') return args.tensor
32.511848
113
0.645918
912
6,860
4.648026
0.122807
0.050955
0.060392
0.070771
0.615711
0.604152
0.604152
0.604152
0.604152
0.604152
0
0.010648
0.219679
6,860
210
114
32.666667
0.781244
0.005685
0
0.431655
0
0
0.045408
0.006907
0
0
0
0
0
1
0.273381
false
0
0.05036
0.129496
0.546763
0.007194
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
54c61fc4e587efdfd99182b2383a171bf6c66796
332
py
Python
ui.py
fuckTextBooks/fuckTextBooks
41571f61cb201003057060657546d2ac0065b4bf
[ "MIT" ]
null
null
null
ui.py
fuckTextBooks/fuckTextBooks
41571f61cb201003057060657546d2ac0065b4bf
[ "MIT" ]
null
null
null
ui.py
fuckTextBooks/fuckTextBooks
41571f61cb201003057060657546d2ac0065b4bf
[ "MIT" ]
null
null
null
from getpass import getpass def get_username() -> str: u = input("UTORID: ") return u def get_password() -> str: return getpass("Password: ") """Deprecated def get_download_location() -> str: loc = "" while loc == "": loc = input("Download Location \n(i.e. /Documents/textbooks): ") return loc """
20.75
72
0.608434
40
332
4.95
0.55
0.090909
0
0
0
0
0
0
0
0
0
0
0.231928
332
16
73
20.75
0.776471
0
0
0
0
0
0.113924
0
0
0
0
0
0
1
0.333333
false
0.5
0.166667
0.166667
0.833333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
49a82fb3e8e8ae062a605eb2938b8e761eb96123
1,010
py
Python
TripScheduling/Passenger.py
StevenBryceLee/TripScheduling
1bf2513c2fec54ce4f7c44433529e7d1b37e0ff0
[ "MIT" ]
null
null
null
TripScheduling/Passenger.py
StevenBryceLee/TripScheduling
1bf2513c2fec54ce4f7c44433529e7d1b37e0ff0
[ "MIT" ]
null
null
null
TripScheduling/Passenger.py
StevenBryceLee/TripScheduling
1bf2513c2fec54ce4f7c44433529e7d1b37e0ff0
[ "MIT" ]
null
null
null
class Passenger: def __init__(self, name): self.name = name def selectTrip(self, tripOptions): ''' Given a list of trip options, a passenger may select a trip This trip is then added to the trip queue, which allows for later return pricing tripOptions: the queue of given trips available to the passenger This should come from TripPlanner.offerReturnPrices() returns the index of the desired trip ''' for idx, trip in enumerate(tripOptions): print(f'Trip Number: {idx}\n{repr(trip)}') trip_num = 100 while trip_num < 0 and trip_num > len(tripOptions): trip_num = int(input('enter desired trip number: ')) return trip_num def stateSource(self): return input('enter desired source: ') def stateDestination(self): return input('enter desired destination: ') def stateHour(self): return int(input('enter desired hour to leave: '))
32.580645
88
0.628713
127
1,010
4.929134
0.496063
0.055911
0.108626
0.063898
0.086262
0
0
0
0
0
0
0.005602
0.293069
1,010
31
89
32.580645
0.871148
0.29604
0
0
0
0
0.208841
0
0
0
0
0
0
1
0.3125
false
0.0625
0
0.1875
0.625
0.0625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
49b7b1a4153828f8b04ac3f0caadd6d320d0253a
211
py
Python
setup.py
difince/kinney
31bf5a51a1378f4c9e2284739ac353a4f5aa12d3
[ "Apache-2.0" ]
27
2020-05-08T20:45:26.000Z
2022-01-12T02:50:07.000Z
setup.py
difince/kinney
31bf5a51a1378f4c9e2284739ac353a4f5aa12d3
[ "Apache-2.0" ]
26
2020-02-25T22:02:45.000Z
2021-12-13T20:52:29.000Z
setup.py
difince/kinney
31bf5a51a1378f4c9e2284739ac353a4f5aa12d3
[ "Apache-2.0" ]
7
2020-02-14T23:11:50.000Z
2020-09-25T02:34:41.000Z
"""Setuptools script for the "kinney" Python package.""" import setuptools # Package configuration exists solely in `setup.cfg` in order to constrain it to # be simpler and more declarative. setuptools.setup()
30.142857
80
0.772512
29
211
5.62069
0.793103
0
0
0
0
0
0
0
0
0
0
0
0.14218
211
6
81
35.166667
0.900552
0.772512
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
49d6e22c5e68c3af10504ef2b0bc23b157b185d4
188
py
Python
send_key_explame/scan_code/test.py
Llona/hotkey
ec40ccf8212dd166b54bf1e7a462f889fa905424
[ "Apache-2.0" ]
null
null
null
send_key_explame/scan_code/test.py
Llona/hotkey
ec40ccf8212dd166b54bf1e7a462f889fa905424
[ "Apache-2.0" ]
null
null
null
send_key_explame/scan_code/test.py
Llona/hotkey
ec40ccf8212dd166b54bf1e7a462f889fa905424
[ "Apache-2.0" ]
null
null
null
import json import utils import keyboard print(keyboard.normalize_name('DIK_F1')) # with open("test.cfg", 'r', ) as load_f: # load_dict = json.load(load_f) # print(load_dict)
14.461538
41
0.68617
29
188
4.241379
0.62069
0.081301
0
0
0
0
0
0
0
0
0
0.006452
0.175532
188
12
42
15.666667
0.787097
0.5
0
0
0
0
0.066667
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
49f53dac99049066cb6783b071671edb71ff6bab
53
py
Python
one.py
bryanseah234/python-crash-code
a1440ef74fac212e494253e4144d85cbb626228d
[ "MIT" ]
1
2020-11-03T07:52:33.000Z
2020-11-03T07:52:33.000Z
one.py
bryanseah234/python-crash-code
a1440ef74fac212e494253e4144d85cbb626228d
[ "MIT" ]
null
null
null
one.py
bryanseah234/python-crash-code
a1440ef74fac212e494253e4144d85cbb626228d
[ "MIT" ]
null
null
null
from itertools import count as crash list(crash(0))
17.666667
36
0.773585
9
53
4.555556
0.888889
0
0
0
0
0
0
0
0
0
0
0.022222
0.150943
53
2
37
26.5
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
49f613d6ef4cc82539b647b5337f8ea848263940
237
py
Python
classes/models/ModelEncoder.py
canary-for-cognition/merge-datasets-gan
435d62a2cc281e25b9616db0ee6bacd13a12fa9c
[ "MIT" ]
null
null
null
classes/models/ModelEncoder.py
canary-for-cognition/merge-datasets-gan
435d62a2cc281e25b9616db0ee6bacd13a12fa9c
[ "MIT" ]
null
null
null
classes/models/ModelEncoder.py
canary-for-cognition/merge-datasets-gan
435d62a2cc281e25b9616db0ee6bacd13a12fa9c
[ "MIT" ]
null
null
null
from classes.core.Model import Model from classes.modules.Encoder import Encoder class ModelEncoder(Model): def __init__(self): super().__init__() self._network = Encoder() def optimize(self, x): pass
18.230769
43
0.666667
28
237
5.321429
0.607143
0.147651
0
0
0
0
0
0
0
0
0
0
0.236287
237
12
44
19.75
0.823204
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0.125
0.25
0
0.625
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
49f6bad05021ff76b2769ec7d9b34a66ceec5733
711
py
Python
nonebot/adapters/onebot/v12/__init__.py
nonebot/adapter-onebot
65f1a1d906c1291099a23c02acc9450814e9d42a
[ "MIT" ]
13
2021-12-21T10:33:32.000Z
2022-02-26T08:40:14.000Z
nonebot/adapters/onebot/v12/__init__.py
nonebot/adapter-onebot
65f1a1d906c1291099a23c02acc9450814e9d42a
[ "MIT" ]
7
2022-01-13T05:25:25.000Z
2022-03-25T17:58:45.000Z
nonebot/adapters/onebot/v12/__init__.py
nonebot/adapter-onebot
65f1a1d906c1291099a23c02acc9450814e9d42a
[ "MIT" ]
3
2022-01-11T11:28:37.000Z
2022-01-20T02:54:20.000Z
"""OneBot v12 协议适配。 协议详情请看: [OneBot V12](https://12.1bot.dev/) FrontMatter: sidebar_position: 0 description: onebot.v12 模块 """ from nonebot.adapters.onebot.exception import ActionFailed as ActionFailed from nonebot.adapters.onebot.exception import NetworkError as NetworkError from nonebot.adapters.onebot.exception import ApiNotAvailable as ApiNotAvailable from nonebot.adapters.onebot.exception import ( OneBotAdapterException as OneBotAdapterException, ) from .event import * from .permission import * from .bot import Bot as Bot from .log import log as log from .adapter import Adapter as Adapter from .message import Message as Message from .message import MessageSegment as MessageSegment
29.625
80
0.804501
90
711
6.344444
0.344444
0.077058
0.1331
0.175131
0.28021
0.28021
0
0
0
0
0
0.016181
0.130802
711
23
81
30.913043
0.907767
0.181435
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.846154
0
0.846154
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
b71afda126c5e7a9b0edef54a60b8144558e516b
28
py
Python
test/__init__.py
finkbeiner-lab/tidyML
3738b89ca5b40b0b19adcf635f95875e2e20017d
[ "MIT" ]
null
null
null
test/__init__.py
finkbeiner-lab/tidyML
3738b89ca5b40b0b19adcf635f95875e2e20017d
[ "MIT" ]
null
null
null
test/__init__.py
finkbeiner-lab/tidyML
3738b89ca5b40b0b19adcf635f95875e2e20017d
[ "MIT" ]
null
null
null
""" Exported unit tests """
7
19
0.607143
3
28
5.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.178571
28
3
20
9.333333
0.73913
0.678571
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
b732b9748d0fa245275041a3ee4752143530d8af
114
py
Python
python/day01/part1.py
ijanos/advent2015
6f7fda5ed67957e087fadd9638d620f1687484f3
[ "MIT" ]
null
null
null
python/day01/part1.py
ijanos/advent2015
6f7fda5ed67957e087fadd9638d620f1687484f3
[ "MIT" ]
null
null
null
python/day01/part1.py
ijanos/advent2015
6f7fda5ed67957e087fadd9638d620f1687484f3
[ "MIT" ]
null
null
null
#!/bin/env python3 import fileinput for line in fileinput.input(): print(line.count("(") - line.count(")"))
19
44
0.649123
15
114
4.933333
0.733333
0.243243
0
0
0
0
0
0
0
0
0
0.010309
0.149123
114
6
44
19
0.752577
0.149123
0
0
0
0
0.020833
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.333333
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
b7340a5c6e187ea41de2eb1af71ccaab6eee53e0
17
py
Python
data/studio21_generated/introductory/4142/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4142/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
data/studio21_generated/introductory/4142/starter_code.py
vijaykumawat256/Prompt-Summarization
614f5911e2acd2933440d909de2b4f86653dc214
[ "Apache-2.0" ]
null
null
null
def solve(arr):
8.5
15
0.647059
3
17
3.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.176471
17
2
16
8.5
0.785714
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
3f898af703590cf04e115b48656029dbb5657680
200
py
Python
tests/test_version.py
LucaCappelletti94/transpose_dict
d3ce6fcf23ad7fddb654ecf89f618c59d62a7b35
[ "MIT" ]
1
2021-10-11T18:19:14.000Z
2021-10-11T18:19:14.000Z
tests/test_version.py
LucaCappelletti94/transpose_dict
d3ce6fcf23ad7fddb654ecf89f618c59d62a7b35
[ "MIT" ]
null
null
null
tests/test_version.py
LucaCappelletti94/transpose_dict
d3ce6fcf23ad7fddb654ecf89f618c59d62a7b35
[ "MIT" ]
null
null
null
"""Test for version file syntax.""" import re from transpose_dict.__version__ import __version__ def test_version(): pattern = re.compile(r"\d+\.\d+\.\d+") assert pattern.match(__version__)
22.222222
50
0.71
27
200
4.740741
0.62963
0.03125
0
0
0
0
0
0
0
0
0
0
0.14
200
9
51
22.222222
0.744186
0.145
0
0
0
0
0.078313
0
0
0
0
0
0.2
1
0.2
false
0
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
3f945b029203aa02cb32771307a6bd5412afc839
173
py
Python
transport/tests/__init__.py
zkdev/cc-utils
042c6632ca6f61a484bc0a71f85957aeba7f7278
[ "BSD-3-Clause" ]
15
2018-04-18T13:25:30.000Z
2022-03-04T09:25:41.000Z
transport/tests/__init__.py
zkdev/cc-utils
042c6632ca6f61a484bc0a71f85957aeba7f7278
[ "BSD-3-Clause" ]
221
2018-04-12T06:29:43.000Z
2022-03-27T03:01:40.000Z
transport/tests/__init__.py
zkdev/cc-utils
042c6632ca6f61a484bc0a71f85957aeba7f7278
[ "BSD-3-Clause" ]
29
2018-04-11T14:42:23.000Z
2021-11-09T16:26:32.000Z
import sys import os own_dir = os.path.abspath(os.path.dirname(__name__)) repo_root = os.path.abspath(os.path.join(own_dir, os.path.pardir)) sys.path.insert(1, repo_root)
21.625
66
0.763006
32
173
3.875
0.46875
0.241935
0.129032
0.193548
0.306452
0
0
0
0
0
0
0.006329
0.086705
173
7
67
24.714286
0.778481
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
3f9df0f3a0a6016739ed445e549fc615e528ff14
112
py
Python
Case7.py
ciracheta99/TestLinkPython
563bf24dac6c2309bd5989767c30a2e70e6c0f68
[ "Apache-2.0" ]
1
2022-01-18T07:48:24.000Z
2022-01-18T07:48:24.000Z
Case7.py
ciracheta99/TestLinkPython
563bf24dac6c2309bd5989767c30a2e70e6c0f68
[ "Apache-2.0" ]
null
null
null
Case7.py
ciracheta99/TestLinkPython
563bf24dac6c2309bd5989767c30a2e70e6c0f68
[ "Apache-2.0" ]
null
null
null
print ('Inside Case 7') self.logResult("Just checking if a log file gets generated") self.reportTCResults("p")
37.333333
61
0.75
17
112
4.941176
0.941176
0
0
0
0
0
0
0
0
0
0
0.010204
0.125
112
3
62
37.333333
0.846939
0
0
0
1
0
0.504505
0
0
0
0
0
0
1
0
true
0
0
0
0
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
3fb7ed37a63870d807d69c796bf9cacea7ad9b72
89
py
Python
okcupid/apps.py
ealmuina/statsproject
43186bdb213202ff846b18e677b89abfc233bca3
[ "MIT" ]
null
null
null
okcupid/apps.py
ealmuina/statsproject
43186bdb213202ff846b18e677b89abfc233bca3
[ "MIT" ]
null
null
null
okcupid/apps.py
ealmuina/statsproject
43186bdb213202ff846b18e677b89abfc233bca3
[ "MIT" ]
null
null
null
from django.apps import AppConfig class OkcupidConfig(AppConfig): name = 'okcupid'
14.833333
33
0.752809
10
89
6.7
0.9
0
0
0
0
0
0
0
0
0
0
0
0.168539
89
5
34
17.8
0.905405
0
0
0
0
0
0.078652
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
3fbd235306ae4985b02f3fa36eaca50647a0e931
210
py
Python
scieio/viscometers/apps.py
arnelimperial/scieio
279a25766f20d074a3df824c0fbc8b2d8e35f272
[ "MIT" ]
null
null
null
scieio/viscometers/apps.py
arnelimperial/scieio
279a25766f20d074a3df824c0fbc8b2d8e35f272
[ "MIT" ]
8
2021-03-19T01:56:44.000Z
2022-03-12T00:24:21.000Z
scieio/viscometers/apps.py
arnelimperial/scieio
279a25766f20d074a3df824c0fbc8b2d8e35f272
[ "MIT" ]
null
null
null
from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ class ViscometersConfig(AppConfig): name = 'scieio.viscometers' verbose_name = _("Viscometers and Rheometers")
26.25
54
0.785714
24
210
6.708333
0.75
0.124224
0
0
0
0
0
0
0
0
0
0
0.142857
210
7
55
30
0.894444
0
0
0
0
0
0.209524
0
0
0
0
0
0
1
0
false
0
0.4
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
3fd5d933fbabb187873f90255cf0154903357ea7
9,513
py
Python
TwoThreeTree/test_two_three_tree.py
alexander-dejeu/CodeForMediumArticles
186048463fbf8d8e7095cff3f91c27bfceeec3a4
[ "MIT" ]
null
null
null
TwoThreeTree/test_two_three_tree.py
alexander-dejeu/CodeForMediumArticles
186048463fbf8d8e7095cff3f91c27bfceeec3a4
[ "MIT" ]
null
null
null
TwoThreeTree/test_two_three_tree.py
alexander-dejeu/CodeForMediumArticles
186048463fbf8d8e7095cff3f91c27bfceeec3a4
[ "MIT" ]
null
null
null
from starter_two_three_tree import TwoThreeTree, Node import unittest class NodeTest(unittest.TestCase): def test_init(self): data = 1 node = Node(data) assert node.data[0] == data assert len(node.data) == 1 assert len(node.children) == 0 assert node.parent is None def test_init_list_of_data(self): data = [2, 3, 4] node = Node(*data) assert node.data[2] == data[2] assert len(node.data) == 3 assert len(node.children) == 0 assert node.parent is None def test_init_with_args(self): node = Node(1, 2, 3) assert node.data[2] == 3 assert len(node.data) == 3 assert len(node.children) == 0 assert node.parent is None def test_init_full(self): parent = Node(3) data = 1 node_children = [Node(2), Node(4)] node = Node(data, children=node_children, parent=parent) parent.children.append(node) assert node.data[0] == 1 assert node.parent == parent assert parent.children[0] == node assert len(parent.children) == 1 assert len(node.children) == 2 assert len(node.children[0].children) == 0 class TwoThreeTreeTest(unittest.TestCase): def test_init(self): ttt = TwoThreeTree() assert ttt.root is None def test_first_root(self): ttt = TwoThreeTree() ttt.insert(4) assert ttt.root is not None assert ttt.root.data[0] == 4 assert len(ttt.root.data) == 1 assert ttt.root.parent is None assert len(ttt.root.children) == 0 def test_first_split(self): ttt = TwoThreeTree() ttt.insert(4) ttt.insert(30) # It is important that the values remain sorted so going to Check assert ttt.root.data[0] == 4 assert ttt.root.data[1] == 30 ttt.insert(7) assert len(ttt.root.data) == 1 assert ttt.root.data[0] == 7 assert len(ttt.root.children) == 2 assert ttt.root.children[0].data[0] == 4 assert ttt.root.children[0].parent is ttt.root assert ttt.root.children[1].data[0] == 30 assert ttt.root.children[1].parent is ttt.root def test_split_leaf(self): ttt = TwoThreeTree() ttt.insert(4) ttt.insert(30) ttt.insert(7) ttt.insert(5) ttt.insert(3) assert len(ttt.root.children[0].data) == 1 assert len(ttt.root.children[1].data) == 
1 assert len(ttt.root.children[2].data) == 1 assert ttt.root.children[0].parent is ttt.root assert ttt.root.children[1].parent is ttt.root assert ttt.root.children[2].parent is ttt.root assert len(ttt.root.data) == 2 assert ttt.root.data[0] == 4 assert ttt.root.data[1] == 7 assert len(ttt.root.children[0].data) == 1 assert len(ttt.root.children[1].data) == 1 assert len(ttt.root.children[2].data) == 1 assert ttt.root.children[0].data[0] == 3 assert ttt.root.children[1].data[0] == 5 assert ttt.root.children[2].data[0] == 30 def test_full_two_level(self): ttt = TwoThreeTree() ttt.insert(4) ttt.insert(30) ttt.insert(7) ttt.insert(5) ttt.insert(3) ttt.insert(6) ttt.insert(2) ttt.insert(36) assert len(ttt.root.children[0].data) == 2 assert len(ttt.root.children[1].data) == 2 assert len(ttt.root.children[2].data) == 2 assert ttt.root.children[0].parent is ttt.root assert ttt.root.children[1].parent is ttt.root assert ttt.root.children[2].parent is ttt.root assert ttt.root.data[0] == 4 assert ttt.root.data[1] == 7 def test_full_two_level_split(self): ttt = TwoThreeTree() ttt.insert(4) ttt.insert(30) ttt.insert(7) ttt.insert(5) ttt.insert(3) ttt.insert(6) ttt.insert(2) ttt.insert(36) ttt.insert(1) assert len(ttt.root.children[0].children[0].data) == 1 assert len(ttt.root.children[0].children[1].data) == 1 assert len(ttt.root.children[1].children[0].data) == 2 assert len(ttt.root.children[1].children[1].data) == 2 assert len(ttt.root.data) == 1 assert len(ttt.root.children) == 2 assert len(ttt.root.children[1].data) == 1 assert ttt.root.data[0] == 4 assert ttt.root.children[0].data[0] == 2 assert ttt.root.children[1].data[0] == 7 assert ttt.root.children[0].parent is ttt.root assert ttt.root.children[1].parent is ttt.root assert ttt.root.children[0].children[0].parent is ttt.root.children[0] assert ttt.root.children[0].children[1].parent is ttt.root.children[0] assert ttt.root.children[1].children[0].parent is ttt.root.children[1] assert 
ttt.root.children[1].children[1].parent is ttt.root.children[1] def test_middle_split(self): ttt = TwoThreeTree() ttt.insert(10) ttt.insert(20) ttt.insert(30) ttt.insert(60) ttt.insert(70) ttt.insert(50) ttt.insert(40) assert len(ttt.root.children) == 2 assert ttt.root.children[0].data[0] == 20 assert ttt.root.children[1].data[0] == 60 assert len(ttt.root.children[0].children) == 2 assert ttt.root.children[0].children[0].data[0] == 10 assert ttt.root.children[0].children[1].data[0] == 30 assert len(ttt.root.children[1].children) == 2 assert ttt.root.children[1].children[0].data[0] == 50 assert ttt.root.children[1].children[1].data[0] == 70 def test_fill_full_three_level(self): ttt = TwoThreeTree() ttt.insert(4) ttt.insert(30) ttt.insert(7) ttt.insert(5) ttt.insert(3) ttt.insert(6) ttt.insert(2) ttt.insert(36) ttt.insert(1) ttt.insert(3) ttt.insert(0) ttt.insert(1) ttt.insert(40) ttt.insert(0) ttt.insert(2) ttt.insert(25) ttt.insert(41) assert len(ttt.root.children[0].children[0].data) == 2 assert len(ttt.root.children[0].children[1].data) == 2 assert len(ttt.root.children[1].children[0].data) == 2 assert len(ttt.root.children[1].children[1].data) == 2 assert len(ttt.root.data) == 1 assert len(ttt.root.children) == 2 assert len(ttt.root.children[1].data) == 2 assert ttt.root.children[0].data[0] == 1 assert ttt.root.children[0].data[1] == 2 assert ttt.root.children[0].children[0].parent == ttt.root.children[0] # At this point we are feeling better with the tests because we know # our split function can split a few times up the tree without fail def test_full_three_level_split(self): ttt = TwoThreeTree() ttt.insert(4) ttt.insert(30) ttt.insert(7) ttt.insert(5) ttt.insert(3) ttt.insert(6) ttt.insert(2) ttt.insert(36) ttt.insert(1) ttt.insert(3) ttt.insert(0) ttt.insert(1) ttt.insert(40) ttt.insert(0) ttt.insert(2) ttt.insert(25) ttt.insert(41) ttt.insert(45) # Pushed data up to the root correctly assert len(ttt.root.data) == 2 assert ttt.root.data[0] == 4 assert 
ttt.root.data[1] == 36 # Make sure the 2nd layer data looks right assert len(ttt.root.children[0].data) == 2 assert ttt.root.children[1].data[0] == 7 assert ttt.root.children[2].data[0] == 41 # Make sure the childen's parent relationships are correct assert ttt.root.children[2].parent is ttt.root assert ttt.root.children[2].children[0].parent is ttt.root.children[2] assert ttt.root.children[2].children[1].parent is ttt.root.children[2] assert ttt.root.children[1].children[0].parent is ttt.root.children[1] assert ttt.root.children[1].children[1].parent is ttt.root.children[1] def test_search(self): ttt = TwoThreeTree() ttt.insert(4) ttt.insert(30) ttt.insert(7) ttt.insert(5) ttt.insert(3) ttt.insert(6) ttt.insert(2) ttt.insert(36) ttt.insert(1) ttt.insert(3) ttt.insert(0) ttt.insert(1) ttt.insert(40) ttt.insert(0) ttt.insert(2) ttt.insert(25) ttt.insert(41) ttt.insert(45) assert ttt.search(4) is True assert ttt.search(30) is True assert ttt.search(7) is True assert ttt.search(5) is True assert ttt.search(3) is True assert ttt.search(6) is True assert ttt.search(2) is True assert ttt.search(36) is True assert ttt.search(1) is True assert ttt.search(0) is True assert ttt.search(40) is True assert ttt.search(25) is True assert ttt.search(41) is True assert ttt.search(45) is True assert ttt.search(12) is False assert ttt.search(-4) is False assert ttt.search(49) is False assert ttt.search(57) is False assert ttt.search(101) is False assert ttt.search(124) is False def test_search_empty_tree(self): ttt = TwoThreeTree() if __name__ == '__main__': unittest.main()
31.922819
78
0.582151
1,400
9,513
3.919286
0.080714
0.132677
0.199563
0.141607
0.813559
0.719701
0.681611
0.622745
0.602515
0.528522
0
0.054277
0.287291
9,513
297
79
32.030303
0.755015
0.034794
0
0.616
0
0
0.000872
0
0
0
0
0
0.488
1
0.06
false
0
0.008
0
0.076
0
0
0
0
null
0
1
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
3fe4e8215c8616ad5d38caffdbec7d30e6ddf83b
123
py
Python
src/ingest-pipeline/md/type_base.py
AustinHartman/ingest-pipeline
788d9310792c9396a38650deda3dad11483b368c
[ "MIT" ]
6
2020-02-18T19:09:59.000Z
2021-10-07T20:38:46.000Z
src/ingest-pipeline/md/type_base.py
AustinHartman/ingest-pipeline
788d9310792c9396a38650deda3dad11483b368c
[ "MIT" ]
324
2020-02-06T22:08:50.000Z
2022-03-24T20:44:33.000Z
src/ingest-pipeline/md/type_base.py
AustinHartman/ingest-pipeline
788d9310792c9396a38650deda3dad11483b368c
[ "MIT" ]
2
2020-07-20T14:43:49.000Z
2021-10-29T18:24:36.000Z
#! /usr/bin/env python """ Some type definitions for metadata extraction """ class MetadataError(RuntimeError): pass
13.666667
45
0.723577
14
123
6.357143
1
0
0
0
0
0
0
0
0
0
0
0
0.162602
123
8
46
15.375
0.864078
0.544715
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
b74750d5a71d32dc52804691456ca2ca5ef2fabf
21
py
Python
psslib/__init__.py
bunyk/pss
d903f187b69ea2282b79b730454a041dd0c5f007
[ "Unlicense" ]
null
null
null
psslib/__init__.py
bunyk/pss
d903f187b69ea2282b79b730454a041dd0c5f007
[ "Unlicense" ]
null
null
null
psslib/__init__.py
bunyk/pss
d903f187b69ea2282b79b730454a041dd0c5f007
[ "Unlicense" ]
null
null
null
__version__ = '1.42'
10.5
20
0.666667
3
21
3.333333
1
0
0
0
0
0
0
0
0
0
0
0.166667
0.142857
21
1
21
21
0.388889
0
0
0
0
0
0.190476
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
b750844f1bfaec19347572eeff54e7e6aa632c72
165
py
Python
binaryCycleSearch.py
Case-y/algorithms
48e3c183269bf6108b6d2c0c4dece687ecf01e90
[ "WTFPL" ]
null
null
null
binaryCycleSearch.py
Case-y/algorithms
48e3c183269bf6108b6d2c0c4dece687ecf01e90
[ "WTFPL" ]
null
null
null
binaryCycleSearch.py
Case-y/algorithms
48e3c183269bf6108b6d2c0c4dece687ecf01e90
[ "WTFPL" ]
null
null
null
def uncycle(list): if len(list) <= 3: return max(list) m = int(len(list) / 2) if list[0] < list[m]: return uncycle(list[m:]) else: return uncycle(list[:m])
20.625
26
0.612121
29
165
3.482759
0.448276
0.19802
0.336634
0.356436
0
0
0
0
0
0
0
0.022388
0.187879
165
8
27
20.625
0.731343
0
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0
0
0.5
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
b7517d25347560d5452deac6a15e237b69af37f9
199
py
Python
api/__init__.py
nosoyyo/blox
f42913759a96d7ba387accc31dd3255b32b68f14
[ "MIT" ]
null
null
null
api/__init__.py
nosoyyo/blox
f42913759a96d7ba387accc31dd3255b32b68f14
[ "MIT" ]
null
null
null
api/__init__.py
nosoyyo/blox
f42913759a96d7ba387accc31dd3255b32b68f14
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # @absurdity v0.3 app __author__ = 'nosoyyo' from .block import * from .task import * from .note import * from .command import * from .control import *
18.090909
23
0.673367
28
199
4.642857
0.714286
0.307692
0
0
0
0
0
0
0
0
0
0.018293
0.175879
199
11
24
18.090909
0.77439
0.311558
0
0
0
0
0.051852
0
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b7535c0e8e2eddb01bcc755085fa38f94c8efe2a
610
py
Python
arrp/utils/paths.py
LucaCappelletti94/arrp_dataset
bcea455a504e8ff718458ce12623c63e0314badb
[ "MIT" ]
null
null
null
arrp/utils/paths.py
LucaCappelletti94/arrp_dataset
bcea455a504e8ff718458ce12623c63e0314badb
[ "MIT" ]
null
null
null
arrp/utils/paths.py
LucaCappelletti94/arrp_dataset
bcea455a504e8ff718458ce12623c63e0314badb
[ "MIT" ]
null
null
null
def _build_csv_path(target:str, directory:str, cell_line:str): return "{target}/{directory}/{cell_line}.csv".format( target=target, directory=directory, cell_line=cell_line ) def get_raw_epigenomic_data_path(target:str, cell_line:str): return _build_csv_path(target, "epigenomic_data", cell_line) def get_raw_nucleotides_sequences_path(target:str, cell_line:str): return _build_csv_path(target, "one_hot_encoded_expanded_regions", cell_line) def get_raw_classes_path(target:str, cell_line:str): return _build_csv_path(target, "one_hot_encoded_classes", cell_line)
40.666667
81
0.770492
90
610
4.766667
0.255556
0.18648
0.111888
0.167832
0.561772
0.39627
0.39627
0.39627
0.39627
0.39627
0
0
0.12623
610
15
82
40.666667
0.804878
0
0
0
0
0
0.173486
0.148936
0
0
0
0
0
1
0.333333
false
0
0
0.333333
0.666667
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
b75874a29a2a0ced6c1e6a4e8c1e13651744c6ad
1,769
py
Python
imgaug/augmenters/overlay.py
fmder/imgaug
4c81c7a7503b64f54d76144385ea4330fd7c8a84
[ "MIT" ]
1
2019-10-27T19:17:18.000Z
2019-10-27T19:17:18.000Z
imgaug/augmenters/overlay.py
fmder/imgaug
4c81c7a7503b64f54d76144385ea4330fd7c8a84
[ "MIT" ]
null
null
null
imgaug/augmenters/overlay.py
fmder/imgaug
4c81c7a7503b64f54d76144385ea4330fd7c8a84
[ "MIT" ]
null
null
null
"""Alias for module blend. Deprecated module. Original name for module blend.py. Was changed in 0.2.8. """ from __future__ import print_function, division, absolute_import import imgaug as ia from . import blend @ia.deprecated(alt_func="imgaug.augmenters.blend.blend_alpha()", comment="It has the exactly same interface.") def blend_alpha(*args, **kwargs): """See :func:`imgaug.augmenters.blend.blend_alpha`.""" # pylint: disable=invalid-name return blend.blend_alpha(*args, **kwargs) @ia.deprecated(alt_func="imgaug.augmenters.blend.Alpha", comment="It has the exactly same interface.") def Alpha(*args, **kwargs): """See :func:`imgaug.augmenters.blend.Alpha`.""" # pylint: disable=invalid-name return blend.Alpha(*args, **kwargs) @ia.deprecated(alt_func="imgaug.augmenters.blend.AlphaElementwise", comment="It has the exactly same interface.") def AlphaElementwise(*args, **kwargs): """See :func:`imgaug.augmenters.blend.AlphaElementwise`.""" # pylint: disable=invalid-name return blend.AlphaElementwise(*args, **kwargs) @ia.deprecated(alt_func="imgaug.augmenters.blend.SimplexNoiseAlpha", comment="It has the exactly same interface.") def SimplexNoiseAlpha(*args, **kwargs): """See :func:`imgaug.augmenters.blend.SimplexNoiseAlpha`.""" # pylint: disable=invalid-name return blend.SimplexNoiseAlpha(*args, **kwargs) @ia.deprecated(alt_func="imgaug.augmenters.blend.FrequencyNoiseAlpha", comment="It has the exactly same interface.") def FrequencyNoiseAlpha(*args, **kwargs): """See :func:`imgaug.augmenters.blend.FrequencyNoiseAlpha`.""" # pylint: disable=invalid-name return blend.FrequencyNoiseAlpha(*args, **kwargs)
35.38
75
0.705483
207
1,769
5.956522
0.222222
0.081103
0.162206
0.202758
0.773723
0.689376
0.604217
0.479319
0.25223
0.171127
0
0.001999
0.151498
1,769
49
76
36.102041
0.819454
0.284907
0
0.217391
0
0
0.29316
0.154723
0
0
0
0
0
1
0.217391
true
0
0.130435
0
0.565217
0.043478
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
4
b75ddff876b51793954a78860e9a7cceb1c1264e
558
py
Python
src/utilities/database/delete.py
smccaffrey/plex-etl
341e86f22ae17fa0b48a7ca50ff0a1fc14b6578a
[ "Apache-2.0" ]
7
2020-03-08T05:52:26.000Z
2022-03-18T12:32:50.000Z
src/utilities/database/delete.py
smccaffrey/plex-etl
341e86f22ae17fa0b48a7ca50ff0a1fc14b6578a
[ "Apache-2.0" ]
5
2020-03-15T05:12:08.000Z
2020-03-15T18:18:38.000Z
src/utilities/database/delete.py
smccaffrey/plex-etl
341e86f22ae17fa0b48a7ca50ff0a1fc14b6578a
[ "Apache-2.0" ]
1
2020-10-05T14:08:21.000Z
2020-10-05T14:08:21.000Z
import logging from src.utilities.database.models import db from src.utilities.database.database import create_or_update from src.utilities.database.models import ExtractedMovies from src.utilities.database.models import TransformedMovies from src.utilities.database.models import LoadMovies class Delete: def __init__(self): return @staticmethod @create_or_update def all(): db.session.query(ExtractedMovies).delete() db.session.query(TransformedMovies).delete() db.session.query(LoadMovies).delete()
23.25
60
0.758065
66
558
6.287879
0.363636
0.084337
0.192771
0.289157
0.346988
0.346988
0
0
0
0
0
0
0.16129
558
23
61
24.26087
0.886752
0
0
0
0
0
0
0
0
0
0
0
0
1
0.133333
false
0
0.4
0.066667
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b77f59eb8155d255dff90f7bed8a4535d43c2c14
157
py
Python
tests/__init__.py
razor-1/aiociscospark
69cfe41a8f1fda788cbd5b3a77e84ce04e6d562d
[ "MIT" ]
8
2017-12-29T19:03:27.000Z
2020-08-17T06:53:58.000Z
tests/__init__.py
razor-1/aiociscospark
69cfe41a8f1fda788cbd5b3a77e84ce04e6d562d
[ "MIT" ]
119
2017-10-28T10:27:38.000Z
2020-03-16T05:19:45.000Z
tests/__init__.py
razor-1/aiociscospark
69cfe41a8f1fda788cbd5b3a77e84ce04e6d562d
[ "MIT" ]
5
2018-05-08T17:49:32.000Z
2019-06-05T17:34:29.000Z
# Ideally "tests" directory should not be treated as a package (no __init__.py file). # Read more: http://docs.python-guide.org/en/latest/writing/structure/
52.333333
85
0.757962
25
157
4.6
1
0
0
0
0
0
0
0
0
0
0
0
0.11465
157
2
86
78.5
0.827338
0.968153
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
4d0c2e92e8f10e7a88c0af94da460e2ccd1489e1
40
py
Python
buddy/__init__.py
ucbrise/buddy
24908db7fba651f2b586a2c2b03e02b805a1f1b2
[ "Apache-2.0" ]
1
2020-11-29T19:42:59.000Z
2020-11-29T19:42:59.000Z
buddy/__init__.py
ucbrise/buddy
24908db7fba651f2b586a2c2b03e02b805a1f1b2
[ "Apache-2.0" ]
null
null
null
buddy/__init__.py
ucbrise/buddy
24908db7fba651f2b586a2c2b03e02b805a1f1b2
[ "Apache-2.0" ]
null
null
null
from . import study __all__ = ['study']
13.333333
19
0.675
5
40
4.6
0.8
0
0
0
0
0
0
0
0
0
0
0
0.175
40
3
20
13.333333
0.69697
0
0
0
0
0
0.121951
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
4d5056441d8c778aff8a5c0cacb5ef954568872e
65
py
Python
src/cobra/apps/auditlog/__init__.py
lyoniionly/django-cobra
2427e5cf74b7739115b1224da3306986b3ee345c
[ "Apache-2.0" ]
1
2015-01-27T08:56:46.000Z
2015-01-27T08:56:46.000Z
src/cobra/apps/auditlog/__init__.py
lyoniionly/django-cobra
2427e5cf74b7739115b1224da3306986b3ee345c
[ "Apache-2.0" ]
null
null
null
src/cobra/apps/auditlog/__init__.py
lyoniionly/django-cobra
2427e5cf74b7739115b1224da3306986b3ee345c
[ "Apache-2.0" ]
null
null
null
default_app_config = 'cobra.apps.auditlog.config.AuditLogConfig'
32.5
64
0.846154
8
65
6.625
0.875
0
0
0
0
0
0
0
0
0
0
0
0.046154
65
1
65
65
0.854839
0
0
0
0
0
0.630769
0.630769
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4d6a7c1564593425f064066473b88a96d2d41539
159
py
Python
packages/auto-nlp-deployment/src/common/runtimes/__init__.py
fhswf/tagflip-autonlp
f94abb35ed06198567e5d9cbb7abb7e112149d6c
[ "MIT" ]
4
2021-10-05T17:34:02.000Z
2022-03-23T07:33:19.000Z
packages/auto-nlp-deployment/src/common/runtimes/__init__.py
fhswf/tagflip-autonlp
f94abb35ed06198567e5d9cbb7abb7e112149d6c
[ "MIT" ]
11
2022-03-01T14:37:52.000Z
2022-03-31T05:11:23.000Z
packages/auto-nlp-deployment/src/common/runtimes/__init__.py
fhswf/tagflip-autonlp
f94abb35ed06198567e5d9cbb7abb7e112149d6c
[ "MIT" ]
1
2022-01-29T13:32:22.000Z
2022-01-29T13:32:22.000Z
from .runtime import Runtime from .runtime_config import RuntimeConfig from .ssh_config import SSHConfig from .parameter_definition import ParameterDefinition
31.8
53
0.874214
19
159
7.157895
0.526316
0.161765
0
0
0
0
0
0
0
0
0
0
0.100629
159
4
54
39.75
0.951049
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
4d7eb180ca04b15ec5785a17f92b4f888e11fc9b
116
py
Python
src/project/cli/__init__.py
DeviaVir/super-secret-project
cae0ac6be32b28a8d3cebdb023d2984d60433b3a
[ "MIT" ]
null
null
null
src/project/cli/__init__.py
DeviaVir/super-secret-project
cae0ac6be32b28a8d3cebdb023d2984d60433b3a
[ "MIT" ]
null
null
null
src/project/cli/__init__.py
DeviaVir/super-secret-project
cae0ac6be32b28a8d3cebdb023d2984d60433b3a
[ "MIT" ]
null
null
null
import click from .memcache import memcache_cmd CMDS = (memcache_cmd,) CLI = click.CommandCollection(sources=CMDS)
19.333333
43
0.801724
15
116
6.066667
0.6
0.241758
0
0
0
0
0
0
0
0
0
0
0.112069
116
5
44
23.2
0.883495
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
4d9031a690d7da5262992f455e71525186ac63f4
3,322
py
Python
web/kwmo/kwmo/lib/uimessage.py
tmbx/kas
0d4e74d0a8ec0e0f85ba574eb01d389530bdeecc
[ "BSD-3-Clause" ]
null
null
null
web/kwmo/kwmo/lib/uimessage.py
tmbx/kas
0d4e74d0a8ec0e0f85ba574eb01d389530bdeecc
[ "BSD-3-Clause" ]
null
null
null
web/kwmo/kwmo/lib/uimessage.py
tmbx/kas
0d4e74d0a8ec0e0f85ba574eb01d389530bdeecc
[ "BSD-3-Clause" ]
null
null
null
from pylons import session, tmpl_context as c from strings import message_codes_map # Show an information message in the web interface. def ui_info(code=None, message=None, hide_after_ms=None): uim = UIMessage.info(code=code, message=message, hide_after_ms=hide_after_ms) c.glob_messages.append(uim) # Show a warning message in the web interface. def ui_warn(code=None, message=None, hide_after_ms=None): uim = UIMessage.warn(code=code, message=message, hide_after_ms=hide_after_ms) c.glob_messages.append(uim) # Show an error message in the web interface. def ui_error(code=None, message=None, hide_after_ms=None): uim = UIMessage.error(code=code, message=message, hide_after_ms=hide_after_ms) c.glob_messages.append(uim) # Show an information message in the web interface (at next request). def ui_flash_info(code=None, message=None, hide_after_ms=None): uim = UIMessage.info(code=code, message=message, hide_after_ms=hide_after_ms) session['uimessage'] = uim session.save() # Show a warning message in the web interface (at next request). def ui_flash_warn(code=None, message=None, hide_after_ms=None): uim = UIMessage.warn(code=code, message=message, hide_after_ms=hide_after_ms) session['uimessage'] = uim session.save() # Show an error message in the web interface (at next request). def ui_flash_error(code=None, message=None, hide_after_ms=None): uim = UIMessage.error(code=code, message=message, hide_after_ms=hide_after_ms) session['uimessage'] = uim session.save() # UIMessage object. class UIMessage(object): def __init__(self): self.reset() def reset(self): self.type = None self.message = None self.hide_after_ms = None def from_dict(self, d): self.reset() if d.has_key('type'): self.type = d['type'] if d.has_key('message'): self.message = d['message'] if d.has_key('hide_after_ms'): self.hide_after_ms = d['hide_after_ms'] # Return self, although changes happen in place too. 
return self def to_dict(self): return {'type' : self.type, 'message' : self.message, 'hide_after_ms' : self.hide_after_ms} def set_code(self, code): if message_codes_map.has_key(code): self.message = message_codes_map[code] else: self.message = message_codes_map['unknown_code'] def __repr__(self): return "<%s type='%s' message='%s' hide_after_ms='%s'>" % ( self.__class__.__name__, self.type, self.message, str(self.hide_after_ms) ) @staticmethod def info(code=None, message=None, hide_after_ms=None): uim = UIMessage() uim.type = 'info' uim.set_code(code) if message: uim.message = message uim.hide_after_ms = hide_after_ms return uim @staticmethod def warn(code=None, message=None, hide_after_ms=None): uim = UIMessage() uim.type = 'warn' uim.set_code(code) if message: uim.message = message uim.hide_after_ms = hide_after_ms return uim @staticmethod def error(code=None, message=None, hide_after_ms=None): uim = UIMessage() uim.type = 'error' uim.set_code(code) if message: uim.message = message uim.hide_after_ms = hide_after_ms return uim
35.340426
143
0.685129
493
3,322
4.377282
0.135903
0.145968
0.178406
0.069509
0.734013
0.709917
0.709917
0.678869
0.619555
0.619555
0
0
0.204696
3,322
93
144
35.72043
0.816805
0.120409
0
0.522388
0
0
0.058379
0
0
0
0
0
0
1
0.223881
false
0
0.029851
0.029851
0.358209
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
4db0b8fe5069365ed7cdc97e5964b3b83c49f23d
114
py
Python
settings.py
rchien6191/demo
10ed245b06539d53f1ac108e2077a896f05a6439
[ "MIT" ]
null
null
null
settings.py
rchien6191/demo
10ed245b06539d53f1ac108e2077a896f05a6439
[ "MIT" ]
null
null
null
settings.py
rchien6191/demo
10ed245b06539d53f1ac108e2077a896f05a6439
[ "MIT" ]
null
null
null
TITLE = "Jump game" WIDTH = 480 HEIGHT = 600 FPS = 30 WHITE = (255, 255, 255) BLACK = (0,0,0) RED = (240, 55, 66)
14.25
23
0.587719
21
114
3.190476
0.809524
0.179104
0
0
0
0
0
0
0
0
0
0.306818
0.22807
114
8
24
14.25
0.454545
0
0
0
0
0
0.078261
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4dd0c97dc8bbee9310ebc5f011355bc0d891d28f
99
py
Python
vehicles/models/tools.py
kackey0-1/drf-sample
914907320bc317240b4d7c07968b6d4ea80b4511
[ "MIT" ]
null
null
null
vehicles/models/tools.py
kackey0-1/drf-sample
914907320bc317240b4d7c07968b6d4ea80b4511
[ "MIT" ]
6
2021-03-30T12:05:07.000Z
2021-04-05T14:21:46.000Z
vehicles/models/tools.py
kackey0-1/drf-sample
914907320bc317240b4d7c07968b6d4ea80b4511
[ "MIT" ]
null
null
null
class Tool: def __init__(self, name, make): self.name = name self.make = make
16.5
35
0.565657
13
99
4
0.538462
0.307692
0
0
0
0
0
0
0
0
0
0
0.333333
99
5
36
19.8
0.787879
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
1283e72d54accb0165e3a7fdf82827960ace0957
907
py
Python
note/models.py
pjkui/notepad
99f106c27e584709e5ac583e3cc64a506c4eacc7
[ "MIT" ]
1
2015-10-16T15:55:00.000Z
2015-10-16T15:55:00.000Z
note/models.py
pjkui/notepad
99f106c27e584709e5ac583e3cc64a506c4eacc7
[ "MIT" ]
null
null
null
note/models.py
pjkui/notepad
99f106c27e584709e5ac583e3cc64a506c4eacc7
[ "MIT" ]
null
null
null
from django.db import models # Create your models here. class Note(models.Model): title = models.CharField(max_length=60) color = models.CharField(max_length=10) authorID = models.IntegerField() noteType = models.IntegerField() createTime = models.DateTimeField() content = models.TextField() def __unicode__(self): return self.title class User(models.Model): username = models.CharField(max_length=40) password = models.CharField(max_length=40) nickname = models.CharField(max_length=40) loginIP = models.CharField(max_length=12) def __unicode__(self): return str(self.id) + " " + self.username + " " + self.password class NoteAuthor(models.Model): userID = models.ForeignKey(User) noteID = models.ForeignKey(Note) class Tag(models.Model): tagName = models.CharField(max_length=20) noteID = models.ForeignKey(Note)
25.194444
71
0.702315
108
907
5.759259
0.416667
0.16881
0.202572
0.270096
0.125402
0
0
0
0
0
0
0.018945
0.185226
907
35
72
25.914286
0.822733
0.026461
0
0.173913
0
0
0.00227
0
0
0
0
0
0
1
0.086957
false
0.086957
0.043478
0.086957
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
12c5c48fe468909ab3820c3dcd375b372e264a69
607
py
Python
src/pyomexmeta/__init__.py
aram148/libOmexMeta
331a0a48570a212aaa2b6cb3fe72b9f43ac828af
[ "Apache-2.0" ]
null
null
null
src/pyomexmeta/__init__.py
aram148/libOmexMeta
331a0a48570a212aaa2b6cb3fe72b9f43ac828af
[ "Apache-2.0" ]
null
null
null
src/pyomexmeta/__init__.py
aram148/libOmexMeta
331a0a48570a212aaa2b6cb3fe72b9f43ac828af
[ "Apache-2.0" ]
null
null
null
from .pyomexmeta import PersonalInformation from .pyomexmeta import EnergyDiff from .pyomexmeta import PhysicalProcess from .pyomexmeta import RDF, Editor, PhysicalEntity from .pyomexmeta import SingularAnnotation from .pyomexmeta import OmexMetaException from .pyomexmeta_api import PyOmexMetaAPI, get_version, eUriType, eXmlType __version__ = get_version() def run_tests(): import os import unittest loader = unittest.TestLoader() start_dir = os.path.abspath(os.path.dirname(__file__)) suite = loader.discover(start_dir) runner = unittest.TextTestRunner() runner.run(suite)
30.35
74
0.789127
68
607
6.838235
0.5
0.210753
0.258065
0
0
0
0
0
0
0
0
0
0.14168
607
19
75
31.947368
0.892514
0
0
0
0
0
0
0
0
0
0
0
0
1
0.0625
false
0
0.5625
0
0.625
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
12d33f0e116da12db1d49faebe3c24c699cd7bd8
255
py
Python
flask/desktop-host.py
iarecrazy/AIris
b6f316a46b6d567a62712e2844e4baffddb8bbfe
[ "MIT" ]
null
null
null
flask/desktop-host.py
iarecrazy/AIris
b6f316a46b6d567a62712e2844e4baffddb8bbfe
[ "MIT" ]
3
2020-06-05T18:08:55.000Z
2021-06-10T20:14:27.000Z
flask/desktop-host.py
iarecrazy/AIris
b6f316a46b6d567a62712e2844e4baffddb8bbfe
[ "MIT" ]
null
null
null
import os from flask import Flask from app import * app.config['root_dir'] = 'X:\\' app.config['tmp_dir'] = 'C:\\Temp' app.config['path_to_index'] = os.path.join(app.config['tmp_dir'], 'index.json') if __name__ == '__main__': app.run(host='0.0.0.0')
19.615385
79
0.662745
44
255
3.545455
0.522727
0.230769
0.153846
0.192308
0
0
0
0
0
0
0
0.017778
0.117647
255
12
80
21.25
0.675556
0
0
0
0
0
0.282353
0
0
0
0
0
0
1
0
true
0
0.375
0
0.375
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
12f54719fe5c8a4055d1bfedf9e84f1ab5157eff
90
py
Python
semana_01/exercicios/exercicio_01.py
luispaulojr/cursoPython
24aaa73741508986d7f747be8f3822889be81025
[ "MIT" ]
null
null
null
semana_01/exercicios/exercicio_01.py
luispaulojr/cursoPython
24aaa73741508986d7f747be8f3822889be81025
[ "MIT" ]
null
null
null
semana_01/exercicios/exercicio_01.py
luispaulojr/cursoPython
24aaa73741508986d7f747be8f3822889be81025
[ "MIT" ]
null
null
null
print('O numero informado foi: {}'.format( int(input('Informe um número inteiro: '))))
45
47
0.666667
12
90
5
1
0
0
0
0
0
0
0
0
0
0
0
0.144444
90
2
47
45
0.779221
0
0
0
0
0
0.582418
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
12ff02b885037c933f8b02121472062d8be1bc93
181
py
Python
pymilldb/__main__.py
Toka-Taka/mill-db
1edf390f2ce89d9232ba91d722cb4b104c398078
[ "MIT" ]
2
2019-11-05T06:24:59.000Z
2020-03-06T09:04:38.000Z
pymilldb/__main__.py
bmstu-iu9/mill-db
a3725b11fcd995953dabc21f7fe6f4d5f5d38815
[ "MIT" ]
2
2019-05-22T09:40:51.000Z
2020-03-03T12:17:12.000Z
pymilldb/__main__.py
Toka-Taka/mill-db
1edf390f2ce89d9232ba91d722cb4b104c398078
[ "MIT" ]
6
2018-05-03T16:04:13.000Z
2019-12-01T11:01:07.000Z
import sys from .main import generate if __name__ == '__main__': if len(sys.argv) != 2: raise Exception('Insert only filename') else: generate(sys.argv[1])
20.111111
47
0.629834
24
181
4.416667
0.708333
0.132075
0
0
0
0
0
0
0
0
0
0.014706
0.248619
181
8
48
22.625
0.764706
0
0
0
1
0
0.154696
0
0
0
0
0
0
1
0
true
0
0.285714
0
0.285714
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
420e28b2a60a5915fbdb6541b8329035905ca2eb
717
py
Python
tests/testtask.py
MichaelDoyle/riemann-sumd
06f7599d4f93e16e0f37de29e3f9afad45c7f92d
[ "MIT" ]
31
2015-02-04T07:17:24.000Z
2021-08-20T21:20:58.000Z
tests/testtask.py
MichaelDoyle/riemann-sumd
06f7599d4f93e16e0f37de29e3f9afad45c7f92d
[ "MIT" ]
17
2015-03-08T14:33:14.000Z
2018-10-25T21:02:47.000Z
tests/testtask.py
MichaelDoyle/riemann-sumd
06f7599d4f93e16e0f37de29e3f9afad45c7f92d
[ "MIT" ]
5
2015-06-29T11:06:47.000Z
2018-10-27T05:46:25.000Z
import sys sys.path.append("lib") import unittest class TestTask(unittest.TestCase): def setUp(self): pass def test_add_tag(self): pass def test_add_timing(self): pass def test_skew(self): pass def test_start(self): pass def test_draing(self): pass class TestJSONTask(TestTask): def setUp(self): pass class TestHTTPJSONTask(TestTask): def setUp(self): # Perhaps start a simple http server with JSON output? # Or replay a captured copy of actual JSON output? pass class TestNagiosTask(TestTask): def setUp(self): # Scaffold up a nagios task that does something silly pass
16.674419
62
0.627615
91
717
4.868132
0.494505
0.126411
0.124154
0.1693
0.081264
0
0
0
0
0
0
0
0.301255
717
42
63
17.071429
0.884232
0.213389
0
0.52
0
0
0.005357
0
0
0
0
0
0
1
0.36
false
0.36
0.08
0
0.6
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
4224b369742a74c14c6de9afaaea5afafb05a9c6
2,121
py
Python
usaspending_api/disaster/tests/integration/test_disaster_def_code_count.py
g4brielvs/usaspending-api
bae7da2c204937ec1cdf75c052405b13145728d5
[ "CC0-1.0" ]
1
2020-08-14T04:14:32.000Z
2020-08-14T04:14:32.000Z
usaspending_api/disaster/tests/integration/test_disaster_def_code_count.py
g4brielvs/usaspending-api
bae7da2c204937ec1cdf75c052405b13145728d5
[ "CC0-1.0" ]
null
null
null
usaspending_api/disaster/tests/integration/test_disaster_def_code_count.py
g4brielvs/usaspending-api
bae7da2c204937ec1cdf75c052405b13145728d5
[ "CC0-1.0" ]
null
null
null
import pytest from rest_framework import status url = "/api/v2/disaster/def_code/count/" @pytest.mark.django_db def test_def_code_count_success(client, monkeypatch, disaster_account_data, helpers): helpers.patch_datetime_now(monkeypatch, 2022, 12, 31) helpers.reset_dabs_cache() resp = helpers.post_for_count_endpoint(client, url, ["L", "M", "N", "O", "P"]) assert resp.status_code == status.HTTP_200_OK assert resp.data["count"] == 5 resp = helpers.post_for_count_endpoint(client, url, ["N", "O"]) assert resp.status_code == status.HTTP_200_OK assert resp.data["count"] == 2 resp = helpers.post_for_count_endpoint(client, url, ["P"]) assert resp.status_code == status.HTTP_200_OK assert resp.data["count"] == 1 resp = helpers.post_for_count_endpoint(client, url, ["9"]) assert resp.status_code == status.HTTP_200_OK assert resp.data["count"] == 0 @pytest.mark.django_db def test_def_code_count_invalid_defc(client, monkeypatch, disaster_account_data, helpers): helpers.patch_datetime_now(monkeypatch, 2022, 12, 31) resp = helpers.post_for_count_endpoint(client, url, ["ZZ"]) assert resp.status_code == status.HTTP_400_BAD_REQUEST assert resp.data["detail"] == "Field 'filter|def_codes' is outside valid values ['9', 'L', 'M', 'N', 'O', 'P']" @pytest.mark.django_db def test_def_code_count_invalid_defc_type(client, monkeypatch, disaster_account_data, helpers): helpers.patch_datetime_now(monkeypatch, 2022, 12, 31) resp = helpers.post_for_count_endpoint(client, url, "100") assert resp.status_code == status.HTTP_400_BAD_REQUEST assert resp.data["detail"] == "Invalid value in 'filter|def_codes'. 
'100' is not a valid type (array)" @pytest.mark.django_db def test_def_code_count_missing_defc(client, monkeypatch, disaster_account_data, helpers): helpers.patch_datetime_now(monkeypatch, 2022, 12, 31) resp = helpers.post_for_count_endpoint(client, url) assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY assert resp.data["detail"] == "Missing value: 'filter|def_codes' is a required field"
40.788462
115
0.737388
312
2,121
4.717949
0.24359
0.095109
0.071332
0.085598
0.799592
0.793478
0.773098
0.773098
0.664402
0.61413
0
0.036046
0.136728
2,121
51
116
41.588235
0.767886
0
0
0.378378
0
0.027027
0.134842
0.015087
0
0
0
0
0.378378
1
0.108108
false
0
0.054054
0
0.162162
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
42366cfa4ee1d3d101bd1c0152db5a64fe7833f6
33,030
py
Python
enaml/qt/docking/dock_resources.py
xtuzy/enaml
a1b5c0df71c665b6ef7f61d21260db92d77d9a46
[ "BSD-3-Clause-Clear" ]
1,080
2015-01-04T14:29:34.000Z
2022-03-29T05:44:51.000Z
enaml/qt/docking/dock_resources.py
xtuzy/enaml
a1b5c0df71c665b6ef7f61d21260db92d77d9a46
[ "BSD-3-Clause-Clear" ]
308
2015-01-05T22:44:13.000Z
2022-03-30T21:19:18.000Z
enaml/qt/docking/dock_resources.py
xtuzy/enaml
a1b5c0df71c665b6ef7f61d21260db92d77d9a46
[ "BSD-3-Clause-Clear" ]
123
2015-01-25T16:33:48.000Z
2022-02-25T19:57:10.000Z
# -*- coding: utf-8 -*- # Resource object code # # Created: Tue Jul 2 13:23:21 2013 # by: The Resource Compiler for PyQt (Qt v4.8.3) # # WARNING! All changes made in this file will be lost! # this line manually edited from enaml.qt import QtCore qt_resource_data = b"\ \x00\x00\x02\x61\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x6f\x00\x00\x00\x6f\x08\x06\x00\x00\x00\xe2\xc5\x9e\x60\ \x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x02\x03\x49\x44\x41\x54\x78\x9c\xed\ \xdc\xc1\x49\xc5\x40\x14\x46\xe1\xff\xe9\x2b\x2d\xb5\xd8\x42\xb2\ \x75\xf7\xd2\x82\x6d\x58\x43\x96\xf6\x93\x9d\x2e\x64\xe0\x21\x88\ \x20\x38\x77\xce\x78\x4e\x03\x73\xc9\x47\x36\x21\x73\x2f\x19\xa8\ \x75\x5d\xdf\xab\x67\xf8\xa9\x7d\xdf\x2f\xd5\x33\xb4\xae\xd5\x03\ \x7c\xed\x76\xbb\xbd\x54\xcf\xf0\x5d\xdb\xb6\x3d\x55\xcf\x70\xdf\ \x43\xf5\x00\xf6\xfb\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\ \x70\xe2\x81\x13\x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\ \x07\x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\ \xf1\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\xdc\x63\xf5\x00\xad\ \x75\x5d\xdf\x47\xfe\xe1\x36\x49\x96\x65\x79\x3b\xcf\xf3\xf5\x38\ \x8e\xe7\xea\x59\x92\x41\xf0\x08\x70\xad\x91\x00\xcb\xf1\x48\x70\ \xad\x51\x00\x4b\xf1\x88\x70\xad\x11\x00\xcb\xf0\xc8\x70\xad\x6a\ \xc0\x12\xbc\x19\xe0\x5a\x95\x80\xdd\xf1\x66\x82\x6b\x55\x01\x76\ \xc5\x9b\x11\xae\x55\x01\xd8\x0d\x6f\x66\xb8\x56\x6f\xc0\x4b\xaf\ \xab\xc4\xb3\xc3\xdd\xd7\xeb\x06\xed\xe5\x3f\xbc\x11\x33\xb6\x6d\ \xdb\x93\xdf\x36\xc1\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\ \xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\ \x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\ \xe2\x81\x13\x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\ \x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\ 
\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\xe0\xdc\xc3\xf2\ \x07\x75\xdb\xc3\xd2\xe3\x90\xe4\x7f\x6c\x40\x4a\x3e\xe1\xf6\x7d\ \xef\xf2\x5c\xbb\xad\xaf\x3a\x8e\xe3\xf9\x3c\xcf\xd7\x65\x59\xde\ \x7a\x9d\xd9\xbb\x9e\x70\x49\xe7\xc5\x71\x33\x03\xf6\x86\x4b\x0a\ \x56\x36\xce\x08\x58\x01\x97\x14\x2d\x4b\x9d\x09\xb0\x0a\x2e\x29\ \x5c\x53\x3c\x03\x60\x25\x5c\x52\xbc\x20\x9c\x0c\x58\x0d\x97\x0c\ \xb0\x9a\x9f\x08\x38\x02\x5c\x32\x00\x5e\xc2\x02\x1c\x05\x2e\x19\ \x04\x2f\x61\x00\x8e\x04\x97\xf8\x6d\x13\x9d\x78\xe0\xc4\x03\x27\ \x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\xe0\ \xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\ \x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\xe2\ \x81\xbb\x56\x0f\xf0\xb5\x5e\x57\x82\x67\xe8\x03\xdb\xf1\xfe\x32\ \xdf\x7a\xb4\x66\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \ \x00\x00\x01\xda\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x49\x49\x44\x41\x54\ \x48\x4b\xbd\xcc\xbd\x4a\x03\x51\x10\x86\x61\x2f\xce\x8b\xf0\x46\ \x2c\x2c\x2c\x2c\x2c\x14\x2c\x62\x61\x21\xc1\xc6\xc2\xc2\xc2\xc2\ \xc2\xc2\x1b\x10\x11\x11\x91\x20\x22\x31\x6e\xf6\xf7\xec\x6e\xf6\ \x37\xd9\x24\x36\xce\x30\x73\xa6\x90\x54\x0e\x67\xe1\xed\xbe\x8f\ \x67\xeb\x78\x30\x74\x17\xea\x6d\xb7\x76\x11\xeb\xcd\x62\xe5\xa2\ \x5e\xf4\x7a\xbe\xa4\xb6\x77\xf6\xf4\x89\xd6\x8b\x5e\xb5\x1d\xf5\ \xe7\xf7\xbf\x44\xeb\x45\x2f\x9b\x05\x05\xdb\x8f\xb9\xd7\x04\x82\ \x68\x9b\xf4\xf8\x4e\xd3\x06\xbd\xa8\xe7\x14\xea\xe1\xad\x26\x10\ 
\x44\x63\x7d\x56\xb5\x14\xea\xfe\x8d\x26\x10\x44\x63\x3d\x2f\x1b\ \x0a\xb6\xb5\x77\xad\x09\x04\xd1\x58\xcf\x8a\x86\x42\x7d\x72\xa5\ \x09\x04\xd1\x58\x37\x79\x4d\xa1\x3e\xbe\xd4\x04\x82\x68\xac\x27\ \x59\x45\xc1\xb6\xfa\xbc\xd0\x04\x82\x68\xac\xc7\x69\x49\xa1\xfe\ \x71\xae\x09\x04\xd1\x58\x8f\x4c\x41\xa1\xfe\x7e\xa6\x09\x04\xd1\ \x58\x0f\x93\x82\x42\x7d\x74\xaa\x09\x04\xd1\x58\x0f\xe2\x19\x05\ \xdb\xf2\xed\x44\x13\x08\xa2\xb1\xee\x47\x39\x85\xfa\xeb\x91\x26\ \x10\x44\x63\x7d\x1a\xe5\x14\xea\x2f\x87\x9a\x40\x10\xcd\xea\x61\ \x46\xc1\xd6\x3d\x1f\x68\x42\xdd\x6a\xac\x7b\x41\x46\xa1\xfe\xb4\ \xaf\x09\x04\xd1\x58\xff\x0e\x52\x0a\xf5\x47\x55\x20\x88\x66\x75\ \x3f\xa5\x50\x7f\xd8\xd5\x84\xba\xd5\x58\x9f\xf8\x86\x82\x4d\x9f\ \x68\xac\x7f\x4d\x8d\x8b\xac\xee\x25\x2e\x62\x7d\xec\x25\x2e\x62\ \xdd\x55\x83\xe1\x2f\x82\x32\x64\x70\x80\xdc\x0e\xed\x00\x00\x00\ \x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x02\xcb\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x8b\x00\x00\x00\x8b\x08\x06\x00\x00\x00\x51\x19\x6a\xff\ \x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x02\x6d\x49\x44\x41\x54\x78\x9c\xed\ \xdd\xb1\x6d\xdc\x50\x10\x45\xd1\x59\x5b\xa5\xb1\x16\xb5\x40\xa6\ \xca\x96\x2d\xb8\x0d\xd5\xc0\x50\xfd\x30\x93\x23\x02\x4e\x0c\x3d\ \x63\xd7\x20\xe7\xeb\x9c\x0a\x1e\xb0\x77\x23\x02\xf3\x6f\x35\x80\ \x79\x9e\x3f\xcf\xde\xf0\x95\x75\x5d\x6f\x67\x6f\x78\xd4\xcb\xd9\ \x03\x9e\xe5\x7e\xbf\xff\x3a\x7b\xc3\xdf\x2c\xcb\xf2\x7a\xf6\x86\ \x67\xf8\x71\xf6\x00\xfa\x10\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\ \x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\ \x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\ \x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\ \xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\ \x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\ 
\x13\x0b\xb1\x9f\x67\x0f\x78\xd4\x3c\xcf\x9f\x57\x3e\x6b\x5a\x55\ \x35\x4d\xd3\xc7\xbe\xef\xef\xdb\xb6\xbd\x9d\xbd\xe5\x11\xad\x63\ \xe9\x10\xca\x61\x84\x60\xda\xc6\xd2\x29\x94\x43\xf7\x60\x5a\xc6\ \xd2\x31\x94\x43\xe7\x60\xda\xc5\xd2\x39\x94\x43\xd7\x60\x5a\xc5\ \x32\x42\x28\x87\x8e\xc1\xb4\x89\x65\xa4\x50\x0e\xdd\x82\x69\x11\ \xcb\x88\xa1\x1c\x3a\x05\x73\xf9\x58\x46\x0e\xe5\xd0\x25\x98\xdb\ \xd5\x1f\x76\x1a\x3d\x94\x3f\x5d\xfd\x5d\xa2\xdb\x77\xf8\xe7\xf2\ \xb8\x65\x59\x5e\x7d\x1b\x22\x26\x16\x62\x62\x21\x26\x16\x62\x62\ \x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\ \x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\ \x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\ \x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\ \x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\ \x21\x26\x16\x62\xee\xe0\x5e\xc8\xe5\xef\xe0\x9e\x3d\xe0\x2b\xdf\ \xe5\x4e\xef\xb2\x2c\xaf\xeb\xba\x5e\xfa\xf7\xb8\xfc\x39\xf6\x6d\ \xdb\xde\xf6\x7d\x7f\x9f\xa6\xe9\xe3\xec\x2d\xff\x4b\x87\x50\xaa\ \x1a\xc4\x52\x35\x76\x30\x5d\x42\xa9\x6a\x12\x4b\xd5\x98\xc1\x74\ \x0a\xa5\xaa\x51\x2c\x55\x63\x05\xd3\x2d\x94\xaa\x66\xb1\x54\x8d\ \x11\x4c\xc7\x50\xaa\x1a\xc6\x52\xd5\x3b\x98\xae\xa1\x54\x35\x8d\ \xa5\xaa\x67\x30\x9d\x43\xa9\x6a\x1c\x4b\x55\xaf\x60\xba\x87\x52\ \xd5\x3c\x96\xaa\x1e\xc1\x8c\x10\x4a\x95\x6f\x43\xfc\x03\xb1\x10\ \x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\ \x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\ \x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\ \xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\ \x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\ \x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x7b\x39\x7b\xc0\xb3\x5c\ \xfd\x61\xa7\x11\xfc\x06\x85\xf5\xfe\x6a\xa4\x26\xa3\xb0\x00\x00\ \x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x01\xe1\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ 
\x00\x00\x13\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x8a\xf0\x61\xe0\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x50\x49\x44\x41\x54\ \x48\x4b\xa5\x95\x39\x4a\x04\x61\x10\x46\xe7\x70\x1e\xc2\x8b\x18\ \x18\x18\x18\x18\x28\x18\x68\x60\x20\x62\x62\x60\x60\x60\x60\x60\ \xe0\x05\x44\x44\x44\x64\x10\x91\x59\x7a\x5f\xa7\xd7\x99\x9e\x25\ \xb1\x6a\xaa\xfe\x02\xc1\x1a\xe9\x16\x5e\xf6\xbe\x47\x85\xd5\x3b\ \x3a\xb9\xe8\x46\x0f\xb2\xba\x59\xb6\x05\x2a\x2c\xab\xd9\xa2\x2d\ \xff\x2e\xcb\xe9\x9c\xd8\xda\xde\xdd\x8c\x2c\xf9\x66\xf7\xb2\xa8\ \x1b\xe2\xcf\x9b\xb2\xe4\x9b\xdd\xcb\xbc\x9a\x11\x70\x73\x15\x3d\ \x68\x80\x95\x25\xdf\xfc\x51\x06\xf7\x2b\x85\x5f\xca\xac\x9c\x12\ \x78\xd3\xbb\xd3\x00\x2b\x4b\xbe\x39\x29\x6a\x02\x4b\xe7\x56\x03\ \xac\x2c\xb9\x4c\xf3\x8a\x00\xb7\xb4\x6e\x34\xc0\xca\x92\xcb\x24\ \xab\x08\x2c\x47\xd7\x1a\x60\x65\xc9\x65\x94\x96\x04\x96\x83\x2b\ \x0d\xb0\xb2\xe4\x32\x4c\x0a\x02\xdc\xe2\xeb\x52\x03\xac\x2c\xb9\ \x0c\xe2\x9c\xc0\xf2\xf3\x5c\x03\xac\x2c\xb9\xf4\xa3\x8c\xc0\xf2\ \xe3\x4c\x03\xac\x2c\xb9\xf4\xc2\x8c\xc0\xb2\x7f\xaa\x01\x56\x96\ \x5c\xba\xc1\x84\x00\x37\x7f\x3f\xd6\x00\x2b\x4b\x2e\x1d\x3f\x25\ \xb0\x7c\x3b\xd4\x00\x2b\x4b\x2e\x6d\x3f\x25\xb0\x7c\x3d\xd0\x00\ \x2b\x4b\x53\x7a\x89\xbd\x06\x5c\xf3\xb2\xaf\x81\xa5\x59\x72\x69\ \xb9\x09\x81\xe5\xf3\x9e\x06\x58\x59\x72\x39\x76\x63\x02\xcb\x27\ \x15\xb0\xb2\x34\xa5\x13\x8f\xd7\x60\xf9\xb8\xa3\x81\xa5\x59\x72\ \x39\x72\x22\x02\xdc\x66\x64\xc9\xe5\xd0\x8e\xda\x62\x4a\x2b\x1c\ \xb6\x84\xcb\x81\x15\xb6\x85\xcb\x6e\xbf\xec\x1b\xdd\xce\x28\xdf\ \xf5\x17\x62\x31\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \ \x00\x00\x01\xb8\ \x89\ 
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x27\x49\x44\x41\x54\ \x48\x4b\xbd\xd2\x3d\x4e\xc3\x40\x10\x86\x61\x0e\xc7\x21\x38\x0a\ \xd4\x74\x48\x29\x42\x1a\x8a\xc8\x14\xd4\x34\x49\xc3\x15\x68\xa8\ \x68\x68\x02\x38\x76\x9c\xf5\x4f\xfc\x9b\x38\x09\x0d\x63\xcd\xe7\ \x45\x5a\x27\x01\xb1\x1a\x4b\xaf\xb6\xd8\x19\x3d\xdb\xec\xd9\xcd\ \x70\x2c\x57\xa3\x57\xf5\x5e\x22\xe8\xe5\x66\x27\x51\x2f\x7a\xb1\ \xde\x72\xe7\x17\x57\xf6\x69\xad\x17\x3d\xaf\x6a\xce\xd8\xfb\x5f\ \x5a\xeb\x45\xcf\xca\x0d\x47\xb3\x2f\xf5\x64\x13\x09\x5a\x3b\xa0\ \xef\xbd\x47\x9b\x0e\xe8\x69\xb1\xe6\x1a\x7d\xf6\x60\x13\x09\x5a\ \x83\xbe\xca\x2b\x8e\x66\xbb\xb7\x3b\x9b\x48\xd0\x1a\xf4\x24\x2b\ \x39\x9a\x6d\x5f\x07\x36\x91\xa0\x35\xe8\x71\x5a\x72\x8d\xfe\x72\ \x6d\x13\x09\x5a\x83\x1e\x26\x05\x47\xb3\xfa\xf9\xd2\x26\x12\xb4\ \x06\x5d\xc5\x39\x47\x33\xfb\xb4\x06\x7d\x19\x65\x12\x41\x0f\xc2\ \x54\x22\xe8\x0b\x95\x4a\x04\xdd\x5f\xae\x24\x82\xee\x05\x89\x44\ \xd0\xe7\x41\xf2\x97\x46\xf7\x53\xfa\x12\x74\x1a\xf7\xc7\x6a\xf5\ \x45\xfc\x6b\x4c\x73\xcd\x03\x9d\x85\x6e\xd0\x5d\x3f\x3e\xdd\xc8\ \xf9\xa1\xf1\x80\x33\x35\x76\xba\x41\xff\xf4\xa3\x13\xdd\x3a\x13\ \x83\xe6\xe8\xde\xd8\x34\x6a\x75\x2f\x92\x08\xfa\x87\x17\x4a\x04\ \xfd\x7d\x1e\x4a\xd4\xea\xae\x92\x08\xfa\xcc\x55\x12\x41\x97\x6a\ \x38\xfe\x06\xe0\x80\xad\xee\xa3\x69\x89\x6f\x00\x00\x00\x00\x49\ \x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\xc2\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x29\x00\x00\x00\x29\x08\x06\x00\x00\x00\xa8\x60\x00\xf6\ 
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x00\x64\x49\x44\x41\x54\x58\x85\xed\ \xd9\xc1\x0d\x80\x30\x0c\xc0\xc0\x14\x31\x2b\x2b\x24\x23\x64\x06\ \x96\x85\x09\x90\x78\x58\x6a\x2b\xd9\x13\xdc\xdf\x23\x33\x9f\x58\ \xbc\x33\x22\xa2\xbb\xef\xd9\x90\xaf\xaa\xea\x3a\x66\x23\xfe\x24\ \x92\x4a\x24\x95\x48\x2a\x91\x54\x22\xa9\x44\x52\x89\xa4\x12\x49\ \x25\x92\x4a\x24\x95\x48\x2a\x91\x54\x22\xa9\x44\x52\x89\xa4\x12\ \x49\x25\x92\x4a\x24\xd5\x16\xc8\xb1\xc3\xc7\x79\x01\x28\xc6\x09\ \x1b\x33\x94\xbf\xef\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ \x00\x00\x01\xc2\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x31\x49\x44\x41\x54\ \x48\x4b\xbd\xcd\xbb\x4e\xc3\x40\x10\x85\x61\x1e\x8e\x87\xe0\x4d\ \x10\x25\x6d\xba\x88\x36\xb4\xb4\xd4\x3c\x02\x05\x0d\x0d\x05\x12\ \x04\xc7\x8e\xb3\xbe\xc4\xd7\xc4\x49\x68\x38\xab\x99\x1d\xad\x28\ \x67\xb5\x96\x7e\x59\x23\x9d\xd5\xe7\xab\xc5\x72\x15\x2f\xab\x8f\ \xd3\x25\x46\xac\x0f\xc7\x73\x8c\x66\xd1\xfb\xc3\x89\xba\xbe\xb9\ \x0b\x4f\xb4\x59\xf4\x6e\x9c\xa8\x7f\xef\x74\x89\x36\x8b\xde\x0e\ \x47\x0a\x9b\xdc\xba\x7c\x61\x16\xbd\xe9\x0f\x14\x36\xb9\x75\xf9\ \x02\xeb\xfb\x6e\xa4\xb0\xc9\xad\xcb\x17\x58\xaf\xdb\x81\xc2\x26\ \xb7\x2e\x5f\x60\xbd\x6a\x06\x0a\x1b\xfa\x35\x2f\xea\x2f\x12\x8d\ \xf5\xa2\xee\x29\xfb\x28\x7b\x0e\x09\x82\x68\xac\x9b\xaa\xa3\xb0\ \x5d\xd6\x4f\x21\x41\x10\x8d\xf5\x5d\xd9\x52\xd8\xce\x9f\x8f\x21\ 
\x41\x10\x8d\xf5\xbc\x68\x28\xab\x7f\x3c\x84\x04\x41\x34\xd6\xb7\ \xa6\xa1\xb0\x9d\xde\x17\x21\x41\x10\x8d\xf5\x6c\xb7\xa7\xb0\x4d\ \x6f\xf7\x21\x41\x10\x8d\xf5\x34\xaf\x29\x6c\xf6\x07\xaf\xb7\xea\ \x2f\x12\x8d\xf5\x4d\x5e\x53\xd8\xe4\xd6\xe5\x0b\x4e\xdf\x56\x94\ \xdd\xdc\xad\xcb\x17\x58\x4f\xb2\x8a\xc2\x26\xb7\x2e\x5f\x60\xfd\ \x27\x2b\x29\x6c\x72\xeb\xf2\x05\xa7\xa7\x25\x65\x37\x77\xeb\xf2\ \x05\xd6\xd7\x69\x41\x61\x0b\x4f\x34\xd6\xbf\x37\x45\x8c\x9c\x9e\ \x98\x18\xb1\xfe\x95\x98\x18\xb1\x1e\xab\xe5\xea\x0f\x0e\x98\x91\ \x35\xc6\xa1\x36\xaa\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ \x00\x00\x01\x4e\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x0a\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x51\x4b\xcb\xc2\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0b\x12\x00\x00\x0b\x12\x01\xd2\xdd\ \x7e\xfc\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x00\xbd\x49\x44\x41\x54\ \x38\x4f\x8d\xc9\xc9\x8d\xc2\x50\x00\x04\x51\x82\x23\x08\x52\x21\ \x05\x6e\xdc\x89\x6d\x00\x63\x63\xbc\xe1\x95\x65\xe6\x34\xa0\xea\ \xdf\xa7\x0f\xb2\x54\x97\xd2\x5b\x6c\xb6\xbb\x2f\xbd\x79\x7a\xfc\ \x45\x13\x8f\xf7\xdf\x68\xf3\x78\xb8\x3d\x69\xb9\x5a\xbf\xf2\xce\ \xe3\x7e\x7a\x10\xec\x9d\xc7\xdd\x78\x27\xd8\x3b\x8f\xdb\xe1\x46\ \xb0\x57\x7c\xed\x27\x82\xbd\xe2\xa6\x1b\x09\xf6\x8a\xeb\x76\x24\ \xd8\x2b\x2e\x9b\x81\x60\xaf\xb8\xa8\x7b\x82\xbd\xe2\x4b\xd5\x11\ \xec\x15\xe7\x65\x4b\xb0\x57\x7c\x2e\x5a\x82\xbd\xe2\xec\x72\x25\ \xd8\x2b\x4e\xf3\x86\x60\xaf\xf8\x94\x37\x04\x7b\x03\x9f\x6b\x12\ \x87\x15\x27\x59\x4d\xb0\x57\x7c\xcc\x2a\x82\xbd\x81\xd3\x8a\xc4\ \x61\xc5\x87\xb4\x24\xd8\x2b\xde\x9f\xca\x68\x81\x93\x22\x9a\xf8\ \x27\x29\xa2\x89\x3f\xb6\xdd\xfd\x03\xaf\x34\xbc\x27\xb0\x9e\x89\ \xd7\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ 
\x00\x00\x02\x22\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x91\x49\x44\x41\x54\ \x48\x4b\xbd\xcd\xcb\x4a\xc3\x50\x10\xc6\x71\x1f\xce\x87\xf0\x45\ \x5c\xb8\x70\xe1\xc2\x85\x82\x8b\x8a\xd6\x0b\x45\x5a\x5b\x70\xe1\ \xc2\x85\xb5\x5d\xf8\x02\x22\x22\x22\x22\x22\x52\x6b\x9a\x5e\x92\ \x26\x4d\xdb\xf4\xee\xc6\x19\x32\x19\x0e\xa7\x65\x7a\x10\x52\xf8\ \xad\x26\xdf\xf9\x67\x6d\x3f\x95\x49\x0e\xd6\x07\xe3\x59\x12\xa8\ \x1e\x8e\xa6\x49\x58\x49\xbd\x3f\x9c\x18\x5a\xdf\xd8\x5a\x8a\xc7\ \x2b\xa9\xf7\x06\x63\x43\x5a\x68\x21\x1e\xaf\xa4\xde\x0d\x47\x86\ \xe0\xf1\xaf\x7b\x2f\x80\x01\x8f\xff\x55\x6f\x95\x05\x0b\xea\x41\ \x7f\x68\x08\xeb\x8d\x5b\x01\x0c\x78\x4c\xf5\x4e\x6f\xa0\x3a\x2b\ \x94\xb5\x0b\xc3\xba\x7d\x23\x80\x01\x8f\xa9\xee\x77\x43\x15\x2c\ \x4e\x0b\x25\xed\x18\x81\x4f\x33\xeb\x5a\x00\x03\x1e\x53\xdd\x0b\ \x42\x15\x2c\xf0\x07\xf9\x92\x76\x07\x58\xaf\x5e\x09\x60\xc0\x63\ \xaa\xbb\x7e\x5f\x15\xd5\xc1\xc9\x65\x69\xfe\xd3\xac\x92\x17\xc0\ \x80\xc7\x54\x77\xbc\x9e\x8a\xeb\x60\xfe\xd3\xf4\xeb\x42\xa0\x3e\ \xa1\x7a\xab\xdd\x55\x71\x3a\x9d\xbb\x9b\xff\x34\xfd\x3c\x17\xc0\ \x80\xc7\x54\x6f\xba\x81\x2a\x4a\x1f\xe7\x8a\xda\x1d\x60\xfd\x23\ \x2d\x80\x01\x8f\xa9\xde\x70\x02\x15\xa6\xb3\x45\xed\x18\xc1\xfa\ \xfb\xa1\x00\x06\x3c\xa6\x7a\xbd\xd5\x51\x1d\x65\x8b\xda\x85\xc1\ \xe3\xc9\xdb\x81\x00\x06\x3c\xa6\xba\xdd\xf4\x0d\x61\xfd\x75\x4f\ \x00\x03\x1e\x53\xbd\xd6\xf4\x0d\x61\xfd\x65\x57\x00\x03\x1e\xc7\ \xf5\x86\x67\x08\x1e\x8f\x9f\x77\x04\x58\x8f\xc7\x54\xb7\xea\x9e\ 
\x21\xac\x3f\x6d\x0b\x60\xc0\x63\xaa\xff\xd4\xdb\x86\xb0\xfe\x28\ \x81\x01\x8f\xe3\xba\xdd\x36\x84\xf5\x87\x4d\x01\xd6\xe3\x31\xd5\ \xab\xb6\x6b\x08\x1e\x2f\xc5\x63\xaa\x7f\xd7\xdc\x24\xc4\x75\xcb\ \x49\x02\xd5\x2b\x96\x93\x04\xaa\x27\x25\x95\xf9\x03\x6c\x41\xe7\ \xb2\x07\xe6\xaf\xd1\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ \x00\x00\x02\x24\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x93\x49\x44\x41\x54\ \x48\x4b\xbd\xcc\xbb\x4a\x03\x61\x10\x05\x60\x1f\xce\x87\xf0\x45\ \x2c\x52\x58\x58\x58\x28\x58\xc4\xc2\x42\x82\x8d\x85\x85\x85\x85\ \x85\x85\x2f\x20\x22\x22\x22\x22\x22\x31\xe6\x9e\xcd\xfd\xe6\x66\ \xb3\x8d\x27\xcc\xec\xe1\xe7\xff\x19\x10\xc3\x06\xbe\xe2\x30\x73\ \x38\x5b\x47\xc5\x52\x7e\x56\xeb\xb3\x78\x99\x07\x5d\x9f\xfe\x24\ \x79\xd8\xc8\xfa\x64\xbe\x10\xdb\x3b\x85\xf5\x71\x6d\x23\xeb\xe3\ \x59\x2c\xbc\xde\xff\x70\x6d\x23\xeb\xa3\xe9\x8f\xc0\x8f\x99\xd2\ \xe8\xce\xe2\x35\xc1\x5d\xf8\xd3\x3a\x8e\x69\xfb\x36\x64\x95\x99\ \x75\x7d\x38\x99\x0b\xfc\x98\x29\x6d\xde\x58\xbc\x26\xb8\x0b\xba\ \x3e\x18\xcf\x04\x7e\xcc\x84\x63\x5a\xbf\x0e\x59\x65\x66\x5d\xef\ \x8f\xa6\x02\x3f\x66\x5a\x56\xaf\x2c\x5e\x13\xdc\x05\x5d\xef\x0d\ \xa7\x02\x3f\x66\xc2\x71\x59\xb9\x0c\x59\x65\x66\x5d\x8f\xfa\x13\ \x81\x1f\x33\x2d\xcb\x17\x16\xaf\x09\xee\x82\xae\x77\x7a\x63\x81\ \x1f\x33\xe1\x98\x7c\x9e\x87\xac\x32\xb3\xae\xb7\xbb\x23\x81\x1f\ \x33\x25\x1f\x67\x16\xaf\x09\xee\x82\xae\xb7\xa2\xa1\xc0\x8f\x99\ \x70\x4c\xde\x4f\x43\x56\x99\x59\xd7\x9b\x9d\xa1\xc0\x8f\x99\x92\ 
\xb7\x13\x8b\xd7\x04\x77\x41\xd7\x1b\xed\x81\xc0\x8f\x99\x70\x5c\ \xbc\x1e\x87\xac\x32\xb3\xae\xd7\x5b\x7d\x81\x1f\x33\x2d\x5e\x0e\ \x2d\x5e\x13\xdc\x05\x5d\xaf\xb5\xfa\x02\x3f\x66\xc2\x71\xf1\x7c\ \x10\xb2\xca\xcc\xd9\x7a\xb3\x27\x56\xbf\x2c\x53\xfc\xb4\x6f\xf1\ \x9a\xe0\x2e\xe8\x7a\xb5\xd1\x13\xf8\x31\x13\x8e\xf1\xe3\x5e\xc8\ \x2a\x33\xeb\xfa\x77\xa3\x2b\xf0\x63\xa6\xf8\xa1\x60\xf1\x9a\xe0\ \x2e\x64\xeb\xf5\xae\x58\xfd\xb2\x4c\x38\xc6\xf7\xbb\x21\xab\xcc\ \xac\xeb\x95\x7a\x24\xf0\x5b\x1f\xd7\x74\xfd\xab\x16\xe5\x21\x5b\ \xaf\x76\xf2\xa0\xeb\xe5\x6a\x27\x0f\xba\x9e\x97\x62\xe9\x17\xda\ \xb5\x98\x10\x31\x42\x5d\xab\x00\x00\x00\x00\x49\x45\x4e\x44\xae\ \x42\x60\x82\ \x00\x00\x00\xed\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x0a\x08\x02\x00\x00\x00\xc3\xd7\x12\x46\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0b\x12\x00\x00\x0b\x12\x01\xd2\xdd\ \x7e\xfc\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x00\x5c\x49\x44\x41\x54\ \x38\x4f\x63\xe8\x9a\x38\x87\x76\x08\x64\xfa\xa7\xaf\x3f\x68\x81\ \xa0\xa6\x7f\xf8\xfc\x8d\x16\x08\x6a\xfa\xbb\x8f\x5f\x68\x81\xa0\ \xa6\xbf\x7a\xfb\x11\x82\x5c\x22\x2a\x29\x47\x70\xd3\xa0\xa6\x3f\ \x7b\xf9\x1e\x82\xd0\xd4\x91\x87\xe0\xa6\x41\x4d\x7f\xfc\xec\x2d\ \x2d\x10\xd4\xf4\x87\x4f\x5e\xd3\x02\x41\x4d\xbf\xff\xe8\x25\x2d\ \x10\xd4\x74\x5a\xa1\x89\x73\x00\xf8\x06\xba\x5a\xe8\x93\x6f\x68\ \x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x01\x3f\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x13\x08\x02\x00\x00\x00\xe7\x0e\x41\x15\ \x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc2\x00\x00\x0e\xc2\ 
\x01\x15\x28\x4a\x80\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\ \x74\x77\x61\x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\ \x76\x33\x2e\x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x00\xbb\x49\ \x44\x41\x54\x38\x4f\xbd\xd5\x2b\x0e\xc2\x50\x10\x85\xe1\x2e\x8e\ \x45\xb0\x15\x34\x8e\x04\x81\x43\xe0\xd0\x78\x56\x81\xc2\xd4\xa0\ \x48\xa0\xa5\xa5\xbd\x8f\x3e\x31\x9c\x66\x26\x13\x04\x6e\x18\x92\ \xdf\x4d\xf2\xdd\x23\x9a\x34\x59\x6d\x76\x76\x25\xa0\x63\x37\x5a\ \x04\x79\xd2\x43\x3b\x58\xf4\x17\xdd\x37\x3d\x35\x9b\x2f\xf4\x89\ \xc6\xdb\x6d\x75\x17\x3b\x4a\x3f\x1c\x82\x68\xbc\xdd\x56\xaf\x43\ \x4b\xe1\xe5\x57\x76\xd4\x04\x41\x34\xde\xfe\xa9\x8f\xd7\x83\xa6\ \x2f\x7a\xe5\x1b\x0a\xb7\xf1\xb2\xd7\x04\x41\x34\xde\xfe\x74\x91\ \xc2\x6d\x48\xb7\x9a\x20\x88\xc6\x7a\x59\x07\x0a\xb7\xfe\xbc\xd6\ \x04\x41\x34\xd6\x8b\x2a\x50\x93\x7e\x5a\x6a\x82\x20\x1a\xeb\x79\ \xe9\xa9\x9f\x7c\xef\xa2\xb1\x9e\x15\xce\x22\xd6\xef\x8f\xda\x22\ \xd6\x6f\x79\x65\x11\xeb\x76\xff\xa6\x37\x06\x80\x09\x57\x1d\xbe\ \x2e\x15\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x02\x26\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x95\x49\x44\x41\x54\ \x48\x4b\xbd\xcd\xbb\x4a\x03\x51\x10\xc6\x71\x1f\xce\x87\xf0\x45\ \x2c\x2c\x2c\x2c\x2c\x14\x2c\x22\x1a\xa3\x04\x49\x4c\xc0\xc2\xc2\ \xc2\x68\x0a\x5f\x40\x44\x44\x44\x44\x44\x62\xdc\x5c\x77\xb3\x9b\ \xcd\xfd\x66\xe3\x0c\x33\x3b\x84\x93\x30\x07\x84\x0d\xfc\xaa\x73\ \xbe\xf3\x3f\x6b\xfb\x89\x74\x7c\xb0\x3e\x18\xcf\xe2\xc0\xf5\xfe\ \x68\x1a\x87\x95\xd4\x7b\xc3\x09\x59\xdf\xd8\xb2\x92\xb1\xd5\x4a\ 
\xea\xdd\xc1\x98\x18\xa1\xa5\x64\x6c\xb5\x92\x7a\xa7\x3f\x22\xf0\ \xf8\xd7\xbb\x57\xc0\x40\xc6\x56\xcb\xea\xcd\xa2\xe2\x3f\xf5\xb0\ \x37\x24\x58\xaf\xdf\x28\x60\x20\x63\x2b\xae\xb7\xbb\x03\x82\xf5\ \xea\xb5\x02\x06\x32\x36\x9c\xe6\x8b\xc6\x09\xd7\x83\x4e\x9f\xc0\ \xe3\x99\x73\xa5\x80\x81\x8c\xe7\xa5\xf2\x77\x8b\x57\x5c\xf7\xc3\ \x3e\xc1\x7a\xf9\x52\x01\x03\x19\x8b\x54\x0e\xd3\x8b\x57\x5c\xf7\ \x82\x1e\xc1\x7a\x29\xa7\x80\x81\x8c\xc9\xc9\x05\xa7\x17\xaf\xb8\ \xee\xfa\x5d\x02\x8b\xe9\xd7\xb9\x02\x06\x32\x96\x27\xc2\xb8\xe2\ \x7a\xb3\xd5\x21\xb0\x98\x7e\x9e\x29\x60\x20\x63\x92\xcc\xde\x4a\ \xdd\xb8\xe2\x7a\xc3\x0b\x09\xd6\x3f\x92\x0a\x18\xc8\x58\x1c\x67\ \x0b\x54\x37\xce\xb9\x5e\x77\x43\x82\xf5\xf7\x43\x05\x0c\x64\x3c\ \xef\x38\x83\x1f\x18\x87\x5c\xaf\x35\xdb\x04\x16\x93\xb7\x03\x05\ \x0c\x64\x6c\x38\xca\x14\x8c\x13\xae\x57\x1b\x01\xc1\xfa\xeb\x9e\ \x02\x06\x32\xb6\xe2\x7a\xa5\x11\x10\xac\xbf\xec\x2a\x60\x20\x63\ \xab\xa8\x5e\xf7\x09\x3c\x1e\x3f\xef\x28\xb0\x1e\x8d\xad\xb8\xee\ \xd4\x7c\x82\xf5\xa7\x6d\x05\x0c\x64\x6c\xc5\xf5\x9f\x5a\x8b\x60\ \xfd\x51\x03\x03\x19\x5b\x45\xf5\x6a\x8b\x60\xfd\x61\x53\x81\xf5\ \x68\x6c\xc5\xf5\x72\xd5\x23\xf0\xd8\x4a\xc6\x56\x5c\xff\xae\x78\ \x71\x88\xea\x8e\x1b\x07\xae\x97\x1c\x37\x0e\x5c\x8f\x4b\x22\xfd\ \x07\x5d\xb2\xe7\xb2\x6f\xdb\xf3\x18\x00\x00\x00\x00\x49\x45\x4e\ \x44\xae\x42\x60\x82\ \x00\x00\x01\xbb\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x2a\x49\x44\x41\x54\ \x48\x4b\xbd\xd2\xbd\x4e\x02\x51\x10\x86\x61\x2f\xce\x8b\xf0\x52\ 
\xb0\xb6\x33\xb1\xa0\x33\x84\xce\x68\x67\xcf\x2d\xd8\x50\xd9\xd8\ \xa0\x2e\xbb\x2c\xfb\xc7\xfe\xc2\x02\x36\xce\xc9\x7c\x3b\x26\x07\ \x23\xb8\x27\x43\xf2\x16\x84\x6f\xf2\x9c\x66\x2f\x6e\x87\x63\xbd\ \x8c\xde\xb4\x7b\x8d\xa0\xd7\x9b\x9d\x46\x67\xd1\xab\xf5\x56\xa3\ \xb3\xe8\x65\xd3\xfe\xd1\xe8\x69\x72\x79\x75\x7d\x18\xfd\x6f\x5d\ \x5a\x9d\xa4\x53\x87\x0f\x1c\xa5\x29\xe8\x45\xbd\x39\xda\xe8\xf1\ \xe7\x01\xfa\x6d\xad\xbf\xf6\x0f\x9d\xe2\x07\x4e\xa4\x29\xe8\x79\ \xb5\xd6\x08\xfa\xaa\x6c\x34\x82\x9e\x15\xb5\x46\xd0\xd3\xbc\xd6\ \x08\x7a\x9c\x55\x1a\x41\x8f\xd2\x92\x93\x6f\xce\x25\xd1\xa0\x2f\ \x93\x82\xb3\xee\xfa\x25\x1a\xf4\x30\xce\x39\xeb\xae\x5f\xa2\x41\ \x5f\x44\x39\x67\xdd\xf5\x4b\x34\xe8\xc1\x72\xc5\xd1\xf6\x15\x4d\ \x5c\x22\x41\x34\xe8\x7e\x98\x71\xb4\xed\xfd\x67\x97\x48\x10\x0d\ \xfa\x3c\xcc\x38\xa3\xcf\x1e\x5c\x22\x41\xb4\x4e\x5f\xa4\x1c\x6d\ \xbb\xb7\x7b\x97\x8c\xde\x69\xd0\xbd\x20\xe5\x68\xdb\xbe\xde\xb9\ \x44\x82\x68\xd0\x3f\x83\x84\x33\xfa\xf4\xc6\x25\x12\x44\xeb\x74\ \x3f\xe1\x68\x6b\x5f\x06\x2e\x19\xbd\xd3\xa0\x7f\xf8\x31\x47\x9b\ \x7b\xa2\x41\x7f\x9f\xc7\x1a\x75\xba\x17\x69\x04\x7d\xe6\x45\x1a\ \x41\xd7\x6a\x38\xfe\x06\x3c\xec\xc9\x88\xb5\xd8\x55\x59\x00\x00\ \x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x02\x62\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\ \x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x02\x04\x49\x44\x41\x54\x78\x9c\xed\ \xd6\xb1\x0d\x03\x01\x0c\x03\x31\x21\x13\x64\xff\x65\x3f\x6d\xbc\ \x80\x2d\xe0\xa9\x8e\xfd\x15\x4a\xe6\xbe\xfc\x2e\x7f\xfe\xf5\x3c\ \x4f\xf8\x9d\x3e\x2f\x91\x6f\xfc\x49\xba\x4a\xe4\x7d\xff\xaf\xa6\ \x4c\xde\xb1\x0f\xc0\x49\x0a\x4a\xe4\x1b\xfb\x00\x3c\x56\x53\x26\ \xef\xd8\x07\xe0\x24\x05\x25\xf2\x8d\x7d\x00\x1e\xab\x29\x93\x77\ \xec\x03\x70\x92\x82\x12\xf9\xc6\x3e\x00\x8f\xd5\x94\xc9\x3b\xf6\ 
\x01\x38\x49\x41\x89\x7c\x63\x1f\x80\xc7\x6a\xca\xe4\x1d\xfb\x00\ \x9c\xa4\xa0\x44\xbe\xb1\x0f\xc0\x63\x35\x65\xf2\x8e\x7d\x00\x4e\ \x52\x50\x22\xdf\xd8\x07\xe0\xb1\x9a\x32\x79\xc7\x3e\x00\x27\x29\ \x28\x91\x6f\xec\x03\xf0\x58\x4d\x99\xbc\x63\x1f\x80\x93\x14\x94\ \xc8\x37\xf6\x01\x78\xac\xa6\x4c\xde\xb1\x0f\xc0\x49\x0a\x4a\xe4\ \x1b\xfb\x00\x3c\x56\x53\x26\xef\xd8\x07\xe0\x24\x05\x25\xf2\x8d\ \x7d\x00\x1e\xab\x29\x93\x77\xec\x03\x70\x92\x82\x12\xf9\xc6\x3e\ \x00\x8f\xd5\x94\xc9\x3b\xf6\x01\x38\x49\x41\x89\x7c\x63\x1f\x80\ \xc7\x6a\xca\xe4\x1d\xfb\x00\x9c\xa4\xa0\x44\xbe\xb1\x0f\xc0\x63\ \x35\x65\xf2\x8e\x7d\x00\x4e\x52\x50\x22\xdf\xd8\x07\xe0\xb1\x9a\ \x32\x79\xc7\x3e\x00\x27\x29\x28\x91\x6f\xec\x03\xf0\x58\x4d\x99\ \xbc\x63\x1f\x80\x93\x14\x94\xc8\x37\xf6\x01\x78\xac\xa6\x4c\xde\ \xb1\x0f\xc0\x49\x0a\x4a\xe4\x1b\xfb\x00\x3c\x56\x53\x26\xef\xd8\ \x07\xe0\x24\x05\x25\xf2\x8d\x7d\x00\x1e\xab\x29\x93\x77\xec\x03\ \x70\x92\x82\x12\xf9\xc6\x3e\x00\x8f\xd5\x94\xc9\x3b\xf6\x01\x38\ \x49\x41\x89\x7c\x63\x1f\x80\xc7\x6a\xca\xe4\x1d\xfb\x00\x9c\xa4\ \xa0\x44\xbe\xb1\x0f\xc0\x63\x35\x65\xf2\x8e\x7d\x00\x4e\x52\x50\ \x22\xdf\xd8\x07\xe0\xb1\x9a\x32\x79\xc7\x3e\x00\x27\x29\x28\x91\ \x6f\xec\x03\xf0\x58\x4d\x99\xbc\x63\x1f\x80\x93\x14\x94\xc8\x37\ \xf6\x01\x78\xac\xa6\x4c\xde\xb1\x0f\xc0\x49\x0a\x4a\xe4\x1b\xfb\ \x00\x3c\x56\x53\x26\xef\xd8\x07\xe0\x24\x05\x25\xf2\x8d\x7d\x00\ \x1e\xab\x29\x93\x77\xec\x03\x70\x92\x82\x12\xf9\xc6\x3e\x00\x8f\ \xd5\x94\xc9\x3b\xf6\x01\x38\x49\x41\x89\x7c\x63\x1f\x80\xc7\x6a\ \xca\xe4\x1d\xfb\x00\x9c\xa4\xa0\x44\xbe\xb1\x0f\xc0\x63\x35\x65\ \xf2\x8e\x7d\x00\x4e\x52\x50\x22\xdf\xd8\x07\xe0\xb1\x9a\x32\x79\ \xc7\x3e\x00\x27\x29\x28\x91\x6f\xec\x03\xbc\xdc\x3f\xe4\x79\x69\ \xe9\x67\xab\xcf\x62\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ " qt_resource_name = b"\ \x00\x0b\ \x05\x55\xc9\xe3\ \x00\x64\ \x00\x6f\x00\x63\x00\x6b\x00\x5f\x00\x69\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\ \x00\x0d\ \x0c\x46\x04\x47\ \x00\x63\ 
\x00\x72\x00\x6f\x00\x73\x00\x73\x00\x5f\x00\x62\x00\x6f\x00\x78\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0a\ \x0a\xc8\x6f\xe7\ \x00\x63\ \x00\x65\x00\x6e\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x10\ \x0c\x5a\x16\x47\ \x00\x63\ \x00\x72\x00\x6f\x00\x73\x00\x73\x00\x5f\x00\x65\x00\x78\x00\x5f\x00\x62\x00\x6f\x00\x78\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x11\ \x05\x0d\xa3\xa7\ \x00\x74\ \x00\x68\x00\x69\x00\x6e\x00\x5f\x00\x76\x00\x65\x00\x72\x00\x74\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x2e\x00\x70\x00\x6e\x00\x67\ \ \x00\x0f\ \x0b\x70\x3f\xe7\ \x00\x61\ \x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x6e\x00\x6f\x00\x72\x00\x74\x00\x68\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0d\ \x04\x14\x00\x47\ \x00\x67\ \x00\x75\x00\x69\x00\x64\x00\x65\x00\x5f\x00\x62\x00\x6f\x00\x78\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x12\ \x0a\x7a\xa0\x07\ \x00\x73\ \x00\x70\x00\x6c\x00\x69\x00\x74\x00\x5f\x00\x76\x00\x65\x00\x72\x00\x74\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x2e\x00\x70\x00\x6e\ \x00\x67\ \x00\x10\ \x04\xfc\x40\xa7\ \x00\x62\ \x00\x61\x00\x72\x00\x5f\x00\x76\x00\x65\x00\x72\x00\x74\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0e\ \x0b\x8a\xe6\x07\ \x00\x61\ \x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x65\x00\x61\x00\x73\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x14\ \x0b\x9f\xd1\x07\ \x00\x73\ \x00\x70\x00\x6c\x00\x69\x00\x74\x00\x5f\x00\x68\x00\x6f\x00\x72\x00\x69\x00\x7a\x00\x6f\x00\x6e\x00\x74\x00\x61\x00\x6c\x00\x2e\ \x00\x70\x00\x6e\x00\x67\ \x00\x12\ \x0d\x7f\x14\x07\ \x00\x62\ \x00\x61\x00\x72\x00\x5f\x00\x68\x00\x6f\x00\x72\x00\x69\x00\x7a\x00\x6f\x00\x6e\x00\x74\x00\x61\x00\x6c\x00\x2e\x00\x70\x00\x6e\ \x00\x67\ \x00\x13\ \x0c\x9c\x17\xe7\ \x00\x74\ \x00\x68\x00\x69\x00\x6e\x00\x5f\x00\x68\x00\x6f\x00\x72\x00\x69\x00\x7a\x00\x6f\x00\x6e\x00\x74\x00\x61\x00\x6c\x00\x2e\x00\x70\ \x00\x6e\x00\x67\ \x00\x0e\ \x0f\x8a\xe0\xc7\ \x00\x61\ 
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x77\x00\x65\x00\x73\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0f\ \x0e\x70\x21\xe7\ \x00\x61\ \x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x73\x00\x6f\x00\x75\x00\x74\x00\x68\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0e\ \x07\x04\x9f\x87\ \x00\x62\ \x00\x61\x00\x63\x00\x6b\x00\x67\x00\x72\x00\x6f\x00\x75\x00\x6e\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\ " qt_resource_struct = b"\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x0f\x00\x00\x00\x02\ \x00\x00\x00\xc8\x00\x00\x00\x00\x00\x01\x00\x00\x0a\xb3\ \x00\x00\x01\x12\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x3f\ \x00\x00\x00\x7c\x00\x00\x00\x00\x00\x01\x00\x00\x07\x12\ \x00\x00\x02\x24\x00\x00\x00\x00\x00\x01\x00\x00\x18\xfc\ \x00\x00\x00\xe8\x00\x00\x00\x00\x00\x01\x00\x00\x0b\x79\ \x00\x00\x00\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x02\x65\ \x00\x00\x00\xa4\x00\x00\x00\x00\x00\x01\x00\x00\x08\xf7\ \x00\x00\x01\x38\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x91\ \x00\x00\x01\x5a\x00\x00\x00\x00\x00\x01\x00\x00\x10\xb7\ \x00\x00\x00\x1c\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ \x00\x00\x00\x56\x00\x00\x00\x00\x00\x01\x00\x00\x04\x43\ \x00\x00\x01\xb2\x00\x00\x00\x00\x00\x01\x00\x00\x13\xd0\ \x00\x00\x01\x88\x00\x00\x00\x00\x00\x01\x00\x00\x12\xdf\ \x00\x00\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x17\x3d\ \x00\x00\x01\xde\x00\x00\x00\x00\x00\x01\x00\x00\x15\x13\ " def qInitResources(): QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data) def qCleanupResources(): QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data) qInitResources()
56.173469
129
0.724493
7,914
33,030
3.021481
0.038539
0.115674
0.080545
0.027099
0.470475
0.444128
0.436601
0.431206
0.406867
0.396956
0
0.35067
0.01974
33,030
587
130
56.269165
0.387856
0.006237
0
0.243433
0
0.816112
0
0
0
1
0.000244
0
0
1
0.003503
false
0
0.001751
0
0.005254
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
4
424e802f4dbcaee8a96d6d1b8408c135132d6dce
203
py
Python
0x03-python-data_structures/5-no_c.py
darkares23/holbertonschool-higher_level_programming
931b1b701d8a1d990b7cd931486496c0b5502e21
[ "MIT" ]
null
null
null
0x03-python-data_structures/5-no_c.py
darkares23/holbertonschool-higher_level_programming
931b1b701d8a1d990b7cd931486496c0b5502e21
[ "MIT" ]
null
null
null
0x03-python-data_structures/5-no_c.py
darkares23/holbertonschool-higher_level_programming
931b1b701d8a1d990b7cd931486496c0b5502e21
[ "MIT" ]
null
null
null
#!/usr/bin/python3 def no_c(my_string): newStr = "" for i in range(len(my_string)): if my_string[i] != "c" and my_string[i] != "C": newStr += my_string[i] return (newStr)
25.375
55
0.55665
32
203
3.34375
0.53125
0.373832
0.252336
0.186916
0
0
0
0
0
0
0
0.006757
0.270936
203
7
56
29
0.716216
0.083744
0
0
0
0
0.010811
0
0
0
0
0
0
1
0.166667
false
0
0
0
0.333333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
424ec4b4bdb2aecf125f957628d8c4ad4964ec90
139
py
Python
week3/user/urls.py
RomulusGwelt/advanced-django2019
6b8d4ce2f829456da1e00ddb8c357608d001aad2
[ "MIT" ]
null
null
null
week3/user/urls.py
RomulusGwelt/advanced-django2019
6b8d4ce2f829456da1e00ddb8c357608d001aad2
[ "MIT" ]
null
null
null
week3/user/urls.py
RomulusGwelt/advanced-django2019
6b8d4ce2f829456da1e00ddb8c357608d001aad2
[ "MIT" ]
null
null
null
from django.urls import path from rest_framework_jwt.views import obtain_jwt_token urlpatterns = [ path('login/', obtain_jwt_token), ]
23.166667
53
0.784173
20
139
5.15
0.65
0.174757
0.271845
0
0
0
0
0
0
0
0
0
0.129496
139
6
54
23.166667
0.85124
0
0
0
0
0
0.042857
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
425f3c9dc68cbe9337f3205d8269bee68f00d246
201
py
Python
app/api/serializers/time_entry_serializers.py
nabaz/projecttracker
c6b326592f7a6925b2fbc0924350dd0951beca0f
[ "MIT" ]
null
null
null
app/api/serializers/time_entry_serializers.py
nabaz/projecttracker
c6b326592f7a6925b2fbc0924350dd0951beca0f
[ "MIT" ]
null
null
null
app/api/serializers/time_entry_serializers.py
nabaz/projecttracker
c6b326592f7a6925b2fbc0924350dd0951beca0f
[ "MIT" ]
null
null
null
from api.models import TimeEntry from rest_framework import serializers class TimeEntrySerializers(serializers.ModelSerializer): class Meta: model = TimeEntry fields = '__all__'
20.1
56
0.746269
20
201
7.25
0.75
0
0
0
0
0
0
0
0
0
0
0
0.20398
201
9
57
22.333333
0.90625
0
0
0
0
0
0.034826
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
4277eacc148a7b98a23d9c3d7372e795895a0156
1,725
py
Python
tests/test_default_ssl.py
aboedo/sqlalchemy-redshift
bf81bb81e42987bb81345845fede560d9184302f
[ "MIT" ]
1
2019-06-04T21:01:13.000Z
2019-06-04T21:01:13.000Z
tests/test_default_ssl.py
aboedo/sqlalchemy-redshift
bf81bb81e42987bb81345845fede560d9184302f
[ "MIT" ]
1
2020-05-23T10:54:44.000Z
2020-05-23T10:54:44.000Z
tests/test_default_ssl.py
aboedo/sqlalchemy-redshift
bf81bb81e42987bb81345845fede560d9184302f
[ "MIT" ]
1
2020-12-24T10:20:24.000Z
2020-12-24T10:20:24.000Z
import sqlalchemy as sa CERT = b"""-----BEGIN CERTIFICATE----- MIIDeDCCAuGgAwIBAgIJALPHPDcjk979MA0GCSqGSIb3DQEBBQUAMIGFMQswCQYD VQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHU2VhdHRsZTET MBEGA1UEChMKQW1hem9uLmNvbTELMAkGA1UECxMCQ00xLTArBgkqhkiG9w0BCQEW HmNvb2tpZS1tb25zdGVyLWNvcmVAYW1hem9uLmNvbTAeFw0xMjExMDIyMzI0NDda Fw0xNzExMDEyMzI0NDdaMIGFMQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGlu Z3RvbjEQMA4GA1UEBxMHU2VhdHRsZTETMBEGA1UEChMKQW1hem9uLmNvbTELMAkG A1UECxMCQ00xLTArBgkqhkiG9w0BCQEWHmNvb2tpZS1tb25zdGVyLWNvcmVAYW1h em9uLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw949t4UZ+9n1K8vj PVkyehoV2kWepDmJ8YKl358nkmNwrSAGkslVttdpZS+FrgIcb44UbfVbB4bOSq0J qd39GYVRzSazCwr2tpibFvH87PyAX4VVUBDlCizJToEYsXkAKecs+IRqCDWG2ht/ pibO2+T5Wp8jaxUBvDmoHY3BSgkCAwEAAaOB7TCB6jAdBgNVHQ4EFgQUE5KUaWSM Uml+6MZQia7DjmfjvLgwgboGA1UdIwSBsjCBr4AUE5KUaWSMUml+6MZQia7Djmfj vLihgYukgYgwgYUxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAw DgYDVQQHEwdTZWF0dGxlMRMwEQYDVQQKEwpBbWF6b24uY29tMQswCQYDVQQLEwJD TTEtMCsGCSqGSIb3DQEJARYeY29va2llLW1vbnN0ZXItY29yZUBhbWF6b24uY29t ggkAs8c8NyOT3v0wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOBgQCYZSRQ zJNHXyKACrqMB5j1baUGf5NA0cZ/8s5iWeC9Gkwi7cXyiq9OrBaUtJBzAJTzfWbH dfVaBL5FWuQsbkJWHe0mV+l4Kzl5bh/FSDSkhYR1duYRmdCXckQk6mAF6xG+1mpn 8YlJmbEhkDmBgJ8C8p0LCMNaO2xFLlNU0O+0ng== -----END CERTIFICATE----- """ def test_ssl_args(): engine = sa.create_engine('redshift+psycopg2://test') dialect = engine.dialect url = engine.url cargs, cparams = dialect.create_connect_args(url) assert cargs == [] assert cparams.pop('host') == 'test' assert cparams.pop('sslmode') == 'verify-full' with open(cparams.pop('sslrootcert'), 'rb') as cert: assert cert.read() == CERT assert cparams == {}
42.073171
64
0.868406
92
1,725
16.228261
0.695652
0.026122
0.021433
0
0
0
0
0
0
0
0
0.088474
0.069565
1,725
40
65
43.125
0.841745
0
0
0
0
0
0.769855
0.704928
0
1
0
0
0.147059
1
0.029412
false
0
0.029412
0
0.058824
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
4
428957763c1aa9d60abef794d1033af5d2c14d70
1,108
py
Python
typedcollections/test.py
dhilst/typedcollections
23a7307f2aaa0ec6e20b8d6ea8b217f3aa8d3a50
[ "Apache-2.0" ]
1
2019-09-26T18:18:06.000Z
2019-09-26T18:18:06.000Z
typedcollections/test.py
dhilst/typedcollections
23a7307f2aaa0ec6e20b8d6ea8b217f3aa8d3a50
[ "Apache-2.0" ]
null
null
null
typedcollections/test.py
dhilst/typedcollections
23a7307f2aaa0ec6e20b8d6ea8b217f3aa8d3a50
[ "Apache-2.0" ]
null
null
null
import unittest from . import MultiTypedDict, MultiTypedList, TypedList, TypedDict class MyMTD(MultiTypedDict): i = int, s = str, class MyTD(TypedDict): value_type = int, class MyMTL(MultiTypedList): type = int,int,int class MyTL(TypedList): type = int, class Test(unittest.TestCase): def test(self): mtd = MyMTD(i=1, s='hello') mtl = MyMTL(1,2,3) td = MyTD(a=1,b=1,c=1) tl = MyTL(1,2) if __debug__: self.assertRaises(TypeError, MyMTD,1,2) self.assertRaises(TypeError, MyMTL,'str') self.assertRaises(TypeError, lambda: MyTD(a='str')) self.assertRaises(TypeError, MyTL,'str') def raiseTE(): mtd['i'] = 'not an int' self.assertRaises(TypeError, raiseTE) def raiseTE(): mtl[0] = 'str' self.assertRaises(TypeError, raiseTE) def raiseTE(): td['a'] = 'str' self.assertRaises(TypeError, raiseTE) def raiseTE(): tl[0] = 'str' self.assertRaises(TypeError, raiseTE)
22.16
66
0.563177
126
1,108
4.912698
0.333333
0.206785
0.323102
0.226171
0.321486
0.273021
0.145396
0
0
0
0
0.016971
0.308664
1,108
49
67
22.612245
0.791123
0
0
0.133333
0
0
0.031617
0
0
0
0
0
0.266667
1
0.166667
false
0
0.066667
0
0.566667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
35ef0ae9adbcecb9e8a08f50d55b218f1dd4ebd9
576
py
Python
sfa/admin.py
yashiki-takajin/sfa-next
049058a37b9ee45b58be5f4393a0b3191362043c
[ "MIT" ]
19
2018-11-23T10:13:14.000Z
2022-03-26T11:57:55.000Z
sfa/admin.py
yashiki-takajin/sfa-next
049058a37b9ee45b58be5f4393a0b3191362043c
[ "MIT" ]
3
2020-06-05T19:25:20.000Z
2021-06-10T20:59:30.000Z
sfa/admin.py
yashiki-takajin/sfa-next
049058a37b9ee45b58be5f4393a0b3191362043c
[ "MIT" ]
8
2019-04-21T11:08:22.000Z
2021-12-08T09:38:30.000Z
from django.contrib import admin from django.conf import settings from django.contrib.auth.admin import UserAdmin from django.contrib.auth.forms import UserChangeForm, UserCreationForm from django.utils.translation import ugettext_lazy as _ # Register your models here. from .models import CustomerInfo, ContactInfo, AddressInfo @admin.register(CustomerInfo) class CustomerInfoAdmin(admin.ModelAdmin): pass @admin.register(ContactInfo) class ContactInfoAdmin(admin.ModelAdmin): pass @admin.register(AddressInfo) class AddressInfoAdmin(admin.ModelAdmin): pass
26.181818
70
0.821181
67
576
7.029851
0.447761
0.106157
0.10828
0.089172
0.135881
0
0
0
0
0
0
0
0.109375
576
21
71
27.428571
0.918129
0.045139
0
0.2
0
0
0
0
0
0
0
0
0
1
0
true
0.2
0.4
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
35f2b583bdde280d0c378ff7523999bf83cc0747
1,011
py
Python
source/rpg.py
chowdaryprasaad/Python-Random-Password-Generator
e810715f3358dcaec159765dc71b95559430e6bb
[ "MIT" ]
10
2019-10-15T07:38:53.000Z
2022-03-01T17:21:50.000Z
source/rpg.py
chowdaryprasaad/Python-Random-Password-Generator
e810715f3358dcaec159765dc71b95559430e6bb
[ "MIT" ]
3
2019-07-18T19:42:09.000Z
2019-08-06T07:17:21.000Z
source/rpg.py
ismailtasdelen/Python-Randon-Password-Generator
b62f1833ea326d7ad934e77924f83bfa9d2481ba
[ "MIT" ]
17
2020-01-17T20:07:42.000Z
2022-02-20T16:27:02.000Z
#!/usr/bin/env python # -*- coding:utf-8 -*- import string import random def random_password_generator(): chars = string.ascii_uppercase + string.ascii_lowercase + string.digits size = 8 return ''.join(random.choice(chars) for x in range(size, 20)) def random_password_generator_ico(): random_password_generator_ico = """ ############################################################# # PYTHON - Random Password Generetor (RPG) - GH0ST S0FTWARE # ############################################################# # CONTACT # ############################################################# # DEVELOPER : İSMAİL TAŞDELEN # # Mail Address : pentestdatabase@gmail.com # # LINKEDIN : https://www.linkedin.com/in/ismailtasdelen # # Whatsapp : + 90 534 295 94 31 # ############################################################# """ print(random_password_generator_ico)
38.884615
75
0.447082
84
1,011
5.25
0.654762
0.15873
0.208617
0.176871
0
0
0
0
0
0
0
0.023286
0.23541
1,011
25
76
40.44
0.544631
0.040554
0
0.2
0
0
0.652893
0.277893
0
0
0
0
0
1
0.1
false
0.25
0.1
0
0.25
0.05
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
c4277ffdfccc49eb138855098ba686d211931bf9
520
py
Python
tests/unit/test_diffusion2d_functions.py
sab-inf/testing-python-exercise
83938966322b37f417d8f76e83c42851f856e7b2
[ "CC-BY-4.0" ]
null
null
null
tests/unit/test_diffusion2d_functions.py
sab-inf/testing-python-exercise
83938966322b37f417d8f76e83c42851f856e7b2
[ "CC-BY-4.0" ]
1
2022-01-20T06:10:30.000Z
2022-01-20T06:10:30.000Z
tests/unit/test_diffusion2d_functions.py
sab-inf/testing-python-exercise
83938966322b37f417d8f76e83c42851f856e7b2
[ "CC-BY-4.0" ]
16
2022-01-13T13:31:15.000Z
2022-01-19T17:42:42.000Z
""" Tests for functions in class SolveDiffusion2D """ from diffusion2d import SolveDiffusion2D def test_initialize_domain(): """ Check function SolveDiffusion2D.initialize_domain """ solver = SolveDiffusion2D() def test_initialize_physical_parameters(): """ Checks function SolveDiffusion2D.initialize_domain """ solver = SolveDiffusion2D() def test_set_initial_condition(): """ Checks function SolveDiffusion2D.get_initial_function """ solver = SolveDiffusion2D()
19.259259
57
0.726923
46
520
7.956522
0.5
0.155738
0.188525
0.180328
0.377049
0.377049
0.377049
0.377049
0
0
0
0.021277
0.186538
520
26
58
20
0.843972
0.384615
0
0.428571
0
0
0
0
0
0
0
0
0
1
0.428571
false
0
0.142857
0
0.571429
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
c428e6856a3c08cda06817e9a636e40f9bb36701
28
py
Python
tdclient/version.py
minchuang/td-client-python
6cf6dfbb60119f400274491d3e942d4f9fbcebd6
[ "Apache-2.0" ]
2
2019-02-22T11:56:17.000Z
2019-02-25T10:09:46.000Z
tdclient/version.py
minchuang/td-client-python
6cf6dfbb60119f400274491d3e942d4f9fbcebd6
[ "Apache-2.0" ]
null
null
null
tdclient/version.py
minchuang/td-client-python
6cf6dfbb60119f400274491d3e942d4f9fbcebd6
[ "Apache-2.0" ]
null
null
null
__version__ = "0.12.1.dev0"
14
27
0.678571
5
28
3
1
0
0
0
0
0
0
0
0
0
0
0.2
0.107143
28
1
28
28
0.4
0
0
0
0
0
0.392857
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c47aa102369a24662f4ad636e755afa0f84d0b0d
272
py
Python
Models/__init__.py
Sijiu/Xbill
b4f18d3e7db3b17ed4ccddc6a8971c25931428eb
[ "MIT" ]
14
2020-03-15T13:40:02.000Z
2021-06-15T18:04:08.000Z
Models/__init__.py
Sijiu/Xbill
b4f18d3e7db3b17ed4ccddc6a8971c25931428eb
[ "MIT" ]
1
2020-05-24T13:14:46.000Z
2020-05-24T13:14:46.000Z
Models/__init__.py
Sijiu/Xbill
b4f18d3e7db3b17ed4ccddc6a8971c25931428eb
[ "MIT" ]
3
2020-05-05T00:23:36.000Z
2021-06-10T01:18:16.000Z
from .BaseModel import database, BillModel from .AlipayBill import AlipayBill from .WeChatBill import WeChatBill from .ICBCBill import ICBCBill from .XBill import XBill def create_table(): database.create_tables([AlipayBill, WeChatBill, ICBCBill, XBill], safe=True)
27.2
80
0.805147
33
272
6.575758
0.454545
0
0
0
0
0
0
0
0
0
0
0
0.125
272
9
81
30.222222
0.911765
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
true
0
0.714286
0
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
67135b947e455619e93cbb7468d877d070fe3de2
52
py
Python
homeassistant/components/hp_ilo/__init__.py
domwillcode/home-assistant
f170c80bea70c939c098b5c88320a1c789858958
[ "Apache-2.0" ]
30,023
2016-04-13T10:17:53.000Z
2020-03-02T12:56:31.000Z
homeassistant/components/hp_ilo/__init__.py
jagadeeshvenkatesh/core
1bd982668449815fee2105478569f8e4b5670add
[ "Apache-2.0" ]
31,101
2020-03-02T13:00:16.000Z
2022-03-31T23:57:36.000Z
homeassistant/components/hp_ilo/__init__.py
jagadeeshvenkatesh/core
1bd982668449815fee2105478569f8e4b5670add
[ "Apache-2.0" ]
11,956
2016-04-13T18:42:31.000Z
2020-03-02T09:32:12.000Z
"""The HP Integrated Lights-Out (iLO) component."""
26
51
0.692308
7
52
5.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.115385
52
1
52
52
0.782609
0.865385
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
673c943da50db2408c849b762d87686654ba51ac
457
py
Python
tests/unit/test_trustar.py
trustar/trustar-sdk2-proto
cd0840ad494b95de3f687327da354ad9bdec2040
[ "Apache-2.0" ]
2
2021-07-23T15:36:41.000Z
2021-11-08T07:37:42.000Z
tests/unit/test_trustar.py
trustar/trustar-sdk2-proto
cd0840ad494b95de3f687327da354ad9bdec2040
[ "Apache-2.0" ]
5
2021-01-25T19:34:35.000Z
2021-07-15T21:51:27.000Z
tests/unit/test_trustar.py
trustar/trustar-sdk2-proto
cd0840ad494b95de3f687327da354ad9bdec2040
[ "Apache-2.0" ]
1
2021-11-11T21:26:59.000Z
2021-11-11T21:26:59.000Z
import pytest from trustar2.trustar import TruStar proxy = {"https": "https://user:pass@le.proxy.com", "http":None} @pytest.fixture def trustar_with_proxy(): return TruStar(api_key="xxxx", api_secret="xxx", client_metatag="test_env", proxy=proxy) def test_trustar_proxy(ts): assert ts.get_proxy() == {} def test_trustar_with_proxy(trustar_with_proxy): assert trustar_with_proxy.get_proxy() == {"https": "https://user:pass@le.proxy.com"}
24.052632
92
0.728665
67
457
4.716418
0.432836
0.139241
0.202532
0.120253
0.208861
0.208861
0.208861
0.208861
0
0
0
0.002469
0.113786
457
18
93
25.388889
0.777778
0
0
0
0
0
0.194748
0
0
0
0
0
0.2
1
0.3
false
0.2
0.2
0.1
0.6
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
6748eb432f0ce02b2ea331f12ad9566ed68138c5
193
py
Python
masrt_files/emarald/secret.py
Masrt200/Glimpse-of-ISM
6d7e33e77bcb0f7fb92b4dc0e2d93a892032385e
[ "MIT" ]
2
2021-04-24T15:02:09.000Z
2021-04-24T15:04:54.000Z
masrt_files/emarald/secret.py
Masrt200/Glimpse-of-ISM
6d7e33e77bcb0f7fb92b4dc0e2d93a892032385e
[ "MIT" ]
null
null
null
masrt_files/emarald/secret.py
Masrt200/Glimpse-of-ISM
6d7e33e77bcb0f7fb92b4dc0e2d93a892032385e
[ "MIT" ]
null
null
null
flag="OYE, jaa 4 plate emarald se cheese maggi aur patties le kar aa... aur bolna kharcha mere khate mie likh dene!! ISM: TOH_SANDEEP_KO_BULANA_PADTA_HAI" binary_password="C(uiICD@CADDEBNEEDD"
64.333333
154
0.797927
34
193
4.352941
0.970588
0
0
0
0
0
0
0
0
0
0
0.005917
0.124352
193
2
155
96.5
0.869822
0
0
0
0
0.5
0.860104
0.160622
0
0
0
0
0
1
0
false
0.5
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
676eb843df977f69a7e25324ba95618804a09b08
87
py
Python
test/jar_lister.py
xingao267/rules_scala
f2647ed38afc845eb09cc656e7e98cc2b6b6f3d7
[ "Apache-2.0" ]
1
2021-04-28T21:40:28.000Z
2021-04-28T21:40:28.000Z
test/jar_lister.py
xingao267/rules_scala
f2647ed38afc845eb09cc656e7e98cc2b6b6f3d7
[ "Apache-2.0" ]
null
null
null
test/jar_lister.py
xingao267/rules_scala
f2647ed38afc845eb09cc656e7e98cc2b6b6f3d7
[ "Apache-2.0" ]
null
null
null
import zipfile import sys for n in zipfile.ZipFile(sys.argv[1]).namelist(): print n
14.5
49
0.735632
15
87
4.266667
0.666667
0
0
0
0
0
0
0
0
0
0
0.013514
0.149425
87
5
50
17.4
0.851351
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.5
null
null
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
677342d9d6104157815cbedb182c63d8e9332527
207
py
Python
backend/forum/poll/apps.py
karolyi/forum-django
a498be3123deb836e0108258c493b88c645b2163
[ "MIT" ]
7
2016-09-20T11:49:49.000Z
2017-06-24T23:51:56.000Z
backend/forum/poll/apps.py
karolyi/forum-django
a498be3123deb836e0108258c493b88c645b2163
[ "MIT" ]
17
2019-12-22T10:41:48.000Z
2021-11-17T10:58:50.000Z
backend/forum/poll/apps.py
karolyi/forum-django
a498be3123deb836e0108258c493b88c645b2163
[ "MIT" ]
1
2016-09-20T11:50:57.000Z
2016-09-20T11:50:57.000Z
from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class PollConfig(AppConfig): name = 'forum.poll' verbose_name = _('Forum: Polls') label = 'forum_poll'
23
55
0.7343
26
207
5.653846
0.692308
0.136054
0
0
0
0
0
0
0
0
0
0
0.173913
207
8
56
25.875
0.859649
0
0
0
0
0
0.154589
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
677efba8e008a0bb2388dded6d554bd064972009
6,808
py
Python
torch/nn/modules/instancenorm.py
UmaTaru/run
be29e4d41a4de3dee27cd6796801bfe51382d294
[ "MIT" ]
null
null
null
torch/nn/modules/instancenorm.py
UmaTaru/run
be29e4d41a4de3dee27cd6796801bfe51382d294
[ "MIT" ]
null
null
null
torch/nn/modules/instancenorm.py
UmaTaru/run
be29e4d41a4de3dee27cd6796801bfe51382d294
[ "MIT" ]
null
null
null
from .batchnorm import _BatchNorm from .. import functional as F class _InstanceNorm(_BatchNorm): def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=False): super(_InstanceNorm, self).__init__( num_features, eps, momentum, affine) def forward(self, input): self._check_input_dim(input) b, c = input.size(0), input.size(1) # Repeat stored stats and affine transform params running_mean = self.running_mean.repeat(b) running_var = self.running_var.repeat(b) weight, bias = None, None if self.affine: weight = self.weight.repeat(b) bias = self.bias.repeat(b) # Apply instance norm input_reshaped = input.contiguous().view(1, b * c, *input.size()[2:]) out = F.batch_norm( input_reshaped, running_mean, running_var, weight, bias, self.training, self.momentum, self.eps) # Reshape back self.running_mean.copy_(running_mean.view(b, c).mean(0)) self.running_var.copy_(running_var.view(b, c).mean(0)) return out.view(b, c, *input.size()[2:]) def eval(self): return self class InstanceNorm1d(_InstanceNorm): r"""Applies Instance Normalization over a 2d or 3d input that is seen as a mini-batch. .. math:: y = \frac{x - mean[x]}{ \sqrt{Var[x]} + \epsilon} * gamma + beta The mean and standard-deviation are calculated per-dimension separately for each object in a mini-batch. Gamma and beta are learnable parameter vectors of size C (where C is the input size). During training, this layer keeps a running estimate of its computed mean and variance. The running sum is kept with a default momentum of 0.1. At evaluation time (`.eval()`), the default behaviour of the InstanceNorm module stays the same i.e. running mean/variance is NOT used for normalization. One can force using stored mean and variance with `.train(False)` method. Args: num_features: num_features from an expected input of size `batch_size x num_features x width` eps: a value added to the denominator for numerical stability. Default: 1e-5 momentum: the value used for the running_mean and running_var computation. 
Default: 0.1 affine: a boolean value that when set to true, gives the layer learnable affine parameters. Shape: - Input: :math:`(N, C, L)` - Output: :math:`(N, C, L)` (same shape as input) Examples: >>> # Without Learnable Parameters >>> m = nn.InstanceNorm1d(100) >>> # With Learnable Parameters >>> m = nn.InstanceNorm1d(100, affine=True) >>> input = autograd.Variable(torch.randn(20, 100)) >>> output = m(input) """ def _check_input_dim(self, input): if input.dim() != 3: raise ValueError('expected 2D or 3D input (got {}D input)' .format(input.dim())) super(InstanceNorm1d, self)._check_input_dim(input) class InstanceNorm2d(_InstanceNorm): r"""Applies Instance Normalization over a 4d input that is seen as a mini-batch of 3d inputs .. math:: y = \frac{x - mean[x]}{ \sqrt{Var[x]} + \epsilon} * gamma + beta The mean and standard-deviation are calculated per-dimension separately for each object in a mini-batch. Gamma and beta are learnable parameter vectors of size C (where C is the input size). During training, this layer keeps a running estimate of its computed mean and variance. The running sum is kept with a default momentum of 0.1. At evaluation time (`.eval()`), the default behaviour of the InstanceNorm module stays the same i.e. running mean/variance is NOT used for normalization. One can force using stored mean and variance with `.train(False)` method. Args: num_features: num_features from an expected input of size batch_size x num_features x height x width eps: a value added to the denominator for numerical stability. Default: 1e-5 momentum: the value used for the running_mean and running_var computation. Default: 0.1 affine: a boolean value that when set to true, gives the layer learnable affine parameters. 
Shape: - Input: :math:`(N, C, H, W)` - Output: :math:`(N, C, H, W)` (same shape as input) Examples: >>> # Without Learnable Parameters >>> m = nn.InstanceNorm2d(100) >>> # With Learnable Parameters >>> m = nn.InstanceNorm2d(100, affine=True) >>> input = autograd.Variable(torch.randn(20, 100, 35, 45)) >>> output = m(input) """ def _check_input_dim(self, input): if input.dim() != 4: raise ValueError('expected 4D input (got {}D input)' .format(input.dim())) super(InstanceNorm2d, self)._check_input_dim(input) class InstanceNorm3d(_InstanceNorm): r"""Applies Instance Normalization over a 5d input that is seen as a mini-batch of 4d inputs .. math:: y = \frac{x - mean[x]}{ \sqrt{Var[x]} + \epsilon} * gamma + beta The mean and standard-deviation are calculated per-dimension separately for each object in a mini-batch. Gamma and beta are learnable parameter vectors of size C (where C is the input size). During training, this layer keeps a running estimate of its computed mean and variance. The running sum is kept with a default momentum of 0.1. At evaluation time (`.eval()`), the default behaviour of the InstanceNorm module stays the same i.e. running mean/variance is NOT used for normalization. One can force using stored mean and variance with `.train(False)` method. Args: num_features: num_features from an expected input of size batch_size x num_features x depth x height x width eps: a value added to the denominator for numerical stability. Default: 1e-5 momentum: the value used for the running_mean and running_var computation. Default: 0.1 affine: a boolean value that when set to true, gives the layer learnable affine parameters. 
Shape: - Input: :math:`(N, C, D, H, W)` - Output: :math:`(N, C, D, H, W)` (same shape as input) Examples: >>> # Without Learnable Parameters >>> m = nn.InstanceNorm3d(100) >>> # With Learnable Parameters >>> m = nn.InstanceNorm3d(100, affine=True) >>> input = autograd.Variable(torch.randn(20, 100, 35, 45, 10)) >>> output = m(input) """ def _check_input_dim(self, input): if input.dim() != 5: raise ValueError('expected 5D input (got {}D input)' .format(input.dim())) super(InstanceNorm3d, self)._check_input_dim(input)
39.581395
116
0.649236
953
6,808
4.563484
0.182581
0.023914
0.020924
0.030352
0.805932
0.785238
0.726604
0.694872
0.6659
0.652564
0
0.019094
0.253819
6,808
171
117
39.812866
0.837008
0.660693
0
0.133333
0
0
0.053111
0
0
0
0
0
0
1
0.133333
false
0
0.044444
0.022222
0.311111
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
67945fc4b1d6bafe6d5374d497cb5e491444e1ee
284
py
Python
dnachisel/DnaOptimizationProblem/__init__.py
simone-pignotti/DnaChisel
b7f0f925c9daefcc5fec903a13cfa74c3b726a7a
[ "MIT" ]
124
2017-11-14T14:42:25.000Z
2022-03-31T08:02:07.000Z
dnachisel/DnaOptimizationProblem/__init__.py
simone-pignotti/DnaChisel
b7f0f925c9daefcc5fec903a13cfa74c3b726a7a
[ "MIT" ]
65
2017-11-15T07:25:38.000Z
2022-01-31T10:38:45.000Z
dnachisel/DnaOptimizationProblem/__init__.py
simone-pignotti/DnaChisel
b7f0f925c9daefcc5fec903a13cfa74c3b726a7a
[ "MIT" ]
31
2018-10-18T12:59:47.000Z
2022-02-11T16:54:43.000Z
"""Public API of the DnaOptimizationProblem subpackage.

Re-exports the problem classes and their dedicated error type so callers can
import them directly from this package.
"""

from .NoSolutionError import NoSolutionError
from .DnaOptimizationProblem import DnaOptimizationProblem
from .CircularDnaOptimizationProblem import CircularDnaOptimizationProblem

__all__ = [
    "NoSolutionError",
    "DnaOptimizationProblem",
    "CircularDnaOptimizationProblem",
]
28.4
74
0.838028
16
284
14.625
0.375
0
0
0
0
0
0
0
0
0
0
0
0.112676
284
9
75
31.555556
0.928571
0
0
0
0
0
0.235915
0.183099
0
0
0
0
0
1
0
false
0
0.375
0
0.375
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
67a8e93a9f23b908a8d8906475339104f28301c6
64,778
py
Python
1-XD_XD/code/v9s.py
sethahrenbach/BuildingDetectors_Round2
19545b6babd176bcca76ce36df4c34ce9fe98056
[ "Apache-2.0" ]
196
2017-07-30T12:51:00.000Z
2022-03-22T12:16:23.000Z
1-XD_XD/code/v9s.py
sethahrenbach/BuildingDetectors_Round2
19545b6babd176bcca76ce36df4c34ce9fe98056
[ "Apache-2.0" ]
7
2017-10-23T06:21:50.000Z
2022-03-10T10:17:42.000Z
1-XD_XD/code/v9s.py
sethahrenbach/BuildingDetectors_Round2
19545b6babd176bcca76ce36df4c34ce9fe98056
[ "Apache-2.0" ]
81
2017-07-30T14:10:25.000Z
2021-11-15T04:15:06.000Z
# -*- coding: utf-8 -*- """ v9s model * Input: v5_im Author: Kohei <i@ho.lc> """ from logging import getLogger, Formatter, StreamHandler, INFO, FileHandler from pathlib import Path import subprocess import argparse import math import glob import sys import json import re import warnings import scipy import tqdm import click import tables as tb import pandas as pd import numpy as np from keras.models import Model from keras.engine.topology import merge as merge_l from keras.layers import ( Input, Convolution2D, MaxPooling2D, UpSampling2D, Reshape, core, Dropout, Activation, BatchNormalization) from keras.optimizers import Adam from keras.callbacks import ModelCheckpoint, EarlyStopping, History from keras import backend as K import skimage.transform import skimage.morphology import rasterio.features import shapely.wkt import shapely.ops import shapely.geometry MODEL_NAME = 'v9s' ORIGINAL_SIZE = 650 INPUT_SIZE = 256 LOGFORMAT = '%(asctime)s %(levelname)s %(message)s' BASE_DIR = "/data/train" WORKING_DIR = "/data/working" IMAGE_DIR = "/data/working/images/{}".format('v5') MODEL_DIR = "/data/working/models/{}".format(MODEL_NAME) FN_SOLUTION_CSV = "/data/output/{}.csv".format(MODEL_NAME) # Parameters MIN_POLYGON_AREA = 30 # Input files FMT_TRAIN_SUMMARY_PATH = str( Path(BASE_DIR) / Path("{prefix:s}_Train/") / Path("summaryData/{prefix:s}_Train_Building_Solutions.csv")) FMT_TRAIN_RGB_IMAGE_PATH = str( Path(BASE_DIR) / Path("{prefix:s}_Train/") / Path("RGB-PanSharpen/RGB-PanSharpen_{image_id:s}.tif")) FMT_TEST_RGB_IMAGE_PATH = str( Path(BASE_DIR) / Path("{prefix:s}_Test_public/") / Path("RGB-PanSharpen/RGB-PanSharpen_{image_id:s}.tif")) FMT_TRAIN_MSPEC_IMAGE_PATH = str( Path(BASE_DIR) / Path("{prefix:s}_Train/") / Path("MUL-PanSharpen/MUL-PanSharpen_{image_id:s}.tif")) FMT_TEST_MSPEC_IMAGE_PATH = str( Path(BASE_DIR) / Path("{prefix:s}_Test_public/") / Path("MUL-PanSharpen/MUL-PanSharpen_{image_id:s}.tif")) # Preprocessing result FMT_BANDCUT_TH_PATH = IMAGE_DIR + 
"/bandcut{}.csv" FMT_MUL_BANDCUT_TH_PATH = IMAGE_DIR + "/mul_bandcut{}.csv" # Image list, Image container and mask container FMT_VALTRAIN_IMAGELIST_PATH = IMAGE_DIR + "/{prefix:s}_valtrain_ImageId.csv" FMT_VALTEST_IMAGELIST_PATH = IMAGE_DIR + "/{prefix:s}_valtest_ImageId.csv" FMT_VALTRAIN_IM_STORE = IMAGE_DIR + "/valtrain_{}_im.h5" FMT_VALTEST_IM_STORE = IMAGE_DIR + "/valtest_{}_im.h5" FMT_VALTRAIN_MASK_STORE = IMAGE_DIR + "/valtrain_{}_mask.h5" FMT_VALTEST_MASK_STORE = IMAGE_DIR + "/valtest_{}_mask.h5" FMT_VALTRAIN_MUL_STORE = IMAGE_DIR + "/valtrain_{}_mul.h5" FMT_VALTEST_MUL_STORE = IMAGE_DIR + "/valtest_{}_mul.h5" FMT_TRAIN_IMAGELIST_PATH = IMAGE_DIR + "/{prefix:s}_train_ImageId.csv" FMT_TEST_IMAGELIST_PATH = IMAGE_DIR + "/{prefix:s}_test_ImageId.csv" FMT_TRAIN_IM_STORE = IMAGE_DIR + "/train_{}_im.h5" FMT_TEST_IM_STORE = IMAGE_DIR + "/test_{}_im.h5" FMT_TRAIN_MASK_STORE = IMAGE_DIR + "/train_{}_mask.h5" FMT_TRAIN_MUL_STORE = IMAGE_DIR + "/train_{}_mul.h5" FMT_TEST_MUL_STORE = IMAGE_DIR + "/test_{}_mul.h5" FMT_IMMEAN = IMAGE_DIR + "/{}_immean.h5" FMT_MULMEAN = IMAGE_DIR + "/{}_mulmean.h5" # Model files FMT_VALMODEL_PATH = MODEL_DIR + "/{}_val_weights.h5" FMT_FULLMODEL_PATH = MODEL_DIR + "/{}_full_weights.h5" FMT_VALMODEL_HIST = MODEL_DIR + "/{}_val_hist.csv" FMT_VALMODEL_EVALHIST = MODEL_DIR + "/{}_val_evalhist.csv" FMT_VALMODEL_EVALTHHIST = MODEL_DIR + "/{}_val_evalhist_th.csv" # Prediction & polygon result FMT_TESTPRED_PATH = MODEL_DIR + "/{}_pred.h5" FMT_VALTESTPRED_PATH = MODEL_DIR + "/{}_eval_pred.h5" FMT_VALTESTPOLY_PATH = MODEL_DIR + "/{}_eval_poly.csv" FMT_VALTESTTRUTH_PATH = MODEL_DIR + "/{}_eval_poly_truth.csv" FMT_VALTESTPOLY_OVALL_PATH = MODEL_DIR + "/eval_poly.csv" FMT_VALTESTTRUTH_OVALL_PATH = MODEL_DIR + "/eval_poly_truth.csv" FMT_TESTPOLY_PATH = MODEL_DIR + "/{}_poly.csv" # Model related files (others) FMT_VALMODEL_LAST_PATH = MODEL_DIR + "/{}_val_weights_last.h5" FMT_FULLMODEL_LAST_PATH = MODEL_DIR + "/{}_full_weights_last.h5" # Logger 
warnings.simplefilter("ignore", UserWarning) handler = StreamHandler() handler.setLevel(INFO) handler.setFormatter(Formatter(LOGFORMAT)) fh_handler = FileHandler(".{}.log".format(MODEL_NAME)) fh_handler.setFormatter(Formatter(LOGFORMAT)) logger = getLogger('spacenet2') logger.setLevel(INFO) if __name__ == '__main__': logger.addHandler(handler) logger.addHandler(fh_handler) # Fix seed for reproducibility np.random.seed(1145141919) def directory_name_to_area_id(datapath): """ Directory name to AOI number Usage: >>> directory_name_to_area_id("/data/test/AOI_2_Vegas") 2 """ dir_name = Path(datapath).name if dir_name.startswith('AOI_2_Vegas'): return 2 elif dir_name.startswith('AOI_3_Paris'): return 3 elif dir_name.startswith('AOI_4_Shanghai'): return 4 elif dir_name.startswith('AOI_5_Khartoum'): return 5 else: raise RuntimeError("Unsupported city id is given.") def _remove_interiors(line): if "), (" in line: line_prefix = line.split('), (')[0] line_terminate = line.split('))",')[-1] line = ( line_prefix + '))",' + line_terminate ) return line def __load_band_cut_th(band_fn, bandsz=3): df = pd.read_csv(band_fn, index_col='area_id') all_band_cut_th = {area_id: {} for area_id in range(2, 6)} for area_id, row in df.iterrows(): for chan_i in range(bandsz): all_band_cut_th[area_id][chan_i] = dict( min=row['chan{}_min'.format(chan_i)], max=row['chan{}_max'.format(chan_i)], ) return all_band_cut_th def _calc_fscore_per_aoi(area_id): prefix = area_id_to_prefix(area_id) truth_file = FMT_VALTESTTRUTH_PATH.format(prefix) poly_file = FMT_VALTESTPOLY_PATH.format(prefix) cmd = [ 'java', '-jar', '/root/visualizer-2.0/visualizer.jar', '-truth', truth_file, '-solution', poly_file, '-no-gui', '-band-triplets', '/root/visualizer-2.0/data/band-triplets.txt', '-image-dir', 'pass', ] proc = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) stdout_data, stderr_data = proc.communicate() lines = [line for line in stdout_data.decode('utf8').split('\n')[-10:]] """ Overall 
F-score : 0.85029 AOI_2_Vegas: TP : 27827 FP : 4999 FN : 4800 Precision: 0.847712 Recall : 0.852883 F-score : 0.85029 """ if stdout_data.decode('utf8').strip().endswith("Overall F-score : 0"): overall_fscore = 0 tp = 0 fp = 0 fn = 0 precision = 0 recall = 0 fscore = 0 elif len(lines) > 0 and lines[0].startswith("Overall F-score : "): assert lines[0].startswith("Overall F-score : ") assert lines[2].startswith("AOI_") assert lines[3].strip().startswith("TP") assert lines[4].strip().startswith("FP") assert lines[5].strip().startswith("FN") assert lines[6].strip().startswith("Precision") assert lines[7].strip().startswith("Recall") assert lines[8].strip().startswith("F-score") overall_fscore = float(re.findall("([\d\.]+)", lines[0])[0]) tp = int(re.findall("(\d+)", lines[3])[0]) fp = int(re.findall("(\d+)", lines[4])[0]) fn = int(re.findall("(\d+)", lines[5])[0]) precision = float(re.findall("([\d\.]+)", lines[6])[0]) recall = float(re.findall("([\d\.]+)", lines[7])[0]) fscore = float(re.findall("([\d\.]+)", lines[8])[0]) else: logger.warn("Unexpected data >>> " + stdout_data.decode('utf8')) raise RuntimeError("Unsupported format") return { 'overall_fscore': overall_fscore, 'tp': tp, 'fp': fp, 'fn': fn, 'precision': precision, 'recall': recall, 'fscore': fscore, } def prefix_to_area_id(prefix): area_dict = { 'AOI_2_Vegas': 2, 'AOI_3_Paris': 3, 'AOI_4_Shanghai': 4, 'AOI_5_Khartoum': 5, } return area_dict[area_id] def area_id_to_prefix(area_id): area_dict = { 2: 'AOI_2_Vegas', 3: 'AOI_3_Paris', 4: 'AOI_4_Shanghai', 5: 'AOI_5_Khartoum', } return area_dict[area_id] # --------------------------------------------------------- # main def _get_model_parameter(area_id): prefix = area_id_to_prefix(area_id) fn_hist = FMT_VALMODEL_EVALTHHIST.format(prefix) best_row = pd.read_csv(fn_hist).sort_values( by='fscore', ascending=False, ).iloc[0] param = dict( fn_epoch=int(best_row['zero_base_epoch']), min_poly_area=int(best_row['min_area_th']), ) return param def 
get_resized_raster_3chan_image(image_id, band_cut_th=None): fn = train_image_id_to_path(image_id) with rasterio.open(fn, 'r') as f: values = f.read().astype(np.float32) for chan_i in range(3): min_val = band_cut_th[chan_i]['min'] max_val = band_cut_th[chan_i]['max'] values[chan_i] = np.clip(values[chan_i], min_val, max_val) values[chan_i] = (values[chan_i] - min_val) / (max_val - min_val) values = np.swapaxes(values, 0, 2) values = np.swapaxes(values, 0, 1) values = skimage.transform.resize(values, (INPUT_SIZE, INPUT_SIZE)) return values def get_resized_raster_3chan_image_test(image_id, band_cut_th=None): fn = test_image_id_to_path(image_id) with rasterio.open(fn, 'r') as f: values = f.read().astype(np.float32) for chan_i in range(3): min_val = band_cut_th[chan_i]['min'] max_val = band_cut_th[chan_i]['max'] values[chan_i] = np.clip(values[chan_i], min_val, max_val) values[chan_i] = (values[chan_i] - min_val) / (max_val - min_val) values = np.swapaxes(values, 0, 2) values = np.swapaxes(values, 0, 1) values = skimage.transform.resize(values, (INPUT_SIZE, INPUT_SIZE)) return values def image_mask_resized_from_summary(df, image_id): im_mask = np.zeros((650, 650)) for idx, row in df[df.ImageId == image_id].iterrows(): shape_obj = shapely.wkt.loads(row.PolygonWKT_Pix) if shape_obj.exterior is not None: coords = list(shape_obj.exterior.coords) x = [round(float(pp[0])) for pp in coords] y = [round(float(pp[1])) for pp in coords] yy, xx = skimage.draw.polygon(y, x, (650, 650)) im_mask[yy, xx] = 1 interiors = shape_obj.interiors for interior in interiors: coords = list(interior.coords) x = [round(float(pp[0])) for pp in coords] y = [round(float(pp[1])) for pp in coords] yy, xx = skimage.draw.polygon(y, x, (650, 650)) im_mask[yy, xx] = 0 im_mask = skimage.transform.resize(im_mask, (INPUT_SIZE, INPUT_SIZE)) im_mask = (im_mask > 0.5).astype(np.uint8) return im_mask def train_test_image_prep(area_id): prefix = area_id_to_prefix(area_id) df_train = pd.read_csv( 
FMT_TRAIN_IMAGELIST_PATH.format(prefix=prefix), index_col='ImageId') df_test = pd.read_csv( FMT_TEST_IMAGELIST_PATH.format(prefix=prefix), index_col='ImageId') band_cut_th = __load_band_cut_th( FMT_BANDCUT_TH_PATH.format(prefix))[area_id] df_summary = _load_train_summary_data(area_id) fn = FMT_TRAIN_IM_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_train.index, total=len(df_train)): im = get_resized_raster_3chan_image(image_id, band_cut_th) atom = tb.Atom.from_dtype(im.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im.shape, filters=filters) ds[:] = im fn = FMT_TEST_IM_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_test.index, total=len(df_test)): im = get_resized_raster_3chan_image_test(image_id, band_cut_th) atom = tb.Atom.from_dtype(im.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im.shape, filters=filters) ds[:] = im fn = FMT_TRAIN_MASK_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_train.index, total=len(df_train)): im_mask = image_mask_resized_from_summary(df_summary, image_id) atom = tb.Atom.from_dtype(im_mask.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im_mask.shape, filters=filters) ds[:] = im_mask def valtrain_test_image_prep(area_id): prefix = area_id_to_prefix(area_id) logger.info("valtrain_test_image_prep for {}".format(prefix)) df_train = pd.read_csv( FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix), index_col='ImageId') df_test = pd.read_csv( FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix), index_col='ImageId') band_cut_th = __load_band_cut_th( FMT_BANDCUT_TH_PATH.format(prefix))[area_id] df_summary = 
_load_train_summary_data(area_id) fn = FMT_VALTRAIN_IM_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_train.index, total=len(df_train)): im = get_resized_raster_3chan_image(image_id, band_cut_th) atom = tb.Atom.from_dtype(im.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im.shape, filters=filters) ds[:] = im fn = FMT_VALTEST_IM_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_test.index, total=len(df_test)): im = get_resized_raster_3chan_image(image_id, band_cut_th) atom = tb.Atom.from_dtype(im.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im.shape, filters=filters) ds[:] = im fn = FMT_VALTRAIN_MASK_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_train.index, total=len(df_train)): im_mask = image_mask_resized_from_summary(df_summary, image_id) atom = tb.Atom.from_dtype(im_mask.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im_mask.shape, filters=filters) ds[:] = im_mask fn = FMT_VALTEST_MASK_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_test.index, total=len(df_test)): im_mask = image_mask_resized_from_summary(df_summary, image_id) atom = tb.Atom.from_dtype(im_mask.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im_mask.shape, filters=filters) ds[:] = im_mask def train_test_mul_image_prep(area_id): prefix = area_id_to_prefix(area_id) df_train = pd.read_csv( FMT_TRAIN_IMAGELIST_PATH.format(prefix=prefix), index_col='ImageId') df_test = pd.read_csv( FMT_TEST_IMAGELIST_PATH.format(prefix=prefix), 
index_col='ImageId') band_rgb_th = __load_band_cut_th( FMT_BANDCUT_TH_PATH.format(prefix))[area_id] band_mul_th = __load_band_cut_th( FMT_MUL_BANDCUT_TH_PATH.format(prefix), bandsz=8)[area_id] df_summary = _load_train_summary_data(area_id) fn = FMT_TRAIN_MUL_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_train.index, total=len(df_train)): im = get_resized_raster_8chan_image( image_id, band_rgb_th, band_mul_th) atom = tb.Atom.from_dtype(im.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im.shape, filters=filters) ds[:] = im fn = FMT_TEST_MUL_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_test.index, total=len(df_test)): im = get_resized_raster_8chan_image_test( image_id, band_rgb_th, band_mul_th) atom = tb.Atom.from_dtype(im.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im.shape, filters=filters) ds[:] = im def valtrain_test_mul_image_prep(area_id): prefix = area_id_to_prefix(area_id) logger.info("valtrain_test_image_prep for {}".format(prefix)) df_train = pd.read_csv( FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix), index_col='ImageId') df_test = pd.read_csv( FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix), index_col='ImageId') band_rgb_th = __load_band_cut_th( FMT_BANDCUT_TH_PATH.format(prefix))[area_id] band_mul_th = __load_band_cut_th( FMT_MUL_BANDCUT_TH_PATH.format(prefix), bandsz=8)[area_id] df_summary = _load_train_summary_data(area_id) fn = FMT_VALTRAIN_MUL_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_train.index, total=len(df_train)): im = get_resized_raster_8chan_image( image_id, band_rgb_th, band_mul_th) atom = tb.Atom.from_dtype(im.dtype) filters = 
tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im.shape, filters=filters) ds[:] = im fn = FMT_VALTEST_MUL_STORE.format(prefix) logger.info("Prepare image container: {}".format(fn)) with tb.open_file(fn, 'w') as f: for image_id in tqdm.tqdm(df_test.index, total=len(df_test)): im = get_resized_raster_8chan_image( image_id, band_rgb_th, band_mul_th) atom = tb.Atom.from_dtype(im.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, image_id, atom, im.shape, filters=filters) ds[:] = im def _load_train_summary_data(area_id): prefix = area_id_to_prefix(area_id) fn = FMT_TRAIN_SUMMARY_PATH.format(prefix=prefix) df = pd.read_csv(fn) return df def split_val_train_test(area_id): prefix = area_id_to_prefix(area_id) df = _load_train_summary_data(area_id) df_agg = df.groupby('ImageId').agg('first') image_id_list = df_agg.index.tolist() np.random.shuffle(image_id_list) sz_valtrain = int(len(image_id_list) * 0.7) sz_valtest = len(image_id_list) - sz_valtrain pd.DataFrame({'ImageId': image_id_list[:sz_valtrain]}).to_csv( FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix), index=False) pd.DataFrame({'ImageId': image_id_list[sz_valtrain:]}).to_csv( FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix), index=False) def train_image_id_to_mspec_path(image_id): prefix = image_id_to_prefix(image_id) fn = FMT_TRAIN_MSPEC_IMAGE_PATH.format( prefix=prefix, image_id=image_id) return fn def test_image_id_to_mspec_path(image_id): prefix = image_id_to_prefix(image_id) fn = FMT_TEST_MSPEC_IMAGE_PATH.format( prefix=prefix, image_id=image_id) return fn def train_image_id_to_path(image_id): prefix = image_id_to_prefix(image_id) fn = FMT_TRAIN_RGB_IMAGE_PATH.format( prefix=prefix, image_id=image_id) return fn def test_image_id_to_path(image_id): prefix = image_id_to_prefix(image_id) fn = FMT_TEST_RGB_IMAGE_PATH.format( prefix=prefix, image_id=image_id) return fn def image_id_to_prefix(image_id): prefix = image_id.split('img')[0][:-1] 
return prefix def calc_multiband_cut_threshold(area_id): rows = [] band_cut_th = __calc_multiband_cut_threshold(area_id) prefix = area_id_to_prefix(area_id) row = dict(prefix=area_id_to_prefix(area_id)) row['area_id'] = area_id for chan_i in band_cut_th.keys(): row['chan{}_max'.format(chan_i)] = band_cut_th[chan_i]['max'] row['chan{}_min'.format(chan_i)] = band_cut_th[chan_i]['min'] rows.append(row) pd.DataFrame(rows).to_csv(FMT_BANDCUT_TH_PATH.format(prefix), index=False) def __calc_multiband_cut_threshold(area_id): prefix = area_id_to_prefix(area_id) band_values = {k: [] for k in range(3)} band_cut_th = {k: dict(max=0, min=0) for k in range(3)} image_id_list = pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format( prefix=prefix)).ImageId.tolist() for image_id in tqdm.tqdm(image_id_list[:500]): image_fn = train_image_id_to_path(image_id) with rasterio.open(image_fn, 'r') as f: values = f.read().astype(np.float32) for i_chan in range(3): values_ = values[i_chan].ravel().tolist() values_ = np.array( [v for v in values_ if v != 0] ) # Remove sensored mask band_values[i_chan].append(values_) image_id_list = pd.read_csv(FMT_VALTEST_IMAGELIST_PATH.format( prefix=prefix)).ImageId.tolist() for image_id in tqdm.tqdm(image_id_list[:500]): image_fn = train_image_id_to_path(image_id) with rasterio.open(image_fn, 'r') as f: values = f.read().astype(np.float32) for i_chan in range(3): values_ = values[i_chan].ravel().tolist() values_ = np.array( [v for v in values_ if v != 0] ) # Remove sensored mask band_values[i_chan].append(values_) for i_chan in range(3): band_values[i_chan] = np.concatenate( band_values[i_chan]).ravel() band_cut_th[i_chan]['max'] = scipy.percentile( band_values[i_chan], 98) band_cut_th[i_chan]['min'] = scipy.percentile( band_values[i_chan], 2) return band_cut_th def calc_mul_multiband_cut_threshold(area_id): rows = [] band_cut_th = __calc_mul_multiband_cut_threshold(area_id) prefix = area_id_to_prefix(area_id) row = dict(prefix=area_id_to_prefix(area_id)) 
row['area_id'] = area_id for chan_i in band_cut_th.keys(): row['chan{}_max'.format(chan_i)] = band_cut_th[chan_i]['max'] row['chan{}_min'.format(chan_i)] = band_cut_th[chan_i]['min'] rows.append(row) pd.DataFrame(rows).to_csv( FMT_MUL_BANDCUT_TH_PATH.format(prefix), index=False) def __calc_mul_multiband_cut_threshold(area_id): prefix = area_id_to_prefix(area_id) band_values = {k: [] for k in range(8)} band_cut_th = {k: dict(max=0, min=0) for k in range(8)} image_id_list = pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format( prefix=prefix)).ImageId.tolist() for image_id in tqdm.tqdm(image_id_list[:500]): image_fn = train_image_id_to_mspec_path(image_id) with rasterio.open(image_fn, 'r') as f: values = f.read().astype(np.float32) for i_chan in range(8): values_ = values[i_chan].ravel().tolist() values_ = np.array( [v for v in values_ if v != 0] ) # Remove sensored mask band_values[i_chan].append(values_) image_id_list = pd.read_csv(FMT_VALTEST_IMAGELIST_PATH.format( prefix=prefix)).ImageId.tolist() for image_id in tqdm.tqdm(image_id_list[:500]): image_fn = train_image_id_to_mspec_path(image_id) with rasterio.open(image_fn, 'r') as f: values = f.read().astype(np.float32) for i_chan in range(8): values_ = values[i_chan].ravel().tolist() values_ = np.array( [v for v in values_ if v != 0] ) # Remove sensored mask band_values[i_chan].append(values_) for i_chan in range(8): band_values[i_chan] = np.concatenate( band_values[i_chan]).ravel() band_cut_th[i_chan]['max'] = scipy.percentile( band_values[i_chan], 98) band_cut_th[i_chan]['min'] = scipy.percentile( band_values[i_chan], 2) return band_cut_th def get_unet(): conv_params = dict(activation='relu', border_mode='same') merge_params = dict(mode='concat', concat_axis=1) inputs = Input((8, 256, 256)) conv1 = Convolution2D(32, 3, 3, **conv_params)(inputs) conv1 = Convolution2D(32, 3, 3, **conv_params)(conv1) pool1 = MaxPooling2D(pool_size=(2, 2))(conv1) conv2 = Convolution2D(64, 3, 3, **conv_params)(pool1) conv2 = 
Convolution2D(64, 3, 3, **conv_params)(conv2) pool2 = MaxPooling2D(pool_size=(2, 2))(conv2) conv3 = Convolution2D(128, 3, 3, **conv_params)(pool2) conv3 = Convolution2D(128, 3, 3, **conv_params)(conv3) pool3 = MaxPooling2D(pool_size=(2, 2))(conv3) conv4 = Convolution2D(256, 3, 3, **conv_params)(pool3) conv4 = Convolution2D(256, 3, 3, **conv_params)(conv4) pool4 = MaxPooling2D(pool_size=(2, 2))(conv4) conv5 = Convolution2D(512, 3, 3, **conv_params)(pool4) conv5 = Convolution2D(512, 3, 3, **conv_params)(conv5) up6 = merge_l([UpSampling2D(size=(2, 2))(conv5), conv4], **merge_params) conv6 = Convolution2D(256, 3, 3, **conv_params)(up6) conv6 = Convolution2D(256, 3, 3, **conv_params)(conv6) up7 = merge_l([UpSampling2D(size=(2, 2))(conv6), conv3], **merge_params) conv7 = Convolution2D(128, 3, 3, **conv_params)(up7) conv7 = Convolution2D(128, 3, 3, **conv_params)(conv7) up8 = merge_l([UpSampling2D(size=(2, 2))(conv7), conv2], **merge_params) conv8 = Convolution2D(64, 3, 3, **conv_params)(up8) conv8 = Convolution2D(64, 3, 3, **conv_params)(conv8) up9 = merge_l([UpSampling2D(size=(2, 2))(conv8), conv1], **merge_params) conv9 = Convolution2D(32, 3, 3, **conv_params)(up9) conv9 = Convolution2D(32, 3, 3, **conv_params)(conv9) conv10 = Convolution2D(1, 1, 1, activation='sigmoid')(conv9) adam = Adam() model = Model(input=inputs, output=conv10) model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy', jaccard_coef, jaccard_coef_int]) return model def jaccard_coef(y_true, y_pred): smooth = 1e-12 intersection = K.sum(y_true * y_pred, axis=[0, -1, -2]) sum_ = K.sum(y_true + y_pred, axis=[0, -1, -2]) jac = (intersection + smooth) / (sum_ - intersection + smooth) return K.mean(jac) def jaccard_coef_int(y_true, y_pred): smooth = 1e-12 y_pred_pos = K.round(K.clip(y_pred, 0, 1)) intersection = K.sum(y_true * y_pred_pos, axis=[0, -1, -2]) sum_ = K.sum(y_true + y_pred_pos, axis=[0, -1, -2]) jac = (intersection + smooth) / (sum_ - intersection + smooth) return 
K.mean(jac) def generate_test_batch(area_id, batch_size=64, immean=None, enable_tqdm=False): prefix = area_id_to_prefix(area_id) df_test = pd.read_csv(FMT_TEST_IMAGELIST_PATH.format(prefix=prefix)) fn_im = FMT_TEST_MUL_STORE.format(prefix) image_id_list = df_test.ImageId.tolist() if enable_tqdm: pbar = tqdm.tqdm(total=len(image_id_list)) while 1: total_sz = len(image_id_list) n_batch = int(math.floor(total_sz / batch_size) + 1) with tb.open_file(fn_im, 'r') as f_im: for i_batch in range(n_batch): target_image_ids = image_id_list[ i_batch*batch_size:(i_batch+1)*batch_size ] if len(target_image_ids) == 0: continue X_test = [] y_test = [] for image_id in target_image_ids: im = np.array(f_im.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_test.append(im) mask = np.zeros((INPUT_SIZE, INPUT_SIZE)).astype(np.uint8) y_test.append(mask) X_test = np.array(X_test) y_test = np.array(y_test) y_test = y_test.reshape((-1, 1, INPUT_SIZE, INPUT_SIZE)) if immean is not None: X_test = X_test - immean if enable_tqdm: pbar.update(y_test.shape[0]) yield (X_test, y_test) if enable_tqdm: pbar.close() def get_resized_raster_8chan_image_test(image_id, band_rgb_th, band_mul_th): """ RGB + multispectral (total: 8 channels) """ im = [] fn = test_image_id_to_path(image_id) with rasterio.open(fn, 'r') as f: values = f.read().astype(np.float32) for chan_i in range(3): min_val = band_rgb_th[chan_i]['min'] max_val = band_rgb_th[chan_i]['max'] values[chan_i] = np.clip(values[chan_i], min_val, max_val) values[chan_i] = (values[chan_i] - min_val) / (max_val - min_val) im.append(skimage.transform.resize( values[chan_i], (INPUT_SIZE, INPUT_SIZE))) fn = test_image_id_to_mspec_path(image_id) with rasterio.open(fn, 'r') as f: values = f.read().astype(np.float32) usechannels = [1, 2, 5, 6, 7] for chan_i in usechannels: min_val = band_mul_th[chan_i]['min'] max_val = band_mul_th[chan_i]['max'] values[chan_i] = np.clip(values[chan_i], min_val, max_val) values[chan_i] = 
(values[chan_i] - min_val) / (max_val - min_val) im.append(skimage.transform.resize( values[chan_i], (INPUT_SIZE, INPUT_SIZE))) im = np.array(im) # (ch, w, h) im = np.swapaxes(im, 0, 2) # -> (h, w, ch) im = np.swapaxes(im, 0, 1) # -> (w, h, ch) return im def get_resized_raster_8chan_image(image_id, band_rgb_th, band_mul_th): """ RGB + multispectral (total: 8 channels) """ im = [] fn = train_image_id_to_path(image_id) with rasterio.open(fn, 'r') as f: values = f.read().astype(np.float32) for chan_i in range(3): min_val = band_rgb_th[chan_i]['min'] max_val = band_rgb_th[chan_i]['max'] values[chan_i] = np.clip(values[chan_i], min_val, max_val) values[chan_i] = (values[chan_i] - min_val) / (max_val - min_val) im.append(skimage.transform.resize( values[chan_i], (INPUT_SIZE, INPUT_SIZE))) fn = train_image_id_to_mspec_path(image_id) with rasterio.open(fn, 'r') as f: values = f.read().astype(np.float32) usechannels = [1, 2, 5, 6, 7] for chan_i in usechannels: min_val = band_mul_th[chan_i]['min'] max_val = band_mul_th[chan_i]['max'] values[chan_i] = np.clip(values[chan_i], min_val, max_val) values[chan_i] = (values[chan_i] - min_val) / (max_val - min_val) im.append(skimage.transform.resize( values[chan_i], (INPUT_SIZE, INPUT_SIZE))) im = np.array(im) # (ch, w, h) im = np.swapaxes(im, 0, 2) # -> (h, w, ch) im = np.swapaxes(im, 0, 1) # -> (w, h, ch) return im def _get_train_mul_data(area_id): """ RGB + multispectral (total: 8 channels) """ prefix = area_id_to_prefix(area_id) fn_train = FMT_TRAIN_IMAGELIST_PATH.format(prefix=prefix) df_train = pd.read_csv(fn_train) X_train = [] fn_im = FMT_TRAIN_MUL_STORE.format(prefix) with tb.open_file(fn_im, 'r') as f: for idx, image_id in enumerate(df_train.ImageId.tolist()): im = np.array(f.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_train.append(im) X_train = np.array(X_train) y_train = [] fn_mask = FMT_TRAIN_MASK_STORE.format(prefix) with tb.open_file(fn_mask, 'r') as f: for idx, image_id in 
enumerate(df_train.ImageId.tolist()): mask = np.array(f.get_node('/' + image_id)) mask = (mask > 0.5).astype(np.uint8) y_train.append(mask) y_train = np.array(y_train) y_train = y_train.reshape((-1, 1, INPUT_SIZE, INPUT_SIZE)) return X_train, y_train def _get_test_mul_data(area_id): """ RGB + multispectral (total: 8 channels) """ prefix = area_id_to_prefix(area_id) fn_test = FMT_TEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test) X_test = [] fn_im = FMT_TEST_MUL_STORE.format(prefix) with tb.open_file(fn_im, 'r') as f: for idx, image_id in enumerate(df_test.ImageId.tolist()): im = np.array(f.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_test.append(im) X_test = np.array(X_test) return X_test def _get_valtest_mul_data(area_id): prefix = area_id_to_prefix(area_id) fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test) X_val = [] fn_im = FMT_VALTEST_MUL_STORE.format(prefix) with tb.open_file(fn_im, 'r') as f: for idx, image_id in enumerate(df_test.ImageId.tolist()): im = np.array(f.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_val.append(im) X_val = np.array(X_val) y_val = [] fn_mask = FMT_VALTEST_MASK_STORE.format(prefix) with tb.open_file(fn_mask, 'r') as f: for idx, image_id in enumerate(df_test.ImageId.tolist()): mask = np.array(f.get_node('/' + image_id)) mask = (mask > 0.5).astype(np.uint8) y_val.append(mask) y_val = np.array(y_val) y_val = y_val.reshape((-1, 1, INPUT_SIZE, INPUT_SIZE)) return X_val, y_val def _get_valtrain_mul_data(area_id): prefix = area_id_to_prefix(area_id) fn_train = FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix) df_train = pd.read_csv(fn_train) X_val = [] fn_im = FMT_VALTRAIN_MUL_STORE.format(prefix) with tb.open_file(fn_im, 'r') as f: for idx, image_id in enumerate(df_train.ImageId.tolist()): im = np.array(f.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_val.append(im) X_val = 
np.array(X_val) y_val = [] fn_mask = FMT_VALTRAIN_MASK_STORE.format(prefix) with tb.open_file(fn_mask, 'r') as f: for idx, image_id in enumerate(df_train.ImageId.tolist()): mask = np.array(f.get_node('/' + image_id)) mask = (mask > 0.5).astype(np.uint8) y_val.append(mask) y_val = np.array(y_val) y_val = y_val.reshape((-1, 1, INPUT_SIZE, INPUT_SIZE)) return X_val, y_val def get_mul_mean_image(area_id): prefix = area_id_to_prefix(area_id) with tb.open_file(FMT_MULMEAN.format(prefix), 'r') as f: im_mean = np.array(f.get_node('/mulmean')) return im_mean def preproc_stage3(area_id): prefix = area_id_to_prefix(area_id) if not Path(FMT_VALTEST_MUL_STORE.format(prefix)).exists(): valtrain_test_mul_image_prep(area_id) if not Path(FMT_TEST_MUL_STORE.format(prefix)).exists(): train_test_mul_image_prep(area_id) # mean image for subtract preprocessing X1, _ = _get_train_mul_data(area_id) X2 = _get_test_mul_data(area_id) X = np.vstack([X1, X2]) print(X.shape) X_mean = X.mean(axis=0) fn = FMT_MULMEAN.format(prefix) logger.info("Prepare mean image: {}".format(fn)) with tb.open_file(fn, 'w') as f: atom = tb.Atom.from_dtype(X_mean.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, 'mulmean', atom, X_mean.shape, filters=filters) ds[:] = X_mean def _internal_test_predict_best_param(area_id, save_pred=True): prefix = area_id_to_prefix(area_id) param = _get_model_parameter(area_id) epoch = param['fn_epoch'] min_th = param['min_poly_area'] # Prediction phase logger.info("Prediction phase: {}".format(prefix)) X_mean = get_mul_mean_image(area_id) # Load model weights # Predict and Save prediction result fn = FMT_TESTPRED_PATH.format(prefix) fn_model = FMT_VALMODEL_PATH.format(prefix + '_{epoch:02d}') fn_model = fn_model.format(epoch=epoch) model = get_unet() model.load_weights(fn_model) fn_test = FMT_TEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test, index_col='ImageId') y_pred = model.predict_generator( generate_test_batch( 
area_id, batch_size=64, immean=X_mean, enable_tqdm=True, ), val_samples=len(df_test), ) del model # Save prediction result if save_pred: with tb.open_file(fn, 'w') as f: atom = tb.Atom.from_dtype(y_pred.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, 'pred', atom, y_pred.shape, filters=filters) ds[:] = y_pred return y_pred def _internal_test(area_id, enable_tqdm=False): prefix = area_id_to_prefix(area_id) y_pred = _internal_test_predict_best_param(area_id, save_pred=False) param = _get_model_parameter(area_id) min_th = param['min_poly_area'] # Postprocessing phase logger.info("Postprocessing phase") fn_test = FMT_TEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test, index_col='ImageId') fn_out = FMT_TESTPOLY_PATH.format(prefix) with open(fn_out, 'w') as f: f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n") test_image_list = df_test.index.tolist() for idx, image_id in tqdm.tqdm(enumerate(test_image_list), total=len(test_image_list)): df_poly = mask_to_poly(y_pred[idx][0], min_polygon_area_th=min_th) if len(df_poly) > 0: for i, row in df_poly.iterrows(): line = "{},{},\"{}\",{:.6f}\n".format( image_id, row.bid, row.wkt, row.area_ratio) line = _remove_interiors(line) f.write(line) else: f.write("{},{},{},0\n".format( image_id, -1, "POLYGON EMPTY")) def validate_score(area_id): """ Calc competition score """ prefix = area_id_to_prefix(area_id) # Prediction phase if not Path(FMT_VALTESTPRED_PATH.format(prefix)).exists(): X_val, y_val = _get_valtest_mul_data(area_id) X_mean = get_mul_mean_image(area_id) # Load model weights # Predict and Save prediction result model = get_unet() model.load_weights(FMT_VALMODEL_PATH.format(prefix)) y_pred = model.predict(X_val - X_mean, batch_size=8, verbose=1) del model # Save prediction result fn = FMT_VALTESTPRED_PATH.format(prefix) with tb.open_file(fn, 'w') as f: atom = tb.Atom.from_dtype(y_pred.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = 
f.create_carray(f.root, 'pred', atom, y_pred.shape, filters=filters) ds[:] = y_pred # Postprocessing phase if not Path(FMT_VALTESTPOLY_PATH.format(prefix)).exists(): fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test, index_col='ImageId') fn = FMT_VALTESTPRED_PATH.format(prefix) with tb.open_file(fn, 'r') as f: y_pred = np.array(f.get_node('/pred')) print(y_pred.shape) fn_out = FMT_VALTESTPOLY_PATH.format(prefix) with open(fn_out, 'w') as f: f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n") for idx, image_id in enumerate(df_test.index.tolist()): df_poly = mask_to_poly(y_pred[idx][0]) if len(df_poly) > 0: for i, row in df_poly.iterrows(): f.write("{},{},\"{}\",{:.6f}\n".format( image_id, row.bid, row.wkt, row.area_ratio)) else: f.write("{},{},{},0\n".format( image_id, -1, "POLYGON EMPTY")) # update fn_out with open(fn_out, 'r') as f: lines = f.readlines() with open(fn_out, 'w') as f: f.write(lines[0]) for line in lines[1:]: line = _remove_interiors(line) f.write(line) # Validation solution file if not Path(FMT_VALTESTTRUTH_PATH.format(prefix)).exists(): fn_true = FMT_TRAIN_SUMMARY_PATH.format(prefix=prefix) df_true = pd.read_csv(fn_true) # # Remove prefix "PAN_" # df_true.loc[:, 'ImageId'] = df_true.ImageId.str[4:] fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test) df_test_image_ids = df_test.ImageId.unique() fn_out = FMT_VALTESTTRUTH_PATH.format(prefix) with open(fn_out, 'w') as f: f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n") df_true = df_true[df_true.ImageId.isin(df_test_image_ids)] for idx, r in df_true.iterrows(): f.write("{},{},\"{}\",{:.6f}\n".format( r.ImageId, r.BuildingId, r.PolygonWKT_Pix, 1.0)) def validate_all_score(): header_line = [] lines = [] for area_id in range(2, 6): prefix = area_id_to_prefix(area_id) assert Path(FMT_VALTESTTRUTH_PATH.format(prefix)).exists() with open(FMT_VALTESTTRUTH_PATH.format(prefix), 'r') as f: header_line = f.readline() lines 
+= f.readlines() with open(FMT_VALTESTTRUTH_OVALL_PATH, 'w') as f: f.write(header_line) for line in lines: f.write(line) # Predicted polygons header_line = [] lines = [] for area_id in range(2, 6): prefix = area_id_to_prefix(area_id) assert Path(FMT_VALTESTPOLY_PATH.format(prefix)).exists() with open(FMT_VALTESTPOLY_PATH.format(prefix), 'r') as f: header_line = f.readline() lines += f.readlines() with open(FMT_VALTESTPOLY_OVALL_PATH, 'w') as f: f.write(header_line) for line in lines: f.write(line) def generate_valtest_batch(area_id, batch_size=8, immean=None, enable_tqdm=False): prefix = area_id_to_prefix(area_id) df_train = pd.read_csv(FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)) fn_im = FMT_VALTEST_MUL_STORE.format(prefix) fn_mask = FMT_VALTEST_MASK_STORE.format(prefix) image_id_list = df_train.ImageId.tolist() if enable_tqdm: pbar = tqdm.tqdm(total=len(image_id_list)) while 1: total_sz = len(image_id_list) n_batch = int(math.floor(total_sz / batch_size) + 1) with tb.open_file(fn_im, 'r') as f_im,\ tb.open_file(fn_mask, 'r') as f_mask: for i_batch in range(n_batch): target_image_ids = image_id_list[ i_batch*batch_size:(i_batch+1)*batch_size ] if len(target_image_ids) == 0: continue X_train = [] y_train = [] for image_id in target_image_ids: im = np.array(f_im.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_train.append(im) mask = np.array(f_mask.get_node('/' + image_id)) mask = (mask > 0).astype(np.uint8) y_train.append(mask) X_train = np.array(X_train) y_train = np.array(y_train) y_train = y_train.reshape((-1, 1, INPUT_SIZE, INPUT_SIZE)) if immean is not None: X_train = X_train - immean if enable_tqdm: pbar.update(y_train.shape[0]) yield (X_train, y_train) if enable_tqdm: pbar.close() def generate_valtrain_batch(area_id, batch_size=8, immean=None): prefix = area_id_to_prefix(area_id) df_train = pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix)) fn_im = FMT_VALTRAIN_MUL_STORE.format(prefix) fn_mask = 
FMT_VALTRAIN_MASK_STORE.format(prefix) image_id_list = df_train.ImageId.tolist() np.random.shuffle(image_id_list) while 1: total_sz = len(image_id_list) n_batch = int(math.floor(total_sz / batch_size) + 1) with tb.open_file(fn_im, 'r') as f_im,\ tb.open_file(fn_mask, 'r') as f_mask: for i_batch in range(n_batch): target_image_ids = image_id_list[ i_batch*batch_size:(i_batch+1)*batch_size ] if len(target_image_ids) == 0: continue X_train = [] y_train = [] for image_id in target_image_ids: im = np.array(f_im.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_train.append(im) mask = np.array(f_mask.get_node('/' + image_id)) mask = (mask > 0).astype(np.uint8) y_train.append(mask) X_train = np.array(X_train) y_train = np.array(y_train) y_train = y_train.reshape((-1, 1, INPUT_SIZE, INPUT_SIZE)) if immean is not None: X_train = X_train - immean yield (X_train, y_train) def _get_test_data(area_id): prefix = area_id_to_prefix(area_id) fn_test = FMT_TEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test) X_test = [] fn_im = FMT_TEST_IM_STORE.format(prefix) with tb.open_file(fn_im, 'r') as f: for idx, image_id in enumerate(df_test.ImageId.tolist()): im = np.array(f.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_test.append(im) X_test = np.array(X_test) return X_test def _get_valtest_data(area_id): prefix = area_id_to_prefix(area_id) fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test) X_val = [] fn_im = FMT_VALTEST_IM_STORE.format(prefix) with tb.open_file(fn_im, 'r') as f: for idx, image_id in enumerate(df_test.ImageId.tolist()): im = np.array(f.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_val.append(im) X_val = np.array(X_val) y_val = [] fn_mask = FMT_VALTEST_MASK_STORE.format(prefix) with tb.open_file(fn_mask, 'r') as f: for idx, image_id in enumerate(df_test.ImageId.tolist()): mask = np.array(f.get_node('/' + 
image_id)) mask = (mask > 0.5).astype(np.uint8) y_val.append(mask) y_val = np.array(y_val) y_val = y_val.reshape((-1, 1, INPUT_SIZE, INPUT_SIZE)) return X_val, y_val def _get_valtrain_data(area_id): prefix = area_id_to_prefix(area_id) fn_train = FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix) df_train = pd.read_csv(fn_train) X_val = [] fn_im = FMT_VALTRAIN_IM_STORE.format(prefix) with tb.open_file(fn_im, 'r') as f: for idx, image_id in enumerate(df_train.ImageId.tolist()): im = np.array(f.get_node('/' + image_id)) im = np.swapaxes(im, 0, 2) im = np.swapaxes(im, 1, 2) X_val.append(im) X_val = np.array(X_val) y_val = [] fn_mask = FMT_VALTRAIN_MASK_STORE.format(prefix) with tb.open_file(fn_mask, 'r') as f: for idx, image_id in enumerate(df_train.ImageId.tolist()): mask = np.array(f.get_node('/' + image_id)) mask = (mask > 0.5).astype(np.uint8) y_val.append(mask) y_val = np.array(y_val) y_val = y_val.reshape((-1, 1, INPUT_SIZE, INPUT_SIZE)) return X_val, y_val def predict(area_id): prefix = area_id_to_prefix(area_id) X_test = _get_test_mul_data(area_id) X_mean = get_mul_mean_image(area_id) # Load model weights # Predict and Save prediction result model = get_unet() model.load_weights(FMT_VALMODEL_PATH.format(prefix)) y_pred = model.predict(X_test - X_mean, batch_size=8, verbose=1) del model # Save prediction result fn = FMT_TESTPRED_PATH.format(prefix) with tb.open_file(fn, 'w') as f: atom = tb.Atom.from_dtype(y_pred.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, 'pred', atom, y_pred.shape, filters=filters) ds[:] = y_pred def _internal_validate_predict_best_param(area_id, enable_tqdm=False): param = _get_model_parameter(area_id) epoch = param['fn_epoch'] y_pred = _internal_validate_predict( area_id, epoch=epoch, save_pred=False, enable_tqdm=enable_tqdm) return y_pred def _internal_validate_predict(area_id, epoch=3, save_pred=True, enable_tqdm=False): prefix = area_id_to_prefix(area_id) X_mean = get_mul_mean_image(area_id) # 
Load model weights # Predict and Save prediction result fn_model = FMT_VALMODEL_PATH.format(prefix + '_{epoch:02d}') fn_model = fn_model.format(epoch=epoch) model = get_unet() model.load_weights(fn_model) fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test, index_col='ImageId') y_pred = model.predict_generator( generate_valtest_batch( area_id, batch_size=64, immean=X_mean, enable_tqdm=enable_tqdm, ), val_samples=len(df_test), ) del model # Save prediction result if save_pred: fn = FMT_VALTESTPRED_PATH.format(prefix) with tb.open_file(fn, 'w') as f: atom = tb.Atom.from_dtype(y_pred.dtype) filters = tb.Filters(complib='blosc', complevel=9) ds = f.create_carray(f.root, 'pred', atom, y_pred.shape, filters=filters) ds[:] = y_pred return y_pred def _internal_validate_fscore_wo_pred_file(area_id, epoch=3, min_th=MIN_POLYGON_AREA, enable_tqdm=False): prefix = area_id_to_prefix(area_id) # Prediction phase logger.info("Prediction phase") y_pred = _internal_validate_predict( area_id, epoch=epoch, save_pred=False, enable_tqdm=enable_tqdm) # Postprocessing phase logger.info("Postprocessing phase") fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test, index_col='ImageId') fn = FMT_VALTESTPRED_PATH.format(prefix) fn_out = FMT_VALTESTPOLY_PATH.format(prefix) with open(fn_out, 'w') as f: f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n") test_list = df_test.index.tolist() iterator = enumerate(test_list) for idx, image_id in tqdm.tqdm(iterator, total=len(test_list)): df_poly = mask_to_poly(y_pred[idx][0], min_polygon_area_th=min_th) if len(df_poly) > 0: for i, row in df_poly.iterrows(): line = "{},{},\"{}\",{:.6f}\n".format( image_id, row.bid, row.wkt, row.area_ratio) line = _remove_interiors(line) f.write(line) else: f.write("{},{},{},0\n".format( image_id, -1, "POLYGON EMPTY")) # ------------------------ # Validation solution file logger.info("Validation solution file") fn_true = 
FMT_TRAIN_SUMMARY_PATH.format(prefix=prefix) df_true = pd.read_csv(fn_true) # # Remove prefix "PAN_" # df_true.loc[:, 'ImageId'] = df_true.ImageId.str[4:] fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test) df_test_image_ids = df_test.ImageId.unique() fn_out = FMT_VALTESTTRUTH_PATH.format(prefix) with open(fn_out, 'w') as f: f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n") df_true = df_true[df_true.ImageId.isin(df_test_image_ids)] for idx, r in df_true.iterrows(): f.write("{},{},\"{}\",{:.6f}\n".format( r.ImageId, r.BuildingId, r.PolygonWKT_Pix, 1.0)) def _internal_validate_fscore(area_id, epoch=3, predict=True, min_th=MIN_POLYGON_AREA, enable_tqdm=False): prefix = area_id_to_prefix(area_id) # Prediction phase logger.info("Prediction phase") if predict: _internal_validate_predict( area_id, epoch=epoch, enable_tqdm=enable_tqdm) # Postprocessing phase logger.info("Postprocessing phase") fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test, index_col='ImageId') fn = FMT_VALTESTPRED_PATH.format(prefix) fn_out = FMT_VALTESTPOLY_PATH.format(prefix) with open(fn_out, 'w') as f,\ tb.open_file(fn, 'r') as fr: y_pred = np.array(fr.get_node('/pred')) f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n") test_list = df_test.index.tolist() iterator = enumerate(test_list) for idx, image_id in tqdm.tqdm(iterator, total=len(test_list)): df_poly = mask_to_poly(y_pred[idx][0], min_polygon_area_th=min_th) if len(df_poly) > 0: for i, row in df_poly.iterrows(): line = "{},{},\"{}\",{:.6f}\n".format( image_id, row.bid, row.wkt, row.area_ratio) line = _remove_interiors(line) f.write(line) else: f.write("{},{},{},0\n".format( image_id, -1, "POLYGON EMPTY")) # ------------------------ # Validation solution file logger.info("Validation solution file") # if not Path(FMT_VALTESTTRUTH_PATH.format(prefix)).exists(): if True: fn_true = FMT_TRAIN_SUMMARY_PATH.format(prefix=prefix) df_true = pd.read_csv(fn_true) 
# # Remove prefix "PAN_" # df_true.loc[:, 'ImageId'] = df_true.ImageId.str[4:] fn_test = FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test) df_test_image_ids = df_test.ImageId.unique() fn_out = FMT_VALTESTTRUTH_PATH.format(prefix) with open(fn_out, 'w') as f: f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n") df_true = df_true[df_true.ImageId.isin(df_test_image_ids)] for idx, r in df_true.iterrows(): f.write("{},{},\"{}\",{:.6f}\n".format( r.ImageId, r.BuildingId, r.PolygonWKT_Pix, 1.0)) @click.group() def cli(): pass @cli.command() @click.argument('datapath', type=str) def validate(datapath): area_id = directory_name_to_area_id(datapath) prefix = area_id_to_prefix(area_id) logger.info(">> validate sub-command: {}".format(prefix)) X_mean = get_mul_mean_image(area_id) X_val, y_val = _get_valtest_mul_data(area_id) X_val = X_val - X_mean if not Path(MODEL_DIR).exists(): Path(MODEL_DIR).mkdir(parents=True) logger.info("load valtrain") X_trn, y_trn = _get_valtrain_mul_data(area_id) X_trn = X_trn - X_mean model = get_unet() model_checkpoint = ModelCheckpoint( FMT_VALMODEL_PATH.format(prefix + "_{epoch:02d}"), monitor='val_jaccard_coef_int', save_best_only=False) model_earlystop = EarlyStopping( monitor='val_jaccard_coef_int', patience=10, verbose=0, mode='max') model_history = History() df_train = pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix)) logger.info("Fit") model.fit( X_trn, y_trn, nb_epoch=200, shuffle=True, verbose=1, validation_data=(X_val, y_val), callbacks=[model_checkpoint, model_earlystop, model_history]) model.save_weights(FMT_VALMODEL_LAST_PATH.format(prefix)) # Save evaluation history pd.DataFrame(model_history.history).to_csv( FMT_VALMODEL_HIST.format(prefix), index=False) logger.info(">> validate sub-command: {} ... 
Done".format(prefix)) @cli.command() @click.argument('datapath', type=str) def testproc(datapath): area_id = directory_name_to_area_id(datapath) prefix = area_id_to_prefix(area_id) logger.info(">>>> Test proc for {}".format(prefix)) _internal_test(area_id) logger.info(">>>> Test proc for {} ... done".format(prefix)) @cli.command() @click.argument('datapath', type=str) def evalfscore(datapath): area_id = directory_name_to_area_id(datapath) prefix = area_id_to_prefix(area_id) logger.info("Evaluate fscore on validation set: {}".format(prefix)) # for each epoch # if not Path(FMT_VALMODEL_EVALHIST.format(prefix)).exists(): if True: df_hist = pd.read_csv(FMT_VALMODEL_HIST.format(prefix)) df_hist.loc[:, 'epoch'] = list(range(1, len(df_hist) + 1)) rows = [] for zero_base_epoch in range(0, len(df_hist)): logger.info(">>> Epoch: {}".format(zero_base_epoch)) _internal_validate_fscore_wo_pred_file( area_id, epoch=zero_base_epoch, enable_tqdm=True, min_th=MIN_POLYGON_AREA) evaluate_record = _calc_fscore_per_aoi(area_id) evaluate_record['zero_base_epoch'] = zero_base_epoch evaluate_record['min_area_th'] = MIN_POLYGON_AREA evaluate_record['area_id'] = area_id logger.info("\n" + json.dumps(evaluate_record, indent=4)) rows.append(evaluate_record) pd.DataFrame(rows).to_csv( FMT_VALMODEL_EVALHIST.format(prefix), index=False) # find best min-poly-threshold df_evalhist = pd.read_csv(FMT_VALMODEL_EVALHIST.format(prefix)) best_row = df_evalhist.sort_values(by='fscore', ascending=False).iloc[0] best_epoch = int(best_row.zero_base_epoch) best_fscore = best_row.fscore # optimize min area th rows = [] for th in [30, 60, 90, 120, 150, 180, 210, 240]: logger.info(">>> TH: {}".format(th)) predict_flag = False if th == 30: predict_flag = True _internal_validate_fscore( area_id, epoch=best_epoch, enable_tqdm=True, min_th=th, predict=predict_flag) evaluate_record = _calc_fscore_per_aoi(area_id) evaluate_record['zero_base_epoch'] = best_epoch evaluate_record['min_area_th'] = th 
evaluate_record['area_id'] = area_id logger.info("\n" + json.dumps(evaluate_record, indent=4)) rows.append(evaluate_record) pd.DataFrame(rows).to_csv( FMT_VALMODEL_EVALTHHIST.format(prefix), index=False) logger.info("Evaluate fscore on validation set: {} .. done".format(prefix)) def mask_to_poly(mask, min_polygon_area_th=MIN_POLYGON_AREA): """ Convert from 256x256 mask to polygons on 650x650 image """ mask = (skimage.transform.resize(mask, (650, 650)) > 0.5).astype(np.uint8) shapes = rasterio.features.shapes(mask.astype(np.int16), mask > 0) poly_list = [] mp = shapely.ops.cascaded_union( shapely.geometry.MultiPolygon([ shapely.geometry.shape(shape) for shape, value in shapes ])) if isinstance(mp, shapely.geometry.Polygon): df = pd.DataFrame({ 'area_size': [mp.area], 'poly': [mp], }) else: df = pd.DataFrame({ 'area_size': [p.area for p in mp], 'poly': [p for p in mp], }) df = df[df.area_size > min_polygon_area_th].sort_values( by='area_size', ascending=False) df.loc[:, 'wkt'] = df.poly.apply(lambda x: shapely.wkt.dumps( x, rounding_precision=0)) df.loc[:, 'bid'] = list(range(1, len(df) + 1)) df.loc[:, 'area_ratio'] = df.area_size / df.area_size.max() return df def postproc(area_id): # Mask to poly print(area_id) prefix = area_id_to_prefix(area_id) fn_test = FMT_TEST_IMAGELIST_PATH.format(prefix=prefix) df_test = pd.read_csv(fn_test, index_col='ImageId') fn = FMT_TESTPRED_PATH.format(prefix) with tb.open_file(fn, 'r') as f: y_pred = np.array(f.get_node('/pred')) print(y_pred.shape) fn_out = FMT_TESTPOLY_PATH.format(prefix) with open(fn_out, 'w') as f: f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n") for idx, image_id in enumerate(df_test.index.tolist()): df_poly = mask_to_poly(y_pred[idx][0]) if len(df_poly) > 0: for i, row in df_poly.iterrows(): f.write("{},{},\"{}\",{:.6f}\n".format( image_id, row.bid, row.wkt, row.area_ratio)) else: f.write("{},{},{},0\n".format( image_id, -1, "POLYGON EMPTY")) def merge(): df_list = [] for area_id in range(2, 6): prefix 
= area_id_to_prefix(area_id) df_part = pd.read_csv( FMT_TESTPOLY_PATH.format(prefix)) df_list.append(df_part) df = pd.concat(df_list) df.to_csv(FN_SOLUTION_CSV, index=False) with open(FN_SOLUTION_CSV, 'r') as f: lines = f.readlines() with open(FN_SOLUTION_CSV, 'w') as f: f.write(lines[0]) for line in lines[1:]: line = _remove_interiors(line) f.write(line) if __name__ == '__main__': cli()
35.034072
79
0.604495
9,038
64,778
4.019916
0.057203
0.030221
0.028074
0.026038
0.80458
0.770285
0.733898
0.693741
0.677337
0.656721
0
0.015977
0.272438
64,778
1,848
80
35.05303
0.754912
0.029887
0
0.645848
0
0
0.062755
0.016549
0
0
0
0
0.006863
1
0.039808
false
0.001373
0.019218
0
0.085106
0.002745
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
67e3dc13a1b35006643aaa0824b10d5857fb6b8b
198
py
Python
hackerrank/python-sort-sort/solution.py
SamProkopchuk/coding-problems
fa0ca2c05ac90e41945de1a5751e5545a8459ac4
[ "MIT" ]
null
null
null
hackerrank/python-sort-sort/solution.py
SamProkopchuk/coding-problems
fa0ca2c05ac90e41945de1a5751e5545a8459ac4
[ "MIT" ]
null
null
null
hackerrank/python-sort-sort/solution.py
SamProkopchuk/coding-problems
fa0ca2c05ac90e41945de1a5751e5545a8459ac4
[ "MIT" ]
null
null
null
n, m = map(int, input().strip().split()) matrix = [list(map(int, input().strip().split())) for _ in range(n)] k = int(input().strip()) for lst in sorted(matrix, key=lambda l: l[k]): print(*lst)
33
68
0.606061
34
198
3.5
0.558824
0.201681
0.327731
0.268908
0.352941
0
0
0
0
0
0
0
0.141414
198
5
69
39.6
0.7
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
db0fc1f5c06cbc189e0b49852b5c6f2bd3629ed0
231
py
Python
typed_environment_configuration/__init__.py
springload/typed_environment_configuration
9d88f067fcbd1e4d896c15084aa35ccc020b43ac
[ "MIT" ]
1
2019-12-02T03:42:12.000Z
2019-12-02T03:42:12.000Z
typed_environment_configuration/__init__.py
springload/typed_environment_configuration
9d88f067fcbd1e4d896c15084aa35ccc020b43ac
[ "MIT" ]
3
2020-07-15T02:43:52.000Z
2020-07-21T02:41:47.000Z
typed_environment_configuration/__init__.py
springload/typed_environment_configuration
9d88f067fcbd1e4d896c15084aa35ccc020b43ac
[ "MIT" ]
1
2022-03-08T20:55:02.000Z
2022-03-08T20:55:02.000Z
# -*- coding: utf-8 -*- """Top-level package for Typed Environment Configuration.""" __author__ = """Eugene Dementyev""" __email__ = "eugene@springload.co.nz" __version__ = '0.1.4' from .typed_environment_configuration import *
23.1
60
0.718615
27
231
5.62963
0.851852
0.210526
0.381579
0
0
0
0
0
0
0
0
0.019704
0.121212
231
9
61
25.666667
0.729064
0.333333
0
0
0
0
0.297297
0.155405
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
db25cdd2c0e67e26adf5ea6063dfe30cf7097124
95
py
Python
python/34.find-first-and-last-position-of-element-in-sorted-array.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
python/34.find-first-and-last-position-of-element-in-sorted-array.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
python/34.find-first-and-last-position-of-element-in-sorted-array.py
stavanmehta/leetcode
1224e43ce29430c840e65daae3b343182e24709c
[ "Apache-2.0" ]
null
null
null
class Solution: def searchRange(self, nums: List[int], target: int) -> List[int]:
23.75
69
0.610526
12
95
4.833333
0.75
0.241379
0
0
0
0
0
0
0
0
0
0
0.242105
95
3
70
31.666667
0.805556
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4